id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
142,148 |
Karaage-Cluster/python-tldap
|
tldap/database/__init__.py
|
tldap.database.SearchOptions
|
class SearchOptions:
""" Application specific search options. """
def __init__(self, base_dn: str, object_class: Set[str], pk_field: str) -> None:
self.base_dn = base_dn
self.object_class = object_class
self.pk_field = pk_field
|
class SearchOptions:
''' Application specific search options. '''
def __init__(self, base_dn: str, object_class: Set[str], pk_field: str) -> None:
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 1 | 0.2 | 0 | 1 | 0 | 0 | 1 | 3 | 1 | 1 | 6 | 0 | 5 | 5 | 3 | 1 | 5 | 5 | 3 | 1 | 0 | 0 | 1 |
142,149 |
Karaage-Cluster/python-tldap
|
tldap/database/__init__.py
|
tldap.database.NotLoadedObject
|
class NotLoadedObject(NotLoaded):
""" Represents a single object that needs to be loaded. """
def __init__(self, *, table: LdapObjectClass, key: str, value: str):
self._table = table
self._key = key
self._value = value
def __repr__(self):
return f"<NotLoadedObject {self._table} {self._key}={self._value}>"
def load(self, database: Optional[Database] = None) -> LdapObject:
return self._load_one(self._table, self._key, self._value, database)
|
class NotLoadedObject(NotLoaded):
''' Represents a single object that needs to be loaded. '''
def __init__(self, *, table: LdapObjectClass, key: str, value: str):
pass
def __repr__(self):
pass
def load(self, database: Optional[Database] = None) -> LdapObject:
pass
| 4 | 1 | 3 | 0 | 3 | 0 | 1 | 0.11 | 1 | 3 | 2 | 0 | 3 | 3 | 3 | 7 | 12 | 2 | 9 | 7 | 5 | 1 | 9 | 7 | 5 | 1 | 1 | 0 | 3 |
142,150 |
Karaage-Cluster/python-tldap
|
tldap/database/__init__.py
|
tldap.database.NotLoadedList
|
class NotLoadedList(NotLoaded):
""" Represents a list of objects that needs to be loaded via a single key. """
def __init__(self, *, table: LdapObjectClass, key: str, value: str):
self._table = table
self._key = key
self._value = value
def __repr__(self):
return f"<NotLoadedList {self._table} {self._key}={self._value}>"
def load(self, database: Optional[Database] = None) -> List[LdapObject]:
return self._load_list(self._table, self._key, self._value, database)
|
class NotLoadedList(NotLoaded):
''' Represents a list of objects that needs to be loaded via a single key. '''
def __init__(self, *, table: LdapObjectClass, key: str, value: str):
pass
def __repr__(self):
pass
def load(self, database: Optional[Database] = None) -> List[LdapObject]:
pass
| 4 | 1 | 3 | 0 | 3 | 0 | 1 | 0.11 | 1 | 3 | 2 | 0 | 3 | 3 | 3 | 7 | 13 | 3 | 9 | 7 | 5 | 1 | 9 | 7 | 5 | 1 | 1 | 0 | 3 |
142,151 |
Karaage-Cluster/python-tldap
|
tldap/backend/__init__.py
|
tldap.backend.DefaultConnectionProxy
|
class DefaultConnectionProxy(object):
"""
Proxy for accessing the default DatabaseWrapper object's attributes. If you
need to access the DatabaseWrapper object itself, use
connections[DEFAULT_LDAP_ALIAS] instead.
"""
def __getattr__(self, item):
return getattr(connections[DEFAULT_LDAP_ALIAS], item)
def __setattr__(self, name, value):
return setattr(connections[DEFAULT_LDAP_ALIAS], name, value)
def __delattr__(self, name):
return delattr(connections[DEFAULT_LDAP_ALIAS], name)
def __eq__(self, other):
return connections[DEFAULT_LDAP_ALIAS] == other
def __ne__(self, other):
return connections[DEFAULT_LDAP_ALIAS] != other
|
class DefaultConnectionProxy(object):
'''
Proxy for accessing the default DatabaseWrapper object's attributes. If you
need to access the DatabaseWrapper object itself, use
connections[DEFAULT_LDAP_ALIAS] instead.
'''
def __getattr__(self, item):
pass
def __setattr__(self, name, value):
pass
def __delattr__(self, name):
pass
def __eq__(self, other):
pass
def __ne__(self, other):
pass
| 6 | 1 | 2 | 0 | 2 | 0 | 1 | 0.45 | 1 | 0 | 0 | 0 | 5 | 0 | 5 | 5 | 20 | 4 | 11 | 6 | 5 | 5 | 11 | 6 | 5 | 1 | 1 | 0 | 5 |
142,152 |
Karaage-Cluster/python-tldap
|
tests/django/database.py
|
tests.django.database.Group
|
class Group(parent.Group):
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
return changes
|
class Group(parent.Group):
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 0 | 0 | 1 | 28 | 5 | 1 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
142,153 |
Karaage-Cluster/python-tldap
|
tests/django/database.py
|
tests.django.database.Account
|
class Account(parent.Account):
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
return changes
|
class Account(parent.Account):
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
pass
| 3 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 0 | 0 | 1 | 26 | 5 | 1 | 4 | 3 | 1 | 0 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
142,154 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.UnicodeField
|
class UnicodeField(Field):
""" Field contains a UTF16 character string. """
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
value = value.encode("utf_16le")
return value
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
value = value.decode("utf_16")
return value
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not isinstance(value, six.string_types):
raise tldap.exceptions.ValidationError("should be a string")
|
class UnicodeField(Field):
''' Field contains a UTF16 character string. '''
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
| 4 | 4 | 7 | 0 | 4 | 4 | 2 | 1 | 1 | 2 | 1 | 0 | 3 | 0 | 3 | 13 | 27 | 3 | 12 | 4 | 8 | 12 | 12 | 4 | 8 | 2 | 2 | 1 | 5 |
142,155 |
Karaage-Cluster/python-tldap
|
tests/database.py
|
tests.database.OU
|
class OU(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
fields = helpers.get_fields_common()
return fields
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
return SearchOptions(
base_dn="",
object_class={'organizationalUnit'},
pk_field="ou",
)
@classmethod
def on_load(cls, python_data: LdapObject, _database: Database) -> LdapObject:
return python_data
@classmethod
def on_save(cls, changes: Changeset, _database: Database) -> Changeset:
changes = helpers.set_object_class(changes, ['top', 'organizationalUnit'])
return changes
|
class OU(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
pass
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
pass
@classmethod
def on_load(cls, python_data: LdapObject, _database: Database) -> LdapObject:
pass
@classmethod
def on_save(cls, changes: Changeset, _database: Database) -> Changeset:
pass
| 9 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 5 | 4 | 0 | 0 | 0 | 4 | 25 | 23 | 4 | 19 | 10 | 10 | 0 | 11 | 6 | 6 | 1 | 2 | 0 | 4 |
142,156 |
Karaage-Cluster/python-tldap
|
tests/database.py
|
tests.database.Group
|
class Group(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
fields = {
**helpers.get_fields_common(),
**helpers.get_fields_group(),
}
return fields
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
settings = database.settings
return SearchOptions(
base_dn=settings['LDAP_GROUP_BASE'],
object_class={'posixGroup'},
pk_field="cn",
)
@classmethod
def on_load(cls, python_data: LdapObject, _database: Database) -> LdapObject:
python_data = helpers.load_group(python_data, Account)
return python_data
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
settings = database.settings
changes = helpers.save_group(changes)
changes = dhelpers.save_group(changes, Group, database)
changes = helpers.set_object_class(changes, ['top', 'posixGroup'])
changes = helpers.rdn_to_dn(changes, 'cn', settings['LDAP_GROUP_BASE'])
return changes
@classmethod
def add_member(cls, changes: Changeset, member: 'Account') -> Changeset:
assert isinstance(changes.src, cls)
return helpers.add_group_member(changes, member)
@classmethod
def remove_member(cls, changes: Changeset, member: 'Account') -> Changeset:
assert isinstance(changes.src, cls)
return helpers.remove_group_member(changes, member)
|
class Group(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
pass
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
pass
@classmethod
def on_load(cls, python_data: LdapObject, _database: Database) -> LdapObject:
pass
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
pass
@classmethod
def add_member(cls, changes: Changeset, member: 'Account') -> Changeset:
pass
@classmethod
def remove_member(cls, changes: Changeset, member: 'Account') -> Changeset:
pass
| 13 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 6 | 5 | 1 | 0 | 0 | 6 | 27 | 42 | 6 | 36 | 16 | 23 | 0 | 23 | 10 | 16 | 1 | 2 | 0 | 6 |
142,157 |
Karaage-Cluster/python-tldap
|
tldap/backend/fake_transactions.py
|
tldap.backend.fake_transactions.LDAPwrapper
|
class LDAPwrapper(LdapBase):
""" The LDAP connection class. """
def __init__(self, settings_dict: dict) -> None:
super(LDAPwrapper, self).__init__(settings_dict)
self._transactions: List[List[UpdateCallable]] = []
####################
# Cache Management #
####################
def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
super(LDAPwrapper, self).reset()
if len(self._transactions) == 0:
raise RuntimeError("reset called outside a transaction.")
self._transactions[-1] = []
def _cache_get_for_dn(self, dn: str) -> Dict[str, bytes]:
"""
Object state is cached. When an update is required the update will be
simulated on this cache, so that rollback information can be correct.
This function retrieves the cached data.
"""
# no cached item, retrieve from ldap
self._do_with_retry(
lambda obj: obj.search(
dn,
'(objectclass=*)',
ldap3.BASE,
attributes=['*', '+']))
results = self._obj.response
if len(results) < 1:
raise NoSuchObject("No results finding current value")
if len(results) > 1:
raise RuntimeError("Too many results finding current value")
return results[0]['raw_attributes']
##########################
# Transaction Management #
##########################
def is_dirty(self) -> bool:
""" Are there uncommitted changes? """
if len(self._transactions) == 0:
raise RuntimeError("is_dirty called outside a transaction.")
if len(self._transactions[-1]) > 0:
return True
return False
def is_managed(self) -> bool:
""" Are we inside transaction management? """
return len(self._transactions) > 0
def enter_transaction_management(self) -> None:
""" Start a transaction. """
self._transactions.append([])
def leave_transaction_management(self) -> None:
"""
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
"""
if len(self._transactions) == 0:
raise RuntimeError("leave_transaction_management called outside transaction")
elif len(self._transactions[-1]) > 0:
raise RuntimeError("leave_transaction_management called with uncommited rollbacks")
else:
self._transactions.pop()
def commit(self) -> None:
"""
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
"""
if len(self._transactions) == 0:
raise RuntimeError("commit called outside transaction")
# If we have nested transactions, we don't actually commit, but push
# rollbacks up to previous transaction.
if len(self._transactions) > 1:
for on_rollback in reversed(self._transactions[-1]):
self._transactions[-2].insert(0, on_rollback)
_debug("commit")
self.reset()
def rollback(self) -> None:
"""
Roll back to previous database state. However stay inside transaction
management.
"""
if len(self._transactions) == 0:
raise RuntimeError("rollback called outside transaction")
_debug("rollback:", self._transactions[-1])
# if something goes wrong here, nothing we can do about it, leave
# database as is.
try:
# for every rollback action ...
for on_rollback in self._transactions[-1]:
# execute it
_debug("--> rolling back", on_rollback)
self._do_with_retry(on_rollback)
except: # noqa: E722
_debug("--> rollback failed")
exc_class, exc, tb = sys.exc_info()
raise tldap.exceptions.RollbackError(
"FATAL Unrecoverable rollback error: %r" % exc)
finally:
# reset everything to clean state
_debug("--> rollback success")
self.reset()
def _process(self, on_commit: UpdateCallable, on_rollback: UpdateCallable) -> Any:
"""
Process action. oncommit is a callback to execute action, onrollback is
a callback to execute if the oncommit() has been called and a rollback
is required
"""
_debug("---> commiting", on_commit)
result = self._do_with_retry(on_commit)
if len(self._transactions) > 0:
# add statement to rollback log in case something goes wrong
self._transactions[-1].insert(0, on_rollback)
return result
##################################
# Functions needing Transactions #
##################################
def add(self, dn: str, mod_list: dict) -> None:
"""
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
"""
_debug("add", self, dn, mod_list)
# if rollback of add required, delete it
def on_commit(obj):
obj.add(dn, None, mod_list)
def on_rollback(obj):
obj.delete(dn)
# process this action
return self._process(on_commit, on_rollback)
def modify(self, dn: str, mod_list: dict) -> None:
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify", self, dn, mod_list)
# need to work out how to reverse changes in mod_list; result in revlist
revlist = {}
# get the current cached attributes
result = self._cache_get_for_dn(dn)
# find the how to reverse mod_list (for rollback) and put result in
# revlist. Also simulate actions on cache.
for mod_type, l in six.iteritems(mod_list):
for mod_op, mod_vals in l:
_debug("attribute:", mod_type)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
_debug("attribute modify:", (mod_op, mod_vals))
if mod_vals is not None:
if not isinstance(mod_vals, list):
mod_vals = [mod_vals]
if mod_op == ldap3.MODIFY_ADD:
# reverse of MODIFY_ADD is MODIFY_DELETE
reverse = (ldap3.MODIFY_DELETE, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE and len(mod_vals) > 0:
# Reverse of MODIFY_DELETE is MODIFY_ADD, but only if value
# is given if mod_vals is None, this means all values where
# deleted.
reverse = (ldap3.MODIFY_ADD, mod_vals)
elif mod_op == ldap3.MODIFY_DELETE \
or mod_op == ldap3.MODIFY_REPLACE:
if mod_type in result:
# If MODIFY_DELETE with no values or MODIFY_REPLACE
# then we have to replace all attributes with cached
# state
reverse = (
ldap3.MODIFY_REPLACE,
tldap.modlist.escape_list(result[mod_type])
)
else:
# except if we have no cached state for this DN, in
# which case we delete it.
reverse = (ldap3.MODIFY_DELETE, [])
else:
raise RuntimeError("mod_op of %d not supported" % mod_op)
reverse = [reverse]
_debug("attribute reverse:", reverse)
if mod_type in result:
_debug("attribute cache:", result[mod_type])
else:
_debug("attribute cache is empty")
revlist[mod_type] = reverse
_debug("--")
_debug("mod_list:", mod_list)
_debug("revlist:", revlist)
_debug("--")
# now the hard stuff is over, we get to the easy stuff
def on_commit(obj):
obj.modify(dn, mod_list)
def on_rollback(obj):
obj.modify(dn, revlist)
return self._process(on_commit, on_rollback)
def modify_no_rollback(self, dn: str, mod_list: dict):
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
_debug("modify_no_rollback", self, dn, mod_list)
result = self._do_with_retry(lambda obj: obj.modify_s(dn, mod_list))
_debug("--")
return result
def delete(self, dn: str) -> None:
"""
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("delete", self)
# get copy of cache
result = self._cache_get_for_dn(dn)
# remove special values that can't be added
def delete_attribute(name):
if name in result:
del result[name]
delete_attribute('entryUUID')
delete_attribute('structuralObjectClass')
delete_attribute('modifiersName')
delete_attribute('subschemaSubentry')
delete_attribute('entryDN')
delete_attribute('modifyTimestamp')
delete_attribute('entryCSN')
delete_attribute('createTimestamp')
delete_attribute('creatorsName')
delete_attribute('hasSubordinates')
delete_attribute('pwdFailureTime')
delete_attribute('pwdChangedTime')
# turn into mod_list list.
mod_list = tldap.modlist.addModlist(result)
_debug("revlist:", mod_list)
# on commit carry out action; on rollback restore cached state
def on_commit(obj):
obj.delete(dn)
def on_rollback(obj):
obj.add(dn, None, mod_list)
return self._process(on_commit, on_rollback)
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
"""
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
_debug("rename", self, dn, new_rdn, new_base_dn)
# split up the parameters
split_dn = tldap.dn.str2dn(dn)
split_newrdn = tldap.dn.str2dn(new_rdn)
assert (len(split_newrdn) == 1)
# make dn unqualified
rdn = tldap.dn.dn2str(split_dn[0:1])
# make newrdn fully qualified dn
tmplist = [split_newrdn[0]]
if new_base_dn is not None:
tmplist.extend(tldap.dn.str2dn(new_base_dn))
old_base_dn = tldap.dn.dn2str(split_dn[1:])
else:
tmplist.extend(split_dn[1:])
old_base_dn = None
newdn = tldap.dn.dn2str(tmplist)
_debug("--> commit ", self, dn, new_rdn, new_base_dn)
_debug("--> rollback", self, newdn, rdn, old_base_dn)
# on commit carry out action; on rollback reverse rename
def on_commit(obj):
obj.modify_dn(dn, new_rdn, new_superior=new_base_dn)
def on_rollback(obj):
obj.modify_dn(newdn, rdn, new_superior=old_base_dn)
return self._process(on_commit, on_rollback)
def fail(self) -> None:
""" for testing purposes only. always fail in commit """
_debug("fail")
# on commit carry out action; on rollback reverse rename
def on_commit(_obj):
raise_testfailure("commit")
def on_rollback(_obj):
raise_testfailure("rollback")
return self._process(on_commit, on_rollback)
|
class LDAPwrapper(LdapBase):
''' The LDAP connection class. '''
def __init__(self, settings_dict: dict) -> None:
pass
def reset(self, force_flush_cache: bool = False) -> None:
'''
Reset transaction back to original state, discarding all
uncompleted transactions.
'''
pass
def _cache_get_for_dn(self, dn: str) -> Dict[str, bytes]:
'''
Object state is cached. When an update is required the update will be
simulated on this cache, so that rollback information can be correct.
This function retrieves the cached data.
'''
pass
def is_dirty(self) -> bool:
''' Are there uncommitted changes? '''
pass
def is_managed(self) -> bool:
''' Are we inside transaction management? '''
pass
def enter_transaction_management(self) -> None:
''' Start a transaction. '''
pass
def leave_transaction_management(self) -> None:
'''
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
'''
pass
def commit(self) -> None:
'''
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
'''
pass
def rollback(self) -> None:
'''
Roll back to previous database state. However stay inside transaction
management.
'''
pass
def _process(self, on_commit: UpdateCallable, on_rollback: UpdateCallable) -> Any:
'''
Process action. oncommit is a callback to execute action, onrollback is
a callback to execute if the oncommit() has been called and a rollback
is required
'''
pass
def add(self, dn: str, mod_list: dict) -> None:
'''
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
'''
pass
def on_commit(obj):
pass
def on_rollback(obj):
pass
def modify(self, dn: str, mod_list: dict) -> None:
'''
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
'''
pass
def on_commit(obj):
pass
def on_rollback(obj):
pass
def modify_no_rollback(self, dn: str, mod_list: dict):
'''
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
'''
pass
def delete(self, dn: str) -> None:
'''
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
'''
pass
def delete_attribute(name):
pass
def on_commit(obj):
pass
def on_rollback(obj):
pass
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
'''
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
'''
pass
def on_commit(obj):
pass
def on_rollback(obj):
pass
def fail(self) -> None:
''' for testing purposes only. always fail in commit '''
pass
def on_commit(obj):
pass
def on_rollback(obj):
pass
| 28 | 16 | 12 | 2 | 7 | 3 | 2 | 0.53 | 1 | 11 | 2 | 0 | 16 | 1 | 16 | 36 | 343 | 68 | 180 | 48 | 152 | 96 | 160 | 48 | 132 | 11 | 2 | 4 | 53 |
142,158 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_database.py
|
tests.a_unit.test_database.TestModelAccount
|
class TestModelAccount:
def test_create(self, defaults, mock_ldap):
""" Test create LDAP object. """
c = mock_ldap
account_attributes = defaults.account_attributes
# Create the object.
account = tests.database.Account()
account = account.merge(account_attributes)
account = tldap.database.insert(account)
# Simulate required attributes that should be added.
expected_values = dict(account_attributes)
expected_values.update({
'gecos': "Tux Torvalds",
'displayName': "Tux Torvalds",
'shadowLastChange': mock.ANY,
'userPassword': mock.ANY,
'dn': "uid=tux,ou=People,dc=python-ldap,dc=org",
})
python_expected_values = get_python_expected_values(expected_values)
db_expected_values = get_db_expected_values(expected_values, tests.database.Account)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.add(
'uid=tux,ou=People,dc=python-ldap,dc=org',
db_expected_values,
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
for key, value in python_expected_values.items():
assert account[key] == value, key
def test_create_with_dn(self, defaults, mock_ldap):
""" Test create LDAP object. """
c = mock_ldap
account_attributes = defaults.account_attributes
# Create the object.
account = tests.database.Account()
account = account.merge(account_attributes)
account = account.merge({'dn': "uid=penguin,ou=People,dc=python-ldap,dc=org"})
account = tldap.database.insert(account)
# Simulate required attributes that should be added.
expected_values = dict(account_attributes)
expected_values.update({
'gecos': "Tux Torvalds",
'displayName': "Tux Torvalds",
'shadowLastChange': mock.ANY,
'userPassword': mock.ANY,
'dn': "uid=penguin,ou=People,dc=python-ldap,dc=org",
})
python_expected_values = get_python_expected_values(expected_values)
db_expected_values = get_db_expected_values(expected_values, tests.database.Account)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.add(
'uid=penguin,ou=People,dc=python-ldap,dc=org',
db_expected_values,
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
for key, value in python_expected_values.items():
assert account[key] == value, key
def test_search(self, defaults, mock_ldap, account1, group1):
""" Test delete LDAP object. """
c = mock_ldap
c.search = SearchMock()
account1 = account1.merge({
'primary_group': group1,
})
c.search.add_result(b"uid=tux", account1)
results = tldap.database.search(tests.database.Account, Q(uid='does_not_exist'))
assert list(results) == []
results = tldap.database.search(tests.database.Account, Q(uid='tux'))
results = list(results)
assert len(results) == 1
account = results[0]
account_attributes = defaults.account_attributes
expected_values = dict(account_attributes)
expected_values.update({
'gecos': "Tux Torvalds",
'displayName': "Tux Torvalds",
'shadowLastChange': mock.ANY,
'userPassword': mock.ANY,
'dn': "uid=tux,ou=People,dc=python-ldap,dc=org",
})
python_expected_values = get_python_expected_values(expected_values)
# Assert caches are correct.
for key, value in python_expected_values.items():
assert account[key] == value, key
def test_search_by_dn(self, mock_ldap, account1):
""" Test getting a person. """
c = mock_ldap
c.search = SearchMock()
c.search.add_result(
b"entryDN:=uid=tux, ou=People, dc=python-ldap,dc=org", account1)
person = tldap.database.get_one(
tests.database.Account,
Q(dn="uid=tux, ou=People, dc=python-ldap,dc=org"))
assert person.get_as_single('uid') == "tux"
expected_calls = [(
'ou=People, dc=python-ldap,dc=org',
'SUBTREE',
b'(&'
b'(objectClass=inetOrgPerson)'
b'(objectClass=organizationalPerson)'
b'(objectClass=person)'
b'(entryDN:=uid=tux, ou=People, dc=python-ldap,dc=org)'
b')',
mock.ANY,
None
)]
assert c.search.calls == expected_calls
def test_delete(self, mock_ldap, account1):
""" Test delete LDAP object. """
c = mock_ldap
# Delete the object.
tldap.database.delete(account1)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.delete(
'uid=tux,ou=People,dc=python-ldap,dc=org',
)
]
c.assert_has_calls(expected_calls)
def test_rename(self, defaults, mock_ldap, account1):
""" Test rename LDAP object. """
c = mock_ldap
# Rename the object.
account1 = tldap.database.rename(account1, uid='tuz')
# Simulate required attributes that should be added.
expected_values = dict(defaults.account_attributes)
expected_values.update({
'uid': "tuz",
})
python_expected_values = get_python_expected_values(expected_values)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.rename(
'uid=tux,ou=People,dc=python-ldap,dc=org',
'uid=tuz',
None,
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
assert account1.get_as_single('dn') == "uid=tuz,ou=People,dc=python-ldap,dc=org"
for key, value in python_expected_values.items():
assert account1[key] == value, key
def test_move(self, defaults, mock_ldap, account1):
""" Test move LDAP object. """
c = mock_ldap
# Move the object.
account1 = tldap.database.rename(account1, "ou=Groups, dc=python-ldap,dc=org")
# Simulate required attributes that should be added.
expected_values = dict(defaults.account_attributes)
python_expected_values = get_python_expected_values(expected_values)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.rename(
'uid=tux,ou=People,dc=python-ldap,dc=org',
'uid=tux',
'ou=Groups, dc=python-ldap,dc=org',
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
assert account1.get_as_single('dn') == "uid=tux,ou=Groups,dc=python-ldap,dc=org"
for key, value in python_expected_values.items():
assert account1[key] == value, key
def test_add_attribute(self, defaults, mock_ldap, account1):
""" Test add new attribute. """
c = mock_ldap
# Replace the attribute.
changes = tldap.database.changeset(account1, {'title': "Superior"})
account1 = tldap.database.save(changes)
# Simulate required attributes that should be added.
expected_values = dict(defaults.account_attributes)
expected_values.update({
'title': "Superior"
})
python_expected_values = get_python_expected_values(expected_values)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.modify(
'uid=tux,ou=People,dc=python-ldap,dc=org',
{'title': [('MODIFY_REPLACE', [b'Superior'])]},
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
assert account1.get_as_single('dn') == "uid=tux,ou=People,dc=python-ldap,dc=org"
for key, value in python_expected_values.items():
assert account1[key] == value, key
def test_replace_dn(self, account1):
""" Test replace LDAP attribute. """
# Replace the attribute.
changes = tldap.database.changeset(account1, {'dn': "uid=penguin,ou=People,dc=python-ldap,dc=org"})
with pytest.raises(RuntimeError):
tldap.database.save(changes)
def test_replace_attribute(self, defaults, mock_ldap, account1):
""" Test replace LDAP attribute. """
c = mock_ldap
# Replace the attribute.
changes = tldap.database.changeset(account1, {'sn': "Closed"})
changes = changes.merge({'sn': "Gates", 'cn': "Tux Gates"})
account1 = tldap.database.save(changes)
# Simulate required attributes that should be added.
expected_values = dict(defaults.account_attributes)
expected_values.update({
'cn': 'Tux Gates',
'displayName': 'Tux Gates',
'gecos': 'Tux Gates',
'sn': "Gates",
})
python_expected_values = get_python_expected_values(expected_values)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.modify(
'uid=tux,ou=People,dc=python-ldap,dc=org',
{
'cn': [('MODIFY_REPLACE', [b'Tux Gates'])],
'displayName': [('MODIFY_REPLACE', [b'Tux Gates'])],
'gecos': [('MODIFY_REPLACE', [b'Tux Gates'])],
'sn': [('MODIFY_REPLACE', [b'Gates'])],
},
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
assert account1.get_as_single('dn') == "uid=tux,ou=People,dc=python-ldap,dc=org"
for key, value in python_expected_values.items():
assert account1[key] == value, key
def test_replace_attribute_same(self, account1):
""" Test replace LDAP attribute. """
# Replace the attribute.
changes = tldap.database.changeset(account1, {'sn': "Torvalds"})
assert 'sn' not in changes
# Replace the attribute.
changes = tldap.database.changeset(account1, {})
changes = changes.set('sn', "Torvalds")
assert 'sn' not in changes
# Replace the attribute.
changes = tldap.database.changeset(account1, {})
changes = changes.merge({'sn': "Torvalds"})
assert 'sn' not in changes
def test_replace_attribute_error(self, account1):
""" Test replace LDAP attribute with invalid value. """
# Replace the attribute.
changes = tldap.database.changeset(account1, {'gidNumber': "Torvalds"})
assert not changes.is_valid
assert changes.errors == ["gidNumber: should be a integer."]
with pytest.raises(RuntimeError):
tldap.database.save(changes)
def test_delete_attribute(self, defaults, mock_ldap, account1):
""" Test delete LDAP attribute. """
""" Test replace LDAP attribute. """
c = mock_ldap
# Replace the attribute.
changes = tldap.database.changeset(account1, {'telephoneNumber': None})
account1 = tldap.database.save(changes)
# Simulate required attributes that should be added.
expected_values = dict(defaults.account_attributes)
expected_values.update({
'telephoneNumber': None,
})
python_expected_values = get_python_expected_values(expected_values)
# Assert that we made the correct calls to the backend.
expected_calls = [
mock.call.modify(
'uid=tux,ou=People,dc=python-ldap,dc=org',
{
'telephoneNumber': [('MODIFY_DELETE', [])],
},
)
]
c.assert_has_calls(expected_calls)
# Assert caches are correct.
assert account1.get_as_single('dn') == "uid=tux,ou=People,dc=python-ldap,dc=org"
for key, value in python_expected_values.items():
assert account1[key] == value, key
|
class TestModelAccount:
def test_create(self, defaults, mock_ldap):
''' Test create LDAP object. '''
pass
def test_create_with_dn(self, defaults, mock_ldap):
''' Test create LDAP object. '''
pass
def test_search(self, defaults, mock_ldap, account1, group1):
''' Test delete LDAP object. '''
pass
def test_search_by_dn(self, mock_ldap, account1):
''' Test getting a person. '''
pass
def test_delete(self, mock_ldap, account1):
''' Test delete LDAP object. '''
pass
def test_rename(self, defaults, mock_ldap, account1):
''' Test rename LDAP object. '''
pass
def test_move(self, defaults, mock_ldap, account1):
''' Test move LDAP object. '''
pass
def test_add_attribute(self, defaults, mock_ldap, account1):
''' Test add new attribute. '''
pass
def test_replace_dn(self, account1):
''' Test replace LDAP attribute. '''
pass
def test_replace_attribute(self, defaults, mock_ldap, account1):
''' Test replace LDAP attribute. '''
pass
def test_replace_attribute_same(self, account1):
''' Test replace LDAP attribute. '''
pass
def test_replace_attribute_error(self, account1):
''' Test replace LDAP attribute with invalid value. '''
pass
def test_delete_attribute(self, defaults, mock_ldap, account1):
''' Test delete LDAP attribute. '''
pass
| 14 | 13 | 25 | 3 | 18 | 4 | 2 | 0.22 | 0 | 6 | 3 | 0 | 13 | 0 | 13 | 13 | 336 | 56 | 230 | 73 | 216 | 50 | 137 | 73 | 123 | 2 | 0 | 1 | 21 |
142,159 |
Karaage-Cluster/python-tldap
|
tldap/backend/fake_transactions.py
|
tldap.backend.fake_transactions.NoSuchObject
|
class NoSuchObject(Exception):
pass
|
class NoSuchObject(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
142,160 |
Karaage-Cluster/python-tldap
|
tldap/exceptions.py
|
tldap.exceptions.ValidationError
|
class ValidationError(Exception):
"""An error while validating data."""
pass
|
class ValidationError(Exception):
'''An error while validating data.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 2 | 1 | 1 | 1 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
142,161 |
Karaage-Cluster/python-tldap
|
tldap/database/__init__.py
|
tldap.database.Database
|
class Database:
def __init__(self, connection: LdapBase, settings: Optional[dict] = None):
self._connection = connection
if settings is None:
settings = connection.settings_dict
self._settings = settings
@property
def connection(self) -> LdapBase:
return self._connection
@property
def settings(self) -> dict:
return self._settings
|
class Database:
def __init__(self, connection: LdapBase, settings: Optional[dict] = None):
pass
@property
def connection(self) -> LdapBase:
pass
@property
def settings(self) -> dict:
pass
| 6 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 0 | 2 | 1 | 0 | 3 | 2 | 3 | 3 | 14 | 2 | 12 | 8 | 6 | 0 | 10 | 6 | 6 | 2 | 0 | 1 | 4 |
142,162 |
Karaage-Cluster/python-tldap
|
tests/database.py
|
tests.database.Account
|
class Account(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
fields = {
**helpers.get_fields_common(),
**helpers.get_fields_person(),
**helpers.get_fields_account(),
**helpers.get_fields_shadow(),
}
if os.environ['LDAP_TYPE'] == "openldap":
fields.update(helpers.get_fields_pwdpolicy())
elif os.environ['LDAP_TYPE'] == 'ds389':
fields.update(helpers.get_fields_password_object())
return fields
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
settings = database.settings
return SearchOptions(
base_dn=settings['LDAP_ACCOUNT_BASE'],
object_class={'inetOrgPerson', 'organizationalPerson', 'person'},
pk_field="uid",
)
@classmethod
def on_load(cls, python_data: LdapObject, database: Database) -> LdapObject:
python_data = helpers.load_person(python_data, Group)
python_data = helpers.load_account(python_data, Group)
python_data = helpers.load_shadow(python_data)
if os.environ['LDAP_TYPE'] == "openldap":
python_data = helpers.load_pwdpolicy(python_data)
elif os.environ['LDAP_TYPE'] == 'ds389':
python_data = helpers.load_password_object(python_data)
return python_data
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
settings = database.settings
changes = helpers.save_person(changes, database)
changes = helpers.save_account(changes, database)
changes = helpers.save_shadow(changes)
classes = ['top', 'person', 'inetOrgPerson', 'organizationalPerson',
'shadowAccount', 'posixAccount']
if os.environ['LDAP_TYPE'] == "openldap":
changes = helpers.save_pwdpolicy(changes)
classes = classes + ['pwdPolicy']
elif os.environ['LDAP_TYPE'] == 'ds389':
changes = helpers.save_password_object(changes)
classes = classes + ['passwordObject']
changes = dhelpers.save_account(changes, Account, database)
changes = helpers.set_object_class(changes, classes)
changes = helpers.rdn_to_dn(changes, 'uid', settings['LDAP_ACCOUNT_BASE'])
return changes
|
class Account(LdapObject):
@classmethod
def get_fields(cls) -> Dict[str, tldap.fields.Field]:
pass
@classmethod
def get_search_options(cls, database: Database) -> SearchOptions:
pass
@classmethod
def on_load(cls, python_data: LdapObject, database: Database) -> LdapObject:
pass
@classmethod
def on_save(cls, changes: Changeset, database: Database) -> Changeset:
pass
| 9 | 0 | 13 | 2 | 11 | 0 | 3 | 0 | 1 | 6 | 5 | 1 | 0 | 0 | 4 | 25 | 61 | 11 | 50 | 13 | 41 | 0 | 33 | 9 | 28 | 3 | 2 | 1 | 10 |
142,163 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_modlist.py
|
tests.a_unit.test_modlist.DNTest
|
class DNTest(unittest.TestCase):
def test_addModlist(self):
A = {
'A': ['ABC'],
'B': ['DEF'],
}
EXPECTED = {
'A': ['ABC'],
'B': ['DEF'],
}
modlist = tldap.modlist.addModlist(A)
self.assertEqual(modlist, EXPECTED)
def test_modifyModlist(self):
A = {
'A': ['ABC'],
'B': ['DEF'],
'I': [''],
'X': ['AA', 'BB', 'CC'],
'Y': ['AA', 'BB', 'DD'],
}
B = {
'A': ['ABC'],
'C': ['HIJ'],
'I': [''],
'X': ['CC', 'BB', 'AA'],
'Y': ['CC', 'BB', 'AA'],
}
EXPECTED = {
'B': (ldap3.MODIFY_DELETE, []),
'C': (ldap3.MODIFY_ADD, ['HIJ']),
'Y': (ldap3.MODIFY_REPLACE, ['CC', 'BB', 'AA']),
}
modlist = tldap.modlist.modifyModlist(A, B)
self.assertEqual(modlist, EXPECTED)
|
class DNTest(unittest.TestCase):
def test_addModlist(self):
pass
def test_modifyModlist(self):
pass
| 3 | 0 | 17 | 0 | 17 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 74 | 36 | 2 | 34 | 10 | 31 | 0 | 12 | 10 | 9 | 1 | 2 | 0 | 2 |
142,164 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_ldap_passwd.py
|
tests.a_unit.test_ldap_passwd.PasswordTest
|
class PasswordTest(unittest.TestCase):
def test_password_check_ldap_md5_crypt(self):
self.assertTrue(lp.check_password(
"test", "{MD5}CY9rzUYh03PK3k6DJie09g=="))
def test_password_check_ldap_sha1(self):
self.assertTrue(lp.check_password(
"test", "{SHA}qUqP5cyxm6YcTAhz05Hph5gvu9M="))
def test_password_check_ldap_salted_sha1(self):
self.assertTrue(lp.check_password(
"test", "{SSHA}sAloRnCFgBV+SjStZB0lIr8jCCq21to7"))
def test_password_check_ldap_salted_md5(self):
self.assertTrue(lp.check_password(
"test", "{SMD5}xosLPIl3lM7lKx4xeEDPmdpjTig="))
def test_password_check_md5_crypt(self):
self.assertTrue(lp.check_password(
"test", "{CRYPT}$1$U1TmLCl7$MZS59PDJxAE8j9fO/Zs4A0"))
# some old passwords have crypt in lower case
self.assertTrue(lp.check_password(
"test", "{crypt}$1$U1TmLCl7$MZS59PDJxAE8j9fO/Zs4A0"))
def test_password_check_des_crypt(self):
self.assertTrue(lp.check_password(
"test", "{CRYPT}PQl1.p7BcJRuM"))
# some old passwords have crypt in lower case
self.assertTrue(lp.check_password(
"test", "{crypt}PQl1.p7BcJRuM"))
def test_password_encode(self):
encrypted = lp.encode_password("test")
self.assertTrue(encrypted.startswith("{CRYPT}$6$"))
self.assertTrue(lp.check_password("test", encrypted))
self.assertFalse(lp.check_password("teddst", encrypted))
|
class PasswordTest(unittest.TestCase):
def test_password_check_ldap_md5_crypt(self):
pass
def test_password_check_ldap_sha1(self):
pass
def test_password_check_ldap_salted_sha1(self):
pass
def test_password_check_ldap_salted_md5(self):
pass
def test_password_check_md5_crypt(self):
pass
def test_password_check_des_crypt(self):
pass
def test_password_encode(self):
pass
| 8 | 0 | 4 | 0 | 4 | 0 | 1 | 0.07 | 1 | 0 | 0 | 0 | 7 | 0 | 7 | 79 | 37 | 7 | 28 | 9 | 20 | 2 | 20 | 9 | 12 | 1 | 2 | 0 | 7 |
142,165 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_dn.py
|
tests.a_unit.test_dn.DNTest
|
class DNTest(unittest.TestCase):
def test_rfc4512_char(self):
self.assertTrue(tldap.dn._isALPHA('A'))
self.assertFalse(tldap.dn._isALPHA('0'))
def test_rfc4512_number(self):
value = "0"
(result, i) = tldap.dn._number(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "10"
(result, i) = tldap.dn._number(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "1a"
(result, i) = tldap.dn._number(value, 0)
self.assertIsNotNone(result)
value = "1"
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = ""
(result, i) = tldap.dn._number(value, 0)
self.assertIsNone(result)
self.assertEqual(i, 0)
def test_rfc4512_keystring(self):
value = "A0b-d"
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "A0b-d="
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "A"
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "O"
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "O="
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNotNone(result)
value = "O"
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "0b-d"
(result, i) = tldap.dn._keystring(value, 0)
self.assertIsNone(result)
self.assertEqual(i, 0)
def test_rfc4514_attributeType(self):
value = "A0b-d"
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "A0b-d="
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "O"
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "O="
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
value = "O"
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "0b-d"
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
value = "0"
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "1.3.6.1.4.1.1466.0"
(result, i) = tldap.dn._attributeType(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
def test_rfc4514_string(self):
value = "AD"
(result, i) = tldap.dn._string(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "ABCD"
(result, i) = tldap.dn._string(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "AD,"
(result, i) = tldap.dn._string(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "ABCD,"
(result, i) = tldap.dn._string(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "\\\\a\\ \\#\\=\\+\\,\\;\\<\\>\\41"
(result, i) = tldap.dn._string(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, "\\a #=+,;<>A")
self.assertEqual(i, len(value))
def test_rfc4514_attributeValue(self):
value = "AD"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "ABCD"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "AD,"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "ABCD,"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
value = value[:-1]
self.assertEqual(result, value)
self.assertEqual(i, len(value))
value = "\\\\a\\ \\#\\=\\+\\,\\;\\<\\>\\41"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, "\\a #=+,;<>A")
self.assertEqual(i, len(value))
value = "#414243"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, "ABC")
self.assertEqual(i, len(value))
value = "#"
(result, i) = tldap.dn._attributeValue(value, 0)
self.assertIsNone(result)
self.assertEqual(i, 0)
def test_rfc4514_attributeTypeAndValue(self):
value = "ABC=DEF"
(result, i) = tldap.dn._attributeTypeAndValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, ("ABC", "DEF", 1))
self.assertEqual(i, len(value))
value = "O=Isode Limited"
(result, i) = tldap.dn._attributeTypeAndValue(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, ("O", "Isode Limited", 1))
self.assertEqual(i, len(value))
def test_rfc4514_relativeDistinguishedName(self):
value = "ABC=DEF"
(result, i) = tldap.dn._relativeDistinguishedName(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, [("ABC", "DEF", 1)])
self.assertEqual(i, len(value))
value = "ABC=DEF+HIJ=KIF"
(result, i) = tldap.dn._relativeDistinguishedName(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, [("ABC", "DEF", 1), ("HIJ", "KIF", 1)])
self.assertEqual(i, len(value))
value = "ABC=DEF,HIJ=KIF"
(result, i) = tldap.dn._relativeDistinguishedName(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, [("ABC", "DEF", 1)])
self.assertEqual(i, len("ABC=DEF"))
def test_rfc4514_distinguishedName(self):
value = "ABC=DEF,HIJ=KIF"
(result, i) = tldap.dn._distinguishedName(value, 0)
self.assertIsNotNone(result)
self.assertEqual(result, [[('ABC', 'DEF', 1)], [('HIJ', 'KIF', 1)]])
self.assertEqual(i, len(value))
def test_str2dn(self):
value = "ABC=DEF,HIJ=KIF\\"
self.assertRaises(
tldap.exceptions.InvalidDN, lambda: tldap.dn.str2dn(value, 0))
value = "CN=Steve Kille,O=Isode Limited,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('CN', 'Steve Kille', 1)],
[('O', 'Isode Limited', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
value = "OU=Sales+CN=J. Smith,O=Widget Inc.,C=US"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('OU', 'Sales', 1), ('CN', 'J. Smith', 1)],
[('O', 'Widget Inc.', 1)],
[('C', 'US', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
value = "CN=L. Eagle,O=Sue\\, Grabbit and Runn,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('CN', 'L. Eagle', 1)],
[('O', 'Sue, Grabbit and Runn', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
value = "CN=Before\\0DAfter,O=Test,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('CN', 'Before\rAfter', 1)],
[('O', 'Test', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, "CN=Before\rAfter,O=Test,C=GB")
value = "CN=Before\rAfter,O=Test,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('CN', 'Before\rAfter', 1)],
[('O', 'Test', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
value = "1.3.6.1.4.1.1466.0=#04024869,O=Test,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('1.3.6.1.4.1.1466.0', '\x04\x02Hi', 1)],
[('O', 'Test', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, "1.3.6.1.4.1.1466.0=\x04\x02Hi,O=Test,C=GB")
value = "1.3.6.1.4.1.1466.0=\x04\x02Hi,O=Test,C=GB"
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('1.3.6.1.4.1.1466.0', '\x04\x02Hi', 1)],
[('O', 'Test', 1)],
[('C', 'GB', 1)],
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
def test_utf8(self):
# 2 byte UTF8
# UTF: 0x00A3
# UTF8: 0xC2 0xA3
value = six.u("ABC=DEF,HIJ=KIF£")
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('ABC', 'DEF', 1)], [('HIJ', six.u('KIF£'), 1)]
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
# 3 byte UTF8
# UTF: 0x0982
# UTF8: 0xE0 0xA6 0x82
value = six.u("ABC=DEFং,HIJ=KIF")
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('ABC', six.u('DEFং'), 1)], [('HIJ', 'KIF', 1)]
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
# 3 byte UTF8
# UTF: 0x4F60, 0x597D
# UTF8: 0xE4 0xBD 0xA0, 0xE5 0xA5 0xBD
value = six.u("ABC=DEF你好,HIJ=KIF")
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('ABC', six.u('DEF你好'), 1)], [('HIJ', 'KIF', 1)]
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
# 4 byte UTF8
# UTF: 0x10300, 0x10301, 0x10302
# UTF8: 0xF0 0x90 0x8C 0x80, 0xF0 0x90 0x8C 0x81, 0xF0 0x90 0x8C 0x82
value = six.u("ABC=DEF𐌀𐌁𐌂,HIJ=KIF")
result = tldap.dn.str2dn(value)
self.assertIsNotNone(result)
self.assertEqual(result, [
[('ABC', six.u('DEF𐌀𐌁𐌂'), 1)], [('HIJ', 'KIF', 1)]
])
result = tldap.dn.dn2str(result)
self.assertEqual(result, value)
|
class DNTest(unittest.TestCase):
def test_rfc4512_char(self):
pass
def test_rfc4512_number(self):
pass
def test_rfc4512_keystring(self):
pass
def test_rfc4514_attributeType(self):
pass
def test_rfc4514_string(self):
pass
def test_rfc4514_attributeValue(self):
pass
def test_rfc4514_attributeTypeAndValue(self):
pass
def test_rfc4514_relativeDistinguishedName(self):
pass
def test_rfc4514_distinguishedName(self):
pass
def test_str2dn(self):
pass
def test_utf8(self):
pass
| 12 | 0 | 31 | 3 | 27 | 2 | 1 | 0.06 | 1 | 0 | 0 | 0 | 11 | 0 | 11 | 83 | 355 | 47 | 296 | 32 | 284 | 19 | 259 | 32 | 247 | 1 | 2 | 0 | 11 |
142,166 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_dict.py
|
tests.a_unit.test_dict.TestImmutable
|
class TestImmutable:
def test_init_lowercase(self):
allowed_values = {'NumberOfPenguins', 'NumberOfSharks'}
ci = ImmutableDict(allowed_values, {'numberofpenguins': 10})
assert ci.keys() == {'NumberOfPenguins'}
def test_init_mixedcase(self, ci):
allowed_values = {'NumberOfPenguins', 'NumberOfSharks'}
ci = ImmutableDict(allowed_values, {'numberOFpenguins': 10})
assert ci.keys() == {'NumberOfPenguins'}
def test_init_uppercase(self, ci):
allowed_values = {'NumberOfPenguins', 'NumberOfSharks'}
ci = ImmutableDict(allowed_values, {'NUMBEROFPENGUINS': 10})
assert ci.keys() == {'NumberOfPenguins'}
def test_init_not_valid(self, ci):
allowed_values = {'NumberOfPenguins', 'NumberOfSharks'}
with pytest.raises(KeyError):
ImmutableDict(allowed_values, {'numberOFfish': 10})
def test_set_fails(self, immutable):
with pytest.raises(TypeError):
immutable['numberofpenguins'] = 10
with pytest.raises(TypeError):
immutable['numberoffish'] = 10
def test_set_lowercase(self, immutable):
immutable = immutable.set('numberofpenguins', 10)
assert immutable.keys() == {'NumberOfPenguins'}
def test_set_mixedcase(self, immutable):
immutable = immutable.set('numberOFpenguins', 10)
assert immutable.keys() == {'NumberOfPenguins'}
def test_set_uppercase(self, immutable):
immutable = immutable.set('NUMBEROFPENGUINS', 10)
assert immutable.keys() == {'NumberOfPenguins'}
def test_set_not_valid(self, immutable):
with pytest.raises(KeyError):
immutable.set('numberOFfish', 10)
def test_get(self, immutable):
immutable = immutable.set('numberOFpenguins', 10)
assert immutable['numberofpenguins'] == 10
assert immutable['NumberOfPenguins'] == 10
assert immutable['NUMBEROFPENGUINS'] == 10
def test_get_not_set(self, immutable):
immutable = immutable.set('numberOFpenguins', 10)
with pytest.raises(KeyError):
assert immutable['NumberOfSharks'] == 10
def test_get_valid(self, immutable):
immutable = immutable.set('numberOFpenguins', 10)
with pytest.raises(KeyError):
assert immutable['nUmberoFfIsh'] == 10
|
class TestImmutable:
def test_init_lowercase(self):
pass
def test_init_mixedcase(self, ci):
pass
def test_init_uppercase(self, ci):
pass
def test_init_not_valid(self, ci):
pass
def test_set_fails(self, immutable):
pass
def test_set_lowercase(self, immutable):
pass
def test_set_mixedcase(self, immutable):
pass
def test_set_uppercase(self, immutable):
pass
def test_set_not_valid(self, immutable):
pass
def test_get(self, immutable):
pass
def test_get_not_set(self, immutable):
pass
def test_get_valid(self, immutable):
pass
| 13 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 0 | 3 | 1 | 0 | 12 | 0 | 12 | 12 | 60 | 13 | 47 | 18 | 34 | 0 | 47 | 18 | 34 | 1 | 0 | 1 | 12 |
142,167 |
Karaage-Cluster/python-tldap
|
tldap/backend/no_transactions.py
|
tldap.backend.no_transactions.LDAPwrapper
|
class LDAPwrapper(LdapBase):
""" The LDAP connection class. """
####################
# Cache Management #
####################
def reset(self, force_flush_cache: bool = False) -> None:
"""
Reset transaction back to original state, discarding all
uncompleted transactions.
"""
pass
##########################
# Transaction Management #
##########################
# Fake it
def is_dirty(self) -> bool:
""" Are there uncommitted changes? """
return False
def is_managed(self) -> bool:
""" Are we inside transaction management? """
return False
def enter_transaction_management(self) -> None:
""" Start a transaction. """
pass
def leave_transaction_management(self) -> None:
"""
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
"""
pass
def commit(self) -> None:
"""
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
"""
pass
def rollback(self) -> None:
"""
Roll back to previous database state. However stay inside transaction
management.
"""
pass
##################################
# Functions needing Transactions #
##################################
def add(self, dn: str, mod_list: dict) -> None:
"""
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
"""
return self._do_with_retry(lambda obj: obj.add_s(dn, mod_list))
def modify(self, dn: str, mod_list: dict) -> None:
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
return self._do_with_retry(lambda obj: obj.modify_s(dn, mod_list))
def modify_no_rollback(self, dn: str, mod_list: dict) -> None:
"""
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
"""
return self._do_with_retry(lambda obj: obj.modify_s(dn, mod_list))
def delete(self, dn: str) -> None:
"""
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
return self._do_with_retry(lambda obj: obj.delete_s(dn))
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
"""
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
"""
return self._do_with_retry(
lambda obj: obj.rename_s(dn, new_rdn, new_base_dn))
|
class LDAPwrapper(LdapBase):
''' The LDAP connection class. '''
def reset(self, force_flush_cache: bool = False) -> None:
'''
Reset transaction back to original state, discarding all
uncompleted transactions.
'''
pass
def is_dirty(self) -> bool:
''' Are there uncommitted changes? '''
pass
def is_managed(self) -> bool:
''' Are we inside transaction management? '''
pass
def enter_transaction_management(self) -> None:
''' Start a transaction. '''
pass
def leave_transaction_management(self) -> None:
'''
End a transaction. Must not be dirty when doing so. ie. commit() or
rollback() must be called if changes made. If dirty, changes will be
discarded.
'''
pass
def commit(self) -> None:
'''
Attempt to commit all changes to LDAP database. i.e. forget all
rollbacks. However stay inside transaction management.
'''
pass
def rollback(self) -> None:
'''
Roll back to previous database state. However stay inside transaction
management.
'''
pass
def add(self, dn: str, mod_list: dict) -> None:
'''
Add a DN to the LDAP database; See ldap module. Doesn't return a result
if transactions enabled.
'''
pass
def modify(self, dn: str, mod_list: dict) -> None:
'''
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
'''
pass
def modify_no_rollback(self, dn: str, mod_list: dict) -> None:
'''
Modify a DN in the LDAP database; See ldap module. Doesn't return a
result if transactions enabled.
'''
pass
def delete(self, dn: str) -> None:
'''
delete a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
'''
pass
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
'''
rename a dn in the ldap database; see ldap module. doesn't return a
result if transactions enabled.
'''
pass
| 13 | 13 | 6 | 0 | 2 | 3 | 1 | 1.96 | 1 | 3 | 0 | 0 | 12 | 0 | 12 | 32 | 98 | 21 | 26 | 13 | 13 | 51 | 25 | 13 | 12 | 1 | 2 | 0 | 12 |
142,168 |
Karaage-Cluster/python-tldap
|
tldap/database/__init__.py
|
tldap.database.Changeset
|
class Changeset(ImmutableDict):
""" Represents a set of changes to an LdapObject. """
def __init__(self, fields: Dict[str, tldap.fields.Field], src: LdapObject, d: Optional[dict] = None) -> None:
self._fields = fields
self._src = src
self._changes: Dict[str, List[Tuple[Operation, List[Any]]]] = {}
self._errors: List[str] = []
field_names = set(fields.keys())
super().__init__(field_names, d)
def __copy__(self: ChangesetEntity) -> ChangesetEntity:
copy = self.__class__(self._fields, self._src, self._dict)
copy._changes = self._changes
return copy
def get_value_as_single(self, key: str) -> any:
key = self.fix_key(key)
if key in self._dict:
field = self._fields[key]
return _list_to_python(field, self._dict[key])
else:
return self._src.get_as_single(key)
def get_value_as_list(self, key: str) -> List[Any]:
if key in self._dict:
return self._dict[key]
else:
return self._src.get_as_list(key)
@property
def changes(self) -> Dict[str, List[Tuple[Operation, List[Any]]]]:
return self._changes
@staticmethod
def _python_to_list(value: Any) -> List[Any]:
value_list = _python_to_list(value)
if isinstance(value_list, NotLoaded):
raise RuntimeError("Unexpected NotLoaded value in Changeset.")
for value in value_list:
if isinstance(value, NotLoaded):
raise RuntimeError("Unexpected NotLoaded value in Changeset.")
return value_list
def _set(self, key: str, value: Any) -> None:
old_value = self.get_value_as_list(key)
value_list = self._python_to_list(value)
if value_list != old_value:
operation: Operation = ldap3.MODIFY_REPLACE
if value is None or value == []:
operation = ldap3.MODIFY_DELETE
self._add_mod(key, operation, value_list, overwrite=True)
self._replay_mod(key, operation, value_list)
return
def force_add(self, key: str, value: Any) -> 'Changeset':
value_list = self._python_to_list(value)
clone = self.__copy__()
clone._add_mod(key, ldap3.MODIFY_ADD, value_list)
clone._replay_mod(key, ldap3.MODIFY_ADD, value_list)
return clone
def force_replace(self, key: str, value: Any) -> 'Changeset':
value_list = self._python_to_list(value)
clone = self.__copy__()
clone._add_mod(key, ldap3.MODIFY_REPLACE, value_list)
clone._replay_mod(key, ldap3.MODIFY_REPLACE, value_list)
return clone
def force_delete(self, key: str, value: Any) -> 'Changeset':
value_list = self._python_to_list(value)
clone = self.__copy__()
clone._add_mod(key, ldap3.MODIFY_DELETE, value_list)
clone._replay_mod(key, ldap3.MODIFY_DELETE, value_list)
return clone
def _add_mod(self, key: str, operation: Operation, new_value_list: List[Any], overwrite=False) -> None:
if any(isinstance(value, list) for value in new_value_list):
raise RuntimeError("Got list inside a list.")
key = self.fix_key(key)
if key in self._changes:
if overwrite:
new_list = []
else:
new_list = self._changes[key]
else:
new_list = []
new_list = new_list + [(operation, new_value_list)]
self._changes = {
**self._changes,
key: new_list
}
def _replay_mod(self, key: str, operation: Operation, new_value_list: List[Any]):
if any(isinstance(value, list) for value in new_value_list):
raise RuntimeError("Got list inside a list.")
key = self.fix_key(key)
old_value_list = self.get_value_as_list(key)
if operation == ldap3.MODIFY_ADD:
assert isinstance(new_value_list, list)
for value in new_value_list:
if value not in old_value_list:
old_value_list.append(value)
if len(old_value_list) == 0:
raise RuntimeError("Can't add 0 items.")
elif operation == ldap3.MODIFY_REPLACE:
old_value_list = new_value_list
elif operation == ldap3.MODIFY_DELETE:
if len(new_value_list) == 0:
old_value_list = []
else:
for value in new_value_list:
old_value_list.remove(value)
else:
raise RuntimeError(f"Unknown LDAP operation {operation}.")
self._dict[key] = old_value_list
field = self._fields[key]
try:
field.validate(old_value_list)
except tldap.exceptions.ValidationError as e:
self._errors.append(f"{key}: {e}.")
@property
def is_valid(self) -> bool:
return len(self._errors) == 0
@property
def errors(self) -> List[str]:
return self._errors
@property
def src(self) -> LdapObject:
return self._src
|
class Changeset(ImmutableDict):
''' Represents a set of changes to an LdapObject. '''
def __init__(self, fields: Dict[str, tldap.fields.Field], src: LdapObject, d: Optional[dict] = None) -> None:
pass
def __copy__(self: ChangesetEntity) -> ChangesetEntity:
pass
def get_value_as_single(self, key: str) -> any:
pass
def get_value_as_list(self, key: str) -> List[Any]:
pass
@property
def changes(self) -> Dict[str, List[Tuple[Operation, List[Any]]]]:
pass
@staticmethod
def _python_to_list(value: Any) -> List[Any]:
pass
def _set(self, key: str, value: Any) -> None:
pass
def force_add(self, key: str, value: Any) -> 'Changeset':
pass
def force_replace(self, key: str, value: Any) -> 'Changeset':
pass
def force_delete(self, key: str, value: Any) -> 'Changeset':
pass
def _add_mod(self, key: str, operation: Operation, new_value_list: List[Any], overwrite=False) -> None:
pass
def _replay_mod(self, key: str, operation: Operation, new_value_list: List[Any]):
pass
@property
def is_valid(self) -> bool:
pass
@property
def errors(self) -> List[str]:
pass
@property
def src(self) -> LdapObject:
pass
| 21 | 1 | 8 | 1 | 7 | 0 | 2 | 0.01 | 1 | 11 | 3 | 0 | 14 | 4 | 15 | 27 | 148 | 30 | 117 | 42 | 96 | 1 | 101 | 36 | 85 | 11 | 1 | 3 | 35 |
142,169 |
Karaage-Cluster/python-tldap
|
tldap/query_utils.py
|
tldap.query_utils.Q
|
class Q(Node):
"""
Encapsulates filters as objects that can then be combined logically
(using ``&`` and ``|``).
"""
# Connection types
AND = 'AND'
OR = 'OR'
default = AND
def __init__(self, *args, **kwargs):
super(Q, self).__init__(
children=list(args) + list(six.iteritems(kwargs)))
def _combine(self, other: 'Q', conn: str) -> 'Q':
if not isinstance(other, Q):
raise TypeError(other)
if len(self.children) < 1:
self.connector = conn
obj = type(self)()
obj.connector = conn
obj.add(self, conn)
obj.add(other, conn)
return obj
def __or__(self, other: 'Q'):
return self._combine(other, self.OR)
def __and__(self, other: 'Q'):
return self._combine(other, self.AND)
def __invert__(self):
obj = type(self)()
obj.add(self, self.AND)
obj.negate()
return obj
|
class Q(Node):
'''
Encapsulates filters as objects that can then be combined logically
(using ``&`` and ``|``).
'''
def __init__(self, *args, **kwargs):
pass
def _combine(self, other: 'Q', conn: str) -> 'Q':
pass
def __or__(self, other: 'Q'):
pass
def __and__(self, other: 'Q'):
pass
def __invert__(self):
pass
| 6 | 1 | 4 | 0 | 4 | 0 | 1 | 0.19 | 1 | 5 | 0 | 0 | 5 | 1 | 5 | 16 | 36 | 5 | 26 | 12 | 20 | 5 | 25 | 12 | 19 | 3 | 2 | 1 | 7 |
142,170 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.CharField
|
class CharField(Field):
""" Field contains a UTF8 character string. """
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
if isinstance(value, six.string_types):
value = value.encode("utf_8")
return value
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
value = value.decode("utf_8")
return value
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not isinstance(value, six.string_types):
raise tldap.exceptions.ValidationError("should be a string")
|
class CharField(Field):
''' Field contains a UTF8 character string. '''
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
| 4 | 4 | 8 | 0 | 4 | 4 | 2 | 0.92 | 1 | 2 | 1 | 0 | 3 | 0 | 3 | 13 | 28 | 3 | 13 | 4 | 9 | 12 | 13 | 4 | 9 | 2 | 2 | 1 | 6 |
142,171 |
Karaage-Cluster/python-tldap
|
tldap/transaction.py
|
tldap.transaction.TransactionManagementError
|
class TransactionManagementError(Exception):
"""
This exception is thrown when something bad happens with transaction
management.
"""
pass
|
class TransactionManagementError(Exception):
'''
This exception is thrown when something bad happens with transaction
management.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
142,172 |
Karaage-Cluster/python-tldap
|
tldap/tree.py
|
tldap.tree.Node
|
class Node(object):
"""
A single internal node in the tree graph. A Node should be viewed as a
connection (the root) with the children being either leaf nodes or other
Node instances.
"""
# Standard connector type. Clients usually won't use this at all and
# subclasses will usually override the value.
default = 'DEFAULT'
def __init__(self, children=None, connector=None, negated=False):
"""
Constructs a new Node. If no connector is given, the default will be
used.
"""
self.children = children[:] if children else []
self.connector = connector or self.default
self.negated = negated
# We need this because of django.db.models.query_utils.Q. Q. __init__() is
# problematic, but it is a natural Node subclass in all other respects.
@classmethod
def _new_instance(cls, children=None, connector=None, negated=False):
"""
This is called to create a new instance of this class when we need new
Nodes (or subclasses) in the internal code in this class. Normally, it
just shadows __init__(). However, subclasses with an __init__ signature
that is not an extension of Node.__init__ might need to implement this
method to allow a Node to create a new instance of them (if they have
any extra setting up to do).
"""
obj = Node(children, connector, negated)
obj.__class__ = cls
return obj
def __str__(self):
if self.negated:
return '(NOT (%s: %s))' % (self.connector, ', '.join(str(c) for c
in self.children))
return '(%s: %s)' % (self.connector, ', '.join(str(c) for c in
self.children))
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def __deepcopy__(self, memodict):
"""
Utility method used by copy.deepcopy().
"""
obj = Node(connector=self.connector, negated=self.negated)
obj.__class__ = self.__class__
obj.children = copy.deepcopy(self.children, memodict)
return obj
def __len__(self):
"""
The size of a node if the number of children it has.
"""
return len(self.children)
def __bool__(self):
"""
For truth value testing.
"""
return bool(self.children)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
def __contains__(self, other):
"""
Returns True is 'other' is a direct child of this instance.
"""
return other in self.children
def add(self, data, conn_type, squash=True):
"""
Combines this tree and the data represented by data using the
connector conn_type. The combine is done by squashing the node other
away if possible.
This tree (self) will never be pushed to a child node of the
combined tree, nor will the connector or negated properties change.
The function returns a node which can be used in place of data
regardless if the node other got squashed or not.
If `squash` is False the data is prepared and added as a child to
this tree without further logic.
"""
if data in self.children:
return data
if not squash:
self.children.append(data)
return data
if self.connector == conn_type:
# We can reuse self.children to append or squash the node other.
if (isinstance(data, Node) and not data.negated
and (data.connector == conn_type or len(data) == 1)):
# We can squash the other node's children directly into this
# node. We are just doing (AB)(CD) == (ABCD) here, with the
# addition that if the length of the other node is 1 the
# connector doesn't matter. However, for the len(self) == 1
# case we don't want to do the squashing, as it would alter
# self.connector.
self.children.extend(data.children)
return self
else:
# We could use perhaps additional logic here to see if some
# children could be used for pushdown here.
self.children.append(data)
return data
else:
obj = self._new_instance(self.children, self.connector,
self.negated)
self.connector = conn_type
self.children = [obj, data]
return data
def negate(self):
"""
Negate the sense of the root connector.
"""
self.negated = not self.negated
|
class Node(object):
'''
A single internal node in the tree graph. A Node should be viewed as a
connection (the root) with the children being either leaf nodes or other
Node instances.
'''
def __init__(self, children=None, connector=None, negated=False):
'''
Constructs a new Node. If no connector is given, the default will be
used.
'''
pass
@classmethod
def _new_instance(cls, children=None, connector=None, negated=False):
'''
This is called to create a new instance of this class when we need new
Nodes (or subclasses) in the internal code in this class. Normally, it
just shadows __init__(). However, subclasses with an __init__ signature
that is not an extension of Node.__init__ might need to implement this
method to allow a Node to create a new instance of them (if they have
any extra setting up to do).
'''
pass
def __str__(self):
pass
def __repr__(self):
pass
def __deepcopy__(self, memodict):
'''
Utility method used by copy.deepcopy().
'''
pass
def __len__(self):
'''
The size of a node if the number of children it has.
'''
pass
def __bool__(self):
'''
For truth value testing.
'''
pass
def __nonzero__(self):
pass
def __contains__(self, other):
'''
Returns True is 'other' is a direct child of this instance.
'''
pass
def add(self, data, conn_type, squash=True):
'''
Combines this tree and the data represented by data using the
connector conn_type. The combine is done by squashing the node other
away if possible.
This tree (self) will never be pushed to a child node of the
combined tree, nor will the connector or negated properties change.
The function returns a node which can be used in place of data
regardless if the node other got squashed or not.
If `squash` is False the data is prepared and added as a child to
this tree without further logic.
'''
pass
def negate(self):
'''
Negate the sense of the root connector.
'''
pass
| 13 | 9 | 9 | 0 | 5 | 4 | 2 | 1.06 | 1 | 3 | 0 | 1 | 10 | 3 | 11 | 11 | 124 | 14 | 54 | 20 | 41 | 57 | 47 | 19 | 35 | 5 | 1 | 2 | 17 |
142,173 |
Karaage-Cluster/python-tldap
|
tldap/utils.py
|
tldap.utils.ConnectionHandler
|
class ConnectionHandler(object):
""" Contains a list of known LDAP connections. """
def __init__(self, databases):
self.databases = databases
self._connections = local()
def __getitem__(self, alias):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
db = self.databases[alias]
backend = load_backend(db['ENGINE'])
conn = backend.LDAPwrapper(db)
setattr(self._connections, alias, conn)
return conn
def __iter__(self):
return iter(self.databases)
def all(self):
""" Return list of all connections. """
return [self[alias] for alias in self]
|
class ConnectionHandler(object):
''' Contains a list of known LDAP connections. '''
def __init__(self, databases):
pass
def __getitem__(self, alias):
pass
def __iter__(self):
pass
def all(self):
''' Return list of all connections. '''
pass
| 5 | 2 | 5 | 1 | 4 | 0 | 1 | 0.13 | 1 | 1 | 0 | 0 | 4 | 2 | 4 | 4 | 24 | 6 | 16 | 10 | 11 | 2 | 16 | 10 | 11 | 2 | 1 | 1 | 5 |
142,174 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.BinaryField
|
class BinaryField(Field):
""" Field contains a binary value that can not be interpreted in anyway.
"""
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert value is None or isinstance(value, bytes)
return value
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
return value
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
|
class BinaryField(Field):
''' Field contains a binary value that can not be interpreted in anyway.
'''
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
| 4 | 4 | 7 | 0 | 3 | 4 | 2 | 1.18 | 1 | 2 | 1 | 0 | 3 | 0 | 3 | 13 | 27 | 3 | 11 | 4 | 7 | 13 | 11 | 4 | 7 | 2 | 2 | 1 | 5 |
142,175 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_database.py
|
tests.a_unit.test_database.SearchMock
|
class SearchMock:
def __init__(self):
self.calls = []
self.results = []
def add_result(self, search: bytes, obj: tldap.database.LdapObject):
assert isinstance(search, bytes)
self.results.append((b"(%s)" % search, obj))
def __call__(
self, base: str, scope: str, filterstr: bytes=b'(objectClass=*)',
attrlist: Optional[List[str]]=None, limit: Optional[int]=None):
self.calls.append((base, scope, filterstr, attrlist, limit))
results = []
for search, obj in self.results:
if search in filterstr:
obj_values = obj.to_dict()
results.append(
(obj['dn'], get_db_values(obj_values, type(obj)))
)
return results
def reset(self):
self.calls = []
|
class SearchMock:
def __init__(self):
pass
def add_result(self, search: bytes, obj: tldap.database.LdapObject):
pass
def __call__(
self, base: str, scope: str, filterstr: bytes=b'(objectClass=*)',
attrlist:
pass
def reset(self):
pass
| 5 | 0 | 6 | 1 | 5 | 0 | 2 | 0 | 0 | 5 | 1 | 0 | 4 | 2 | 4 | 4 | 26 | 5 | 21 | 12 | 14 | 0 | 17 | 10 | 12 | 3 | 0 | 2 | 6 |
142,176 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_database.py
|
tests.a_unit.test_database.Defaults
|
class Defaults:
pass
|
class Defaults:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 0 | 0 |
142,177 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_database.py
|
tests.a_unit.test_database.CheckInstance
|
class CheckInstance(TestObject):
def __init__(self, instance_type):
self._type = instance_type
def __eq__(self, other):
return isinstance(other, self._type)
def __repr__(self):
return '<type %s>' % self._type
|
class CheckInstance(TestObject):
def __init__(self, instance_type):
pass
def __eq__(self, other):
pass
def __repr__(self):
pass
| 4 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 1 | 3 | 4 | 10 | 3 | 7 | 5 | 3 | 0 | 7 | 5 | 3 | 1 | 1 | 0 | 3 |
142,178 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_backend_fake_transactions.py
|
tests.a_unit.test_backend_fake_transactions.TestBackendFakeTransactions
|
class TestBackendFakeTransactions:
def test_roll_back_explicit(self, search_response, defaults):
""" Test explicit roll back. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
with tldap.transaction.commit_on_success():
c.add(dn, defaults.modlist)
c.modify(dn, {
'sn': [(ldap3.MODIFY_REPLACE, [b"Gates"])]
})
c.rollback()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Gates'])]}),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Torvalds'])]}),
call.delete(dn)
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_roll_back_exception(self, search_response, defaults):
""" Test roll back on exception. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
with pytest.raises(RuntimeError):
with tldap.transaction.commit_on_success():
c.add(dn, defaults.modlist)
c.modify(dn, {
'sn': [(ldap3.MODIFY_REPLACE, [b"Gates"])]
})
raise RuntimeError("testing failure")
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Gates'])]}),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Torvalds'])]}),
call.delete(dn)
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_replace_attribute_rollback(self, search_response, defaults):
""" Test replace attribute with explicit roll back. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.modify(dn, {
'sn': [(ldap3.MODIFY_REPLACE, [b"Gates"])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Gates'])]}),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Torvalds'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_replace_attribute_success(self, search_response, defaults):
""" Test change attribute with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.modify(dn, {
'sn': [(ldap3.MODIFY_REPLACE, [b"Gates"])]
})
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Gates'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_replace_attribute_list_rollback(self, search_response, defaults):
""" Test replacing attribute with rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.modify(dn, {
"telephoneNumber": [(ldap3.MODIFY_REPLACE, [b"222"])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_REPLACE', [b'222'])]}),
call.modify(dn, {'telephoneNumber': [('MODIFY_REPLACE', [b'000'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_replace_attribute_list_success(self, search_response, defaults):
""" Test replacing attribute with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.modify(dn, {
'telephoneNumber': [(ldap3.MODIFY_REPLACE, [b"222"])]
})
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_REPLACE', [b'222'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_delete_attribute_rollback(self, search_response, defaults):
""" Test deleting attribute *of new object* with rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.modify(dn, {
"telephoneNumber": [(ldap3.MODIFY_DELETE, [b'000'])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_DELETE', [b'000'])]}),
call.modify(dn, {'telephoneNumber': [('MODIFY_ADD', [b'000'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_delete_attribute_success(self, search_response, defaults):
""" Test deleting attribute *of new object* with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.modify(dn, {
"telephoneNumber": [(ldap3.MODIFY_DELETE, [b'000'])]
})
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_DELETE', [b'000'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_add_attribute_rollback(self, search_response, defaults):
""" Test adding attribute with rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.modify(dn, {
"telephoneNumber": [(ldap3.MODIFY_ADD, [b"111"])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_ADD', [b'111'])]}),
call.modify(dn, {'telephoneNumber': [('MODIFY_DELETE', [b'111'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_add_attribute_success(
self, search_response, defaults):
""" Test adding attribute with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.modify(dn, {
'telephoneNumber': [(ldap3.MODIFY_ADD, [b"111"])]
})
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'telephoneNumber': [('MODIFY_ADD', [b'111'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_third_statement_fails(self, search_response, defaults):
"""
Test success when 3rd statement fails;
Need to roll back 2nd and 1st statements
"""
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.modify(dn, {
"sn": [(ldap3.MODIFY_REPLACE, b"Milkshakes")]
})
c.modify(dn, {
"sn": [(ldap3.MODIFY_REPLACE, [b"Bannas"])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', b'Milkshakes')]}),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn, {'sn': [('MODIFY_REPLACE', [b'Bannas'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_rename_rollback(self, search_response, defaults):
""" Test rename with rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
dn2 = 'uid=tuz,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.rename(
dn, 'uid=tuz',
)
c.modify(dn2, {
"sn": [(ldap3.MODIFY_REPLACE, [b"Tuz"])]
})
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.modify_dn(dn, 'uid=tuz', new_superior=None),
call.search(dn2, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn2, {'sn': [('MODIFY_REPLACE', [b'Tuz'])]}),
call.modify(dn2, {'sn': [('MODIFY_REPLACE', [b'Torvalds'])]}),
call.modify_dn(dn2, 'uid=tux', new_superior=None),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_rename_success(self, search_response, defaults):
""" Test rename with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
dn2 = 'uid=tuz,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.rename(
dn, 'uid=tuz',
)
c.modify(dn2, {
'sn': [(ldap3.MODIFY_REPLACE, [b"Tuz"])]
})
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.modify_dn(dn, 'uid=tuz', new_superior=None),
call.search(dn2, '(objectclass=*)', 'BASE', attributes=ANY),
call.modify(dn2, {'sn': [('MODIFY_REPLACE', [b'Tuz'])]}),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_move_rollback(self, search_response, defaults):
""" Test move with rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
dn2 = 'uid=tux,ou=Groups,dc=python-ldap,dc=org'
old_base = 'ou=People,dc=python-ldap,dc=org'
new_base = 'ou=Groups,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.rename(
dn,
"uid=tux", "ou=Groups,dc=python-ldap,dc=org",
)
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.modify_dn(dn, 'uid=tux', new_superior=new_base),
call.modify_dn(dn2, 'uid=tux', new_superior=old_base),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_move_success(self, search_response, defaults):
""" Test move with success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
new_base = 'ou=Groups,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.rename(
dn,
"uid=tux", new_base,
)
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.modify_dn(dn, 'uid=tux', new_superior=new_base),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_delete_rollback(self, search_response, defaults):
""" Test delete rollback. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with pytest.raises(tldap.exceptions.TestFailure):
with tldap.transaction.commit_on_success():
c.delete(dn)
c.fail() # raises TestFailure during commit causing rollback
c.commit()
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.delete(dn),
call.add(dn, None, defaults.modlist),
]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_delete_success(self, search_response, defaults):
""" Test delete success. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
c.add(dn, defaults.modlist)
with tldap.transaction.commit_on_success():
c.delete(dn)
expected_calls = [
call.open(),
call.bind(),
call.add(dn, None, defaults.modlist),
call.search(dn, '(objectclass=*)', 'BASE', attributes=ANY),
call.delete(dn),
]
defaults.mock_connection.assert_has_calls(expected_calls)
|
class TestBackendFakeTransactions:
def test_roll_back_explicit(self, search_response, defaults):
''' Test explicit roll back. '''
pass
def test_roll_back_exception(self, search_response, defaults):
''' Test roll back on exception. '''
pass
def test_replace_attribute_rollback(self, search_response, defaults):
''' Test replace attribute with explicit roll back. '''
pass
def test_replace_attribute_success(self, search_response, defaults):
''' Test change attribute with success. '''
pass
def test_replace_attribute_list_rollback(self, search_response, defaults):
''' Test replacing attribute with rollback. '''
pass
def test_replace_attribute_list_success(self, search_response, defaults):
''' Test replacing attribute with success. '''
pass
def test_delete_attribute_rollback(self, search_response, defaults):
''' Test deleting attribute *of new object* with rollback. '''
pass
def test_delete_attribute_success(self, search_response, defaults):
''' Test deleting attribute *of new object* with success. '''
pass
def test_add_attribute_rollback(self, search_response, defaults):
''' Test adding attribute with rollback. '''
pass
def test_add_attribute_success(
self, search_response, defaults):
''' Test adding attribute with success. '''
pass
def test_third_statement_fails(self, search_response, defaults):
'''
Test success when 3rd statement fails;
Need to roll back 2nd and 1st statements
'''
pass
def test_rename_rollback(self, search_response, defaults):
''' Test rename with rollback. '''
pass
def test_rename_success(self, search_response, defaults):
''' Test rename with success. '''
pass
def test_move_rollback(self, search_response, defaults):
''' Test move with rollback. '''
pass
def test_move_success(self, search_response, defaults):
''' Test move with success. '''
pass
def test_delete_rollback(self, search_response, defaults):
''' Test delete rollback. '''
pass
def test_delete_success(self, search_response, defaults):
''' Test delete success. '''
pass
| 18 | 17 | 23 | 2 | 20 | 2 | 1 | 0.08 | 0 | 2 | 1 | 0 | 17 | 0 | 17 | 17 | 416 | 51 | 345 | 76 | 326 | 28 | 190 | 75 | 172 | 1 | 0 | 2 | 17 |
142,179 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_backend_fake_transactions.py
|
tests.a_unit.test_backend_fake_transactions.TestBackendBase
|
class TestBackendBase:
def test_check_password_correct(self, defaults):
""" Test if we can logon correctly with correct password. """
result = tldap.backend.connection.check_password(
'cn=Manager,dc=python-ldap,dc=org',
'password'
)
assert result is True
defaults.mock_class.assert_called_once_with(
defaults.expected_server,
authentication='SIMPLE',
password='password',
user='cn=Manager,dc=python-ldap,dc=org')
expected_calls = [call.open(), call.bind(), call.unbind()]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_check_password_wrong(self, defaults):
""" Test that we can't logon correctly with wrong password. """
defaults.mock_connection.bind.side_effect = \
errors.LDAPInvalidCredentialsResult()
result = tldap.backend.connection.check_password(
'cn=Manager,dc=python-ldap,dc=org',
'password2'
)
assert result is False
defaults.mock_class.assert_called_once_with(
defaults.expected_server,
authentication='SIMPLE',
password='password2',
user='cn=Manager,dc=python-ldap,dc=org')
expected_calls = [call.open(), call.bind(), call.unbind()]
defaults.mock_connection.assert_has_calls(expected_calls)
def test_search(self, search_response, defaults):
""" Test base search scope. """
dn = 'uid=tux,ou=People,dc=python-ldap,dc=org'
search_response.add(dn, defaults.modlist)
search_response.add(dn, defaults.modlist)
c = tldap.backend.connection
mock_dn = mock.Mock()
mock_scope = mock.Mock()
mock_filter = mock.Mock()
mock_limit = mock.Mock()
r = c.search(mock_dn, mock_scope, mock_filter, limit=mock_limit)
assert next(r)[1] == defaults.modlist
assert next(r)[1] == defaults.modlist
with pytest.raises(StopIteration):
next(r)
expected_calls = [
call.open(),
call.bind(),
call.search(
mock_dn, mock_filter, mock_scope,
attributes=ANY, paged_size=mock_limit),
]
defaults.mock_connection.assert_has_calls(expected_calls)
|
class TestBackendBase:
def test_check_password_correct(self, defaults):
''' Test if we can logon correctly with correct password. '''
pass
def test_check_password_wrong(self, defaults):
''' Test that we can't logon correctly with wrong password. '''
pass
def test_search(self, search_response, defaults):
''' Test base search scope. '''
pass
| 4 | 3 | 21 | 3 | 17 | 1 | 1 | 0.06 | 0 | 1 | 0 | 0 | 3 | 0 | 3 | 3 | 65 | 11 | 51 | 16 | 47 | 3 | 30 | 16 | 26 | 1 | 0 | 1 | 3 |
142,180 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_backend_fake_transactions.py
|
tests.a_unit.test_backend_fake_transactions.ServerComparer
|
class ServerComparer:
def __init__(self, server):
self._server = server
assert isinstance(server, ldap3.Server)
def __eq__(self, other):
if not isinstance(other, ldap3.Server):
return False
if self._server.name != other.name:
return False
return True
|
class ServerComparer:
def __init__(self, server):
pass
def __eq__(self, other):
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 11 | 1 | 10 | 4 | 7 | 0 | 10 | 4 | 7 | 3 | 0 | 1 | 4 |
142,181 |
Karaage-Cluster/python-tldap
|
tests/a_unit/test_backend_fake_transactions.py
|
tests.a_unit.test_backend_fake_transactions.MockSearchResponse
|
class MockSearchResponse:
def __init__(self):
self.response = []
def add(self, dn, db_values):
self.response.append({
'type': 'searchResEntry',
'dn': dn,
'raw_attributes': db_values,
})
def __getitem__(self, key):
return self.response[key]
def __iter__(self):
return iter(self.response)
def __len__(self):
return len(self.response)
|
class MockSearchResponse:
def __init__(self):
pass
def add(self, dn, db_values):
pass
def __getitem__(self, key):
pass
def __iter__(self):
pass
def __len__(self):
pass
| 6 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 5 | 1 | 5 | 5 | 19 | 4 | 15 | 7 | 9 | 0 | 11 | 7 | 5 | 1 | 0 | 0 | 5 |
142,182 |
Karaage-Cluster/python-tldap
|
tldap/test/slapd.py
|
tldap.test.slapd.Slapd
|
class Slapd:
"""
Controller class for a slapd instance, OpenLDAP's server.
This class creates a temporary data store for slapd, runs it
on a private port, and initialises it with a top-level dc and
the root user.
When a reference to an instance of this class is lost, the slapd
server is shut down.
"""
_log = logging.getLogger("Slapd")
# Use /var/tmp to placate apparmour on Ubuntu:
TEST_UTILS_DIR = os.path.abspath(os.path.split(__file__)[0])
PATH_SCHEMA_DIR = TEST_UTILS_DIR + "/ldap_schemas/"
PATH_LDAPADD = "ldapadd"
PATH_LDAPSEARCH = "ldapsearch"
PATH_SLAPD = "slapd"
PATH_SLAP_TEST = "slaptest"
def __init__(self) -> None:
self._proc = None
self._proc_config: Optional[str] = None
self._port: int = 0
self._tmpdir: Optional[str] = None
self._dn_suffix: str = "dc=python-ldap,dc=org"
self._root_cn: str = "Manager"
self._root_password: str = "password"
self._slapd_debug_level: int or str = 0
self._env: Dict[str, str] = {
'PATH': os.getenv('PATH')
}
# Setters
def set_port(self, port: int) -> None:
self._port = port
def set_dn_suffix(self, dn: str) -> None:
self._dn_suffix = dn
def set_root_cn(self, cn: str) -> None:
self._root_cn = cn
def set_root_password(self, pw: str) -> None:
self._root_password = pw
def set_slapd_debug_level(self, level: int or str) -> None:
self._slapd_debug_level = level
def set_debug(self) -> None:
self._log.setLevel(logging.DEBUG)
self.set_slapd_debug_level('Any')
# getters
def get_url(self) -> str:
return "ldap://%s:%d/" % self.get_address()
def get_address(self) -> Tuple[str, int]:
if self._port == 0:
self._port = find_available_tcp_port(LOCALHOST)
return LOCALHOST, self._port
def get_dn_suffix(self) -> str:
return self._dn_suffix
def get_root_dn(self) -> str:
return "cn=" + self._root_cn + "," + self.get_dn_suffix()
def get_root_password(self) -> str:
return self._root_password
def _setup_tmp_dir(self) -> None:
self._tmpdir = tempfile.mkdtemp()
ldif_dir = mkdirs(os.path.join(self._tmpdir, "ldif-data"))
delete_directory_content(ldif_dir) # clear it out
self._proc_config = os.path.join(self._tmpdir, "slapd.conf")
def _configure(self) -> List[str]:
"""
Appends slapd.conf configuration lines to cfg.
Also re-initializes any backing storage.
Feel free to subclass and override this method.
"""
ldif_dir = os.path.join(self._tmpdir, "ldif-data")
cfg = []
# Global
schema_list = os.listdir(self.PATH_SCHEMA_DIR)
schema_list.sort()
for schema in schema_list:
cfg.append("include " + quote(self.PATH_SCHEMA_DIR + schema))
cfg.append("allow bind_v2")
# Database
cfg.append("moduleload back_mdb")
cfg.append("moduleload ppolicy")
cfg.append('')
cfg.append("database mdb")
cfg.append("directory " + quote(ldif_dir))
cfg.append("suffix " + quote(self.get_dn_suffix()))
cfg.append("overlay ppolicy")
cfg.append(f'ppolicy_default {quote("cn=default,"+self.get_dn_suffix())}')
cfg.append("# rootdn " + quote(self.get_root_dn()))
cfg.append("# rootpw " + quote(
lp.encode_password(self.get_root_password())))
cfg.append('')
cfg.append(f'access to dn.sub={quote(self.get_dn_suffix())} attrs=userPassword')
cfg.append(' by anonymous auth')
cfg.append('')
cfg.append(f'access to dn.sub={quote(self.get_dn_suffix())}')
cfg.append(f' by dn.exact={quote(self.get_root_dn())} write')
cfg.append('')
return cfg
def _write_config(self) -> None:
"""Writes the slapd.conf file out, and returns the path to it."""
cfg = self._configure()
path = self._proc_config
mkdirs(self._tmpdir)
if os.access(path, os.F_OK):
self._log.debug("deleting existing %s", path)
os.remove(path)
self._log.debug("writing config to %s", path)
f = open(path, "w")
f.writelines([line + "\n" for line in cfg])
f.close()
def _populate(self) -> None:
suffix_dc = self.get_dn_suffix().split(',')[0][3:]
root_cn = self.get_root_dn().split(',')[0][3:]
p = os.path.join(self._tmpdir, "admin.ldif")
with open(p, "w") as f:
f.write(f"dn: {self.get_dn_suffix()}\n")
f.write(f"dc: {suffix_dc}\n")
f.write(f"o: {suffix_dc}\n")
f.write("objectClass: dcObject\n")
f.write("objectClass: organization\n")
f.write("\n")
f.write(f"dn: {self.get_root_dn()}\n")
f.write(f"cn: {root_cn}\n")
f.write("objectClass: simpleSecurityObject\n")
f.write("objectClass: organizationalRole\n")
f.write(f"userPassword: {lp.encode_password(self.get_root_password())}\n")
f.write("\n")
f.write(f'dn: cn=default,{self.get_dn_suffix()}\n')
f.write('objectClass: top\n')
f.write('objectClass: device\n')
f.write('objectClass: pwdPolicy\n')
f.write('pwdAttribute: userPassword\n')
f.write('pwdLockout: TRUE\n')
f.write("\n")
f.write(f'dn: ou=People,{self.get_dn_suffix()}\n')
f.write('objectClass: top\n')
f.write('objectClass: OrganizationalUnit\n')
f.write('ou: People\n')
f.write("\n")
f.write(f'dn: ou=Groups,{self.get_dn_suffix()}\n')
f.write('objectClass: top\n')
f.write('objectClass: OrganizationalUnit\n')
f.write('ou: Groups\n')
config_path = os.path.join(self._tmpdir, "slapd.conf")
subprocess.check_call(["slapadd", "-n", "1", "-f", config_path, "-l", p])
def start(self) -> None:
"""
Starts the slapd server process running, and waits for it to come up.
"""
if self._proc is None:
ok = False
try:
self._setup_tmp_dir()
self._write_config()
self._populate()
self._test_configuration()
if is_port_in_use(self._port):
raise Exception('Port %s is already in use' % self._port)
self._start_slapd()
self._wait_for_slapd()
ok = True
self._log.debug("slapd ready at %s", self.get_url())
finally:
if not ok:
if self._proc:
self.stop()
def _start_slapd(self) -> None:
# Spawns/forks the slapd process
self._log.info("starting slapd")
self._proc = subprocess.Popen([
self.PATH_SLAPD,
"-f", self._proc_config,
"-h", self.get_url(),
"-d", str(self._slapd_debug_level),
], env=self._env)
def _wait_for_slapd(self) -> None:
# Waits until the LDAP server socket is open, or slapd crashed
s = socket.socket()
while 1:
if self._proc.poll() is not None:
self._stopped()
raise RuntimeError("slapd exited before opening port")
try:
self._log.debug("Connecting to %s", repr(self.get_address()))
s.connect(self.get_address())
s.close()
return
except socket.error:
time.sleep(1)
def stop(self) -> None:
"""Stops the slapd server, and waits for it to terminate"""
if self._proc is not None:
self._log.debug("stopping slapd")
if hasattr(self._proc, 'terminate'):
self._proc.terminate()
else:
import posix
import signal
posix.kill(self._proc.pid, signal.SIGHUP)
# time.sleep(1)
# posix.kill(self._proc.pid, signal.SIGTERM)
# posix.kill(self._proc.pid, signal.SIGKILL)
self.wait()
def restart(self) -> None:
"""
Restarts the slapd server; ERASING previous content.
Starts the server even it if isn't already running.
"""
self.stop()
self.start()
def wait(self) -> None:
"""Waits for the slapd process to terminate by itself."""
if self._proc:
self._proc.wait()
self._stopped()
def _stopped(self) -> None:
"""Called when the slapd server is known to have terminated"""
if self._proc is not None:
self._log.info("slapd terminated")
self._proc = None
self._proc_config = None
if self._tmpdir is not None:
import shutil
shutil.rmtree(self._tmpdir)
self._tmpdir = None
def _test_configuration(self) -> None:
self._log.debug("testing configuration")
verbose_flag = "-Q"
if self._log.isEnabledFor(logging.DEBUG):
verbose_flag = "-v"
p = subprocess.Popen(
[
self.PATH_SLAP_TEST,
verbose_flag,
"-f", self._proc_config,
], env=self._env)
if p.wait() != 0:
raise RuntimeError("configuration test failed")
self._log.debug("configuration seems ok")
def ldap_add(self, ldif: str, extra_args: Optional[List] = None) -> None:
"""Runs ldapadd on this slapd instance, passing it the ldif content"""
if extra_args is None:
extra_args = []
self._log.debug("adding %s", repr(ldif))
p = subprocess.Popen([
self.PATH_LDAPADD,
"-x",
"-D", self.get_root_dn(),
"-w", self.get_root_password(),
"-H", self.get_url()] + extra_args,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
env=self._env)
p.communicate(ldif.encode("utf_8"))
if p.wait() != 0:
raise RuntimeError("ldapadd process failed")
def ldap_search(self, base: Optional[str] = None,
filter: str = '(objectClass=*)',
attrs: Optional[List[str]] = None,
scope: str = 'sub',
extra_args: Optional[List[str]] = None):
if base is None:
base = self.get_dn_suffix()
if attrs is None:
attrs = []
if extra_args is None:
extra_args = []
self._log.debug("ldapsearch filter=%s", repr(filter))
p = subprocess.Popen([
self.PATH_LDAPSEARCH,
"-x",
"-D", self.get_root_dn(),
"-w", self.get_root_password(),
"-H", self.get_url(),
"-b", base,
"-s", scope,
"-LL", ] + extra_args + [filter] + attrs,
stdout=subprocess.PIPE,
env=self._env)
output = p.communicate()[0]
if p.wait() != 0:
raise RuntimeError("ldapadd process failed")
# RFC 2849: LDIF format
# unfold
lines = []
output = output.decode("utf_8")
for line in output.split('\n'):
if line.startswith(' '):
lines[-1] = lines[-1] + line[1:]
elif line == '' and lines and lines[-1] == '':
pass # ignore multiple blank lines
else:
lines.append(line)
# Remove comments
lines = [line for line in lines if not line.startswith("#")]
# Remove leading version and blank line(s)
if lines and lines[0] == '':
del lines[0]
if not lines or lines[0] != 'version: 1':
raise RuntimeError("expected 'version: 1', got " + repr(lines[:1]))
del lines[0]
if lines and lines[0] == '':
del lines[0]
# ensure the ldif ends with a blank line (unless it is just blank)
if lines and lines[-1] != '':
lines.append('')
objects = []
obj = []
for line in lines:
if line == '': # end of an object
if obj[0][0] != 'dn':
raise RuntimeError("first line not dn", repr(obj))
objects.append((obj[0][1], obj[1:]))
obj = []
else:
attr, value = line.split(':', 2)
if value.startswith(': '):
value = base64.decodebytes(value[2:])
elif value.startswith(' '):
value = value[1:]
else:
raise RuntimeError("bad line: " + repr(line))
obj.append((attr, value))
assert obj == []
return objects
|
class Slapd:
'''
Controller class for a slapd instance, OpenLDAP's server.
This class creates a temporary data store for slapd, runs it
on a private port, and initialises it with a top-level dc and
the root user.
When a reference to an instance of this class is lost, the slapd
server is shut down.
'''
def __init__(self) -> None:
pass
def set_port(self, port: int) -> None:
pass
def set_dn_suffix(self, dn: str) -> None:
pass
def set_root_cn(self, cn: str) -> None:
pass
def set_root_password(self, pw: str) -> None:
pass
def set_slapd_debug_level(self, level: int or str) -> None:
pass
def set_debug(self) -> None:
pass
def get_url(self) -> str:
pass
def get_address(self) -> Tuple[str, int]:
pass
def get_dn_suffix(self) -> str:
pass
def get_root_dn(self) -> str:
pass
def get_root_password(self) -> str:
pass
def _setup_tmp_dir(self) -> None:
pass
def _configure(self) -> List[str]:
'''
Appends slapd.conf configuration lines to cfg.
Also re-initializes any backing storage.
Feel free to subclass and override this method.
'''
pass
def _write_config(self) -> None:
'''Writes the slapd.conf file out, and returns the path to it.'''
pass
def _populate(self) -> None:
pass
def start(self) -> None:
'''
Starts the slapd server process running, and waits for it to come up.
'''
pass
def _start_slapd(self) -> None:
pass
def _wait_for_slapd(self) -> None:
pass
def stop(self) -> None:
'''Stops the slapd server, and waits for it to terminate'''
pass
def restart(self) -> None:
'''
Restarts the slapd server; ERASING previous content.
Starts the server even it if isn't already running.
'''
pass
def wait(self) -> None:
'''Waits for the slapd process to terminate by itself.'''
pass
def _stopped(self) -> None:
'''Called when the slapd server is known to have terminated'''
pass
def _test_configuration(self) -> None:
pass
def ldap_add(self, ldif: str, extra_args: Optional[List] = None) -> None:
'''Runs ldapadd on this slapd instance, passing it the ldif content'''
pass
def ldap_search(self, base: Optional[str] = None,
filter: str = '(objectClass=*)',
attrs:
pass
| 27 | 9 | 12 | 1 | 11 | 1 | 2 | 0.16 | 0 | 6 | 0 | 0 | 26 | 9 | 26 | 26 | 368 | 47 | 281 | 75 | 247 | 46 | 240 | 70 | 210 | 17 | 0 | 4 | 61 |
142,183 |
Karaage-Cluster/python-tldap
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Karaage-Cluster_python-tldap/tldap/transaction.py
|
tldap.transaction.Transaction
|
class Transaction(object):
"""
Acts as either a decorator, or a context manager. If it's a decorator it
takes a function and returns a wrapped function. If it's a contextmanager
it's used with the ``with`` statement. In either event entering/exiting
are called before and after, respectively, the function/block is executed.
autocommit, commit_on_success, and commit_manually contain the
implementations of entering and exiting.
"""
def __init__(self, entering, exiting, using):
self.entering = entering
self.exiting = exiting
self.using = using
def __enter__(self):
self.entering(self.using)
def __exit__(self, exc_type, exc_value, traceback):
self.exiting(exc_value, self.using)
def __call__(self, func):
@wraps(func)
def inner(*args, **kwargs):
# Once we drop support for Python 2.4 this block should become:
# with self:
# func(*args, **kwargs)
self.__enter__()
try:
res = func(*args, **kwargs)
except: # noqa: E722
self.__exit__(*sys.exc_info())
raise
else:
self.__exit__(None, None, None)
return res
return inner
|
class Transaction(object):
'''
Acts as either a decorator, or a context manager. If it's a decorator it
takes a function and returns a wrapped function. If it's a contextmanager
it's used with the ``with`` statement. In either event entering/exiting
are called before and after, respectively, the function/block is executed.
autocommit, commit_on_success, and commit_manually contain the
implementations of entering and exiting.
'''
def __init__(self, entering, exiting, using):
pass
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
def __call__(self, func):
pass
@wraps(func)
def inner(*args, **kwargs):
pass
| 7 | 1 | 7 | 0 | 6 | 2 | 1 | 0.55 | 1 | 0 | 0 | 0 | 4 | 3 | 4 | 4 | 37 | 4 | 22 | 11 | 15 | 12 | 21 | 10 | 15 | 2 | 1 | 1 | 6 |
142,184 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.SecondsSinceEpochField
|
class SecondsSinceEpochField(Field):
""" Field is an integer containing number of seconds since epoch. """
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
try:
value = int(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
try:
value = datetime.datetime.utcfromtimestamp(value)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
return value
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, datetime.datetime)
try:
value = value - datetime.datetime(1970, 1, 1)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
value = value.seconds + value.days * 24 * 3600
value = str(value).encode("utf_8")
return value
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, datetime.datetime):
raise tldap.exceptions.ValidationError("is invalid date time")
|
class SecondsSinceEpochField(Field):
''' Field is an integer containing number of seconds since epoch. '''
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_validate(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
| 4 | 4 | 14 | 2 | 8 | 4 | 3 | 0.56 | 1 | 8 | 1 | 0 | 3 | 0 | 3 | 13 | 48 | 9 | 25 | 4 | 21 | 14 | 25 | 4 | 21 | 4 | 2 | 1 | 8 |
142,185 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.SidField
|
class SidField(Field):
""" Field is a binary representation of a Microsoft SID. """
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
length = len(value) - 8
if length % 4 != 0:
raise tldap.exceptions.ValidationError("Invalid sid")
length = length // 4
array = struct.unpack('<bbbbbbbb' + 'I' * length, value)
if array[1] != length:
raise tldap.exceptions.ValidationError("Invalid sid")
if array[2:7] != (0, 0, 0, 0, 0):
raise tldap.exceptions.ValidationError("Invalid sid")
array = ("S", ) + array[0:1] + array[7:]
return "-".join([str(i) for i in array])
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, str)
array = value.split("-")
length = len(array) - 3
assert length >= 0
assert array[0] == 'S'
array = array[1:2] + [length, 0, 0, 0, 0, 0] + array[2:]
array = [int(i) for i in array]
return struct.pack('<bbbbbbbb' + 'I' * length, *array)
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, str):
raise tldap.exceptions.ValidationError("Invalid sid")
array = value.split("-")
length = len(array) - 3
if length < 1:
raise tldap.exceptions.ValidationError("Invalid sid")
if array.pop(0) != "S":
raise tldap.exceptions.ValidationError("Invalid sid")
try:
[int(i) for i in array]
except TypeError:
raise tldap.exceptions.ValidationError("Invalid sid")
|
class SidField(Field):
''' Field is a binary representation of a Microsoft SID. '''
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_validate(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
| 4 | 4 | 21 | 5 | 12 | 4 | 4 | 0.38 | 1 | 5 | 1 | 0 | 3 | 0 | 3 | 13 | 69 | 18 | 37 | 10 | 33 | 14 | 37 | 10 | 33 | 5 | 2 | 1 | 11 |
142,186 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.IntegerField
|
class IntegerField(Field):
""" Field contains an integer value. """
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be bytes")
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, six.integer_types)
return str(value).encode("utf_8")
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, six.integer_types):
raise tldap.exceptions.ValidationError("should be a integer")
try:
return str(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
|
class IntegerField(Field):
''' Field contains an integer value. '''
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_validate(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
| 4 | 4 | 11 | 0 | 6 | 4 | 3 | 0.7 | 1 | 6 | 1 | 0 | 3 | 0 | 3 | 13 | 38 | 4 | 20 | 4 | 16 | 14 | 20 | 4 | 16 | 4 | 2 | 1 | 8 |
142,187 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.Field
|
class Field(object):
""" The base field type. """
db_field = True
def __init__(self, max_instances=1, required=False):
self._max_instances = max_instances
self._required = required
@property
def is_list(self):
return self._max_instances != 1
def to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
# ensure value is valid
self.validate(value)
assert isinstance(value, list)
value = list(value)
for i, v in enumerate(value):
value[i] = self.value_to_db(v)
# return result
assert isinstance(value, list)
return value
def to_python(self, value):
"""
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
"""
assert isinstance(value, list)
# convert every value in list
value = list(value)
for i, v in enumerate(value):
value[i] = self.value_to_python(v)
# return result
return value
def validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
# check object type
if not isinstance(value, list):
raise tldap.exceptions.ValidationError(
"is not a list and max_instances is %s" %
self._max_instances)
# check maximum instances
if (self._max_instances is not None and
len(value) > self._max_instances):
raise tldap.exceptions.ValidationError(
"exceeds max_instances of %d" %
self._max_instances)
# check this required value is given
if self._required:
if len(value) == 0:
raise tldap.exceptions.ValidationError(
"is required")
# validate the value
for i, v in enumerate(value):
self.value_validate(v)
def clean(self, value):
"""
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
"""
value = self.to_python(value)
self.validate(value)
return value
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
raise RuntimeError("Not implemented")
def value_to_filter(self, value):
return self.value_to_db(value)
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
raise RuntimeError("Not implemented")
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
raise RuntimeError("Not implemented")
|
class Field(object):
''' The base field type. '''
def __init__(self, max_instances=1, required=False):
pass
@property
def is_list(self):
pass
def to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def to_python(self, value):
'''
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
'''
pass
def validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
def clean(self, value):
'''
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
'''
pass
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_to_filter(self, value):
pass
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
| 12 | 8 | 9 | 1 | 5 | 3 | 2 | 0.7 | 1 | 4 | 1 | 8 | 10 | 2 | 10 | 10 | 100 | 15 | 50 | 18 | 38 | 35 | 43 | 17 | 32 | 6 | 1 | 2 | 17 |
142,188 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.FakeField
|
class FakeField(Field):
db_field = False
""" Field contains a binary value that can not be interpreted in anyway.
"""
# def get_db(self, db_data):
# return None
#
# def set_db(self, db_data, python_value):
# pass
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
return None
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
return None
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
|
class FakeField(Field):
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_validate(self, value):
'''
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
'''
pass
| 4 | 3 | 5 | 0 | 2 | 4 | 1 | 2.57 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 13 | 29 | 4 | 7 | 5 | 3 | 18 | 7 | 5 | 3 | 1 | 2 | 0 | 3 |
142,189 |
Karaage-Cluster/python-tldap
|
tldap/fields.py
|
tldap.fields.DaysSinceEpochField
|
class DaysSinceEpochField(Field):
""" Field is an integer containing number of days since epoch. """
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
try:
value = int(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
try:
value = datetime.date.fromtimestamp(value * 24 * 60 * 60)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
return value
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, datetime.date)
assert not isinstance(value, datetime.datetime)
try:
value = value - datetime.date(year=1970, month=1, day=1)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
return str(value.days).encode("utf_8")
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, datetime.date):
raise tldap.exceptions.ValidationError("is invalid date")
# a datetime is also a date but they are not compatable
if isinstance(value, datetime.datetime):
raise tldap.exceptions.ValidationError("should be a date, not a datetime")
|
class DaysSinceEpochField(Field):
''' Field is an integer containing number of days since epoch. '''
def value_to_python(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
def value_to_db(self, value):
''' Returns field's single value prepared for saving into a database. '''
pass
def value_validate(self, value):
'''
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
'''
pass
| 4 | 4 | 15 | 2 | 8 | 5 | 3 | 0.58 | 1 | 9 | 1 | 0 | 3 | 0 | 3 | 13 | 49 | 8 | 26 | 4 | 22 | 15 | 26 | 4 | 22 | 4 | 2 | 1 | 9 |
142,190 |
Karaage-Cluster/python-tldap
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Karaage-Cluster_python-tldap/tldap/django/models.py
|
tldap.django.models.Counters.Meta
|
class Meta:
db_table = 'tldap_counters'
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 1 | 0 | 2 | 2 | 1 | 0 | 0 | 0 | 0 |
142,191 |
KarchinLab/probabilistic2020
|
KarchinLab_probabilistic2020/prob2020/python/amino_acid.py
|
prob2020.python.amino_acid.AminoAcid
|
class AminoAcid(object):
""" The AminoAcid class represents aa changes in the Cosmic Database.
The AminoAcid class follows the syntax of HGVS
(http://www.hgvs.org/mutnomen/recs-prot.html). Although the parsing
generally follows the HGVS syntax, it does have slight variations where
the COSMIC database uses idiosyncratic conventions.
The AminoAcid class is only intended to be used in the following way::
>>> aa = AminoAcid('p.A267C')
>>> aa.pos
267
>>> aa.is_missense
True
Namely, the constructor parses the necessary HGVS string and then extracts
attributes that can be used.
"""
def __init__(self, hgvs='', occurrence=1):
self.logger = logging.getLogger(__name__)
# initialize flags to prevent errors
self.is_non_silent = False
self.is_synonymous = False
# parse HGVS string
if not (type(hgvs) is str or type(hgvs) is type(u'')):
# catches cases where wierd non-string input is used
self.is_valid = False
self.set_mutation_type()
elif 'P.' not in hgvs.upper():
# don't use mutations without "p." syntax
# many cases, these are "junk" and clearly
# do not represent a mutation
self.is_valid = False
self.set_mutation_type()
else:
# expected case of string
self.hgvs_original = hgvs
hgvs = hgvs.upper().replace('>', '') # convert everything to upper case
self.hgvs = hgvs
self.occurrence = occurrence
self.set_amino_acid(hgvs)
self.set_mutation_type()
def set_mutation_type(self, mut_type=''):
"""Sets the mutation type attribute to a single label based on
attribute flags.
Kwargs:
mut_type (str): value to set self.mut_type
"""
if mut_type:
# user specifies a mutation type
self.mutation_type = mut_type
else:
# mutation type is taken from object attributes
if not self.is_valid:
# does not correctly fall into a category
self.mutation_type = 'not valid'
elif self.unknown_effect:
self.mutation_type = 'unknown effect'
elif self.is_no_protein:
self.mutation_type = 'no protein'
elif self.is_missing_info:
# mutation has a ?
self.mutation_type = 'missing'
else:
# valid mutation type to be counted
if self.is_lost_stop:
self.mutation_type = 'Nonstop_Mutation'
elif self.is_lost_start:
self.mutation_type = 'Translation_Start_Site'
elif self.is_synonymous:
# synonymous must go before missense since mutations
# can be categorized as synonymous and missense. Although
# in reality such cases are actually synonymous and not
# missense mutations.
self.mutation_type = 'Silent'
elif self.is_missense:
self.mutation_type = 'Missense_Mutation'
elif self.is_indel:
self.mutation_type = 'In_Frame_Indel'
elif self.is_nonsense_mutation:
self.mutation_type = 'Nonsense_Mutation'
elif self.is_frame_shift:
self.mutation_type = 'Frame_Shift_Indel'
def set_occurrence(self, occur):
self.occurrence = occur
def set_amino_acid(self, aa):
"""Set amino acid change and position."""
aa = aa.upper() # make sure it is upper case
aa = aa[2:] if aa.startswith('P.') else aa # strip "p."
self.__set_mutation_status() # set flags detailing the type of mutation
self.__parse_hgvs_syntax(aa) # read in specific mutations
def __set_mutation_status(self):
# strip "p." from HGVS protein syntax
hgvs_tmp = self.hgvs[2:] if self.hgvs.startswith("P.") else self.hgvs
# set evidence status
self.__set_unkown_effect(hgvs_tmp) # unknown effect
self.__set_no_protein(hgvs_tmp) # no protein
self.__set_mutation_type(hgvs_tmp) # indel, missense, etc.
def __set_mutation_type(self, hgvs_string):
"""Interpret the mutation type (missense, etc.) and set appropriate flags.
Args:
hgvs_string (str): hgvs syntax with "p." removed
"""
self.__set_lost_stop_status(hgvs_string)
self.__set_lost_start_status(hgvs_string)
self.__set_missense_status(hgvs_string) # missense mutations
self.__set_indel_status() # indel mutations
self.__set_frame_shift_status() # check for fs
self.__set_premature_stop_codon_status(hgvs_string) # check for stops
def __set_missense_status(self, hgvs_string):
"""Sets the self.is_missense flag."""
# set missense status
if re.search('^[A-Z?]\d+[A-Z?]$', hgvs_string):
self.is_missense = True
self.is_non_silent = True
else:
self.is_missense = False
def __set_lost_start_status(self, hgvs_string):
"""Sets the self.is_lost_start flag."""
# set is lost start status
mymatch = re.search('^([A-Z?])(\d+)([A-Z?])$', hgvs_string)
if mymatch:
grps = mymatch.groups()
if int(grps[1]) == 1 and grps[0] != grps[2]:
self.is_lost_start = True
self.is_non_silent = True
else:
self.is_lost_start = False
else:
self.is_lost_start = False
def __set_frame_shift_status(self):
"""Check for frame shift and set the self.is_frame_shift flag."""
if 'fs' in self.hgvs_original:
self.is_frame_shift = True
self.is_non_silent = True
elif re.search('[A-Z]\d+[A-Z]+\*', self.hgvs_original):
# it looks like some mutations dont follow the convention
# of using 'fs' to indicate frame shift
self.is_frame_shift = True
self.is_non_silent = True
else:
self.is_frame_shift = False
def __set_lost_stop_status(self, hgvs_string):
"""Check if the stop codon was mutated to something other than
a stop codon."""
lost_stop_pattern = '^\*\d+[A-Z?]+\*?$'
if re.search(lost_stop_pattern, hgvs_string):
self.is_lost_stop = True
self.is_non_silent = True
else:
self.is_lost_stop = False
def __set_premature_stop_codon_status(self, hgvs_string):
"""Set whether there is a premature stop codon."""
if re.search('.+\*(\d+)?$', hgvs_string):
self.is_premature_stop_codon = True
self.is_non_silent = True
# check if it is also a nonsense mutation
if hgvs_string.endswith('*'):
self.is_nonsense_mutation = True
else:
self.is_nonsense_mutation = False
else:
self.is_premature_stop_codon = False
self.is_nonsense_mutation = False
def __set_indel_status(self):
"""Sets flags related to the mutation being an indel."""
# set indel status
if "ins" in self.hgvs_original:
# mutation is insertion
self.is_insertion = True
self.is_deletion = False
self.is_indel = True
self.is_non_silent = True
elif "del" in self.hgvs_original:
# mutation is deletion
self.is_deletion = True
self.is_insertion = False
self.is_indel = True
self.is_non_silent = True
else:
# not an indel
self.is_deletion = False
self.is_insertion = False
self.is_indel = False
def __set_unkown_effect(self, hgvs_string):
"""Sets a flag for unkown effect according to HGVS syntax. The
COSMIC database also uses unconventional questionmarks to denote
missing information.
Args:
hgvs_string (str): hgvs syntax with "p." removed
"""
# Standard use by HGVS of indicating unknown effect.
unknown_effect_list = ['?', '(=)', '='] # unknown effect symbols
if hgvs_string in unknown_effect_list:
self.unknown_effect = True
elif "(" in hgvs_string:
# parethesis in HGVS indicate expected outcomes
self.unknown_effect = True
else:
self.unknown_effect = False
# detect if there are missing information. commonly COSMIC will
# have insertions with p.?_?ins? or deleteions with ?del indicating
# missing information.
if "?" in hgvs_string:
self.is_missing_info = True
else:
self.is_missing_info = False
def __set_no_protein(self, hgvs_string):
"""Set a flag for no protein expected. ("p.0" or "p.0?")
Args:
hgvs_string (str): hgvs syntax with "p." removed
"""
no_protein_list = ['0', '0?'] # no protein symbols
if hgvs_string in no_protein_list:
self.is_no_protein = True
self.is_non_silent = True
else:
self.is_no_protein = False
def __parse_hgvs_syntax(self, aa_hgvs):
"""Convert HGVS syntax for amino acid change into attributes.
Specific details of the mutation are stored in attributes like
self.intial (prior to mutation), sel.pos (mutation position),
self.mutated (mutation), and self.stop_pos (position of stop codon,
if any).
Args:
aa_hgvs (str): amino acid string following HGVS syntax
"""
self.is_valid = True # assume initially the syntax is legitimate
self.is_synonymous = False # assume not synonymous until proven
if self.unknown_effect or self.is_no_protein:
# unknown effect from mutation. usually denoted as p.?
self.pos = None
pass
elif self.is_lost_stop:
self.initial = aa_hgvs[0]
self.mutated = re.findall('([A-Z?*]+)$', aa_hgvs)[0]
self.pos = int(re.findall('^\*(\d+)', aa_hgvs)[0])
self.stop_pos = None
elif self.is_lost_start:
self.initial = aa_hgvs[0]
self.mutated = aa_hgvs[-1]
self.pos = int(aa_hgvs[1:-1])
elif self.is_missense:
self.initial = aa_hgvs[0]
self.mutated = aa_hgvs[-1]
self.pos = int(aa_hgvs[1:-1])
self.stop_pos = None # not a nonsense mutation
if self.initial == self.mutated:
self.is_synonymous = True
self.is_non_silent = False
elif self.mutated == '*':
self.is_nonsense_mutation = True
elif self.is_indel:
if self.is_insertion:
if not self.is_missing_info:
self.initial = re.findall('([A-Z])\d+', aa_hgvs)[:2] # first two
self.pos = tuple(map(int, re.findall('[A-Z](\d+)', aa_hgvs)[:2])) # first two
self.mutated = re.findall('(?<=INS)[A-Z0-9?*]+', aa_hgvs)[0]
self.mutated = self.mutated.strip('?') # remove the missing info '?'
else:
self.initial = ''
self.pos = tuple()
self.mutated = ''
elif self.is_deletion:
if not self.is_missing_info:
self.initial = re.findall('([A-Z])\d+', aa_hgvs)
self.pos = tuple(map(int, re.findall('[A-Z](\d+)', aa_hgvs)))
self.mutated = re.findall('(?<=DEL)[A-Z]*', aa_hgvs)[0]
else:
self.initial = ''
self.pos = tuple()
self.mutated = ''
elif self.is_frame_shift:
self.initial = aa_hgvs[0]
self.mutated = ''
try:
self.pos = int(re.findall('[A-Z*](\d+)', aa_hgvs)[0])
if self.is_premature_stop_codon:
self.stop_pos = int(re.findall('\*>?(\d+)$', aa_hgvs)[0])
else:
self.stop_pos = None
except IndexError:
# unconventional usage of indicating frameshifts will cause
# index errors. For example, in some cases 'fs' is not used.
# In other cases, either amino acids were not included or
# just designated as a '?'
self.logger.debug('(Parsing-Problem) frame shift hgvs string: "%s"' % aa_hgvs)
self.pos = None
self.stop_pos = None
self.is_missing_info = True
elif self.is_nonsense_mutation:
self.initial = aa_hgvs[0]
self.mutated = '*' # there is actually a stop codon
self.stop_pos = 0 # indicates same position is stop codon
try:
self.pos = int(aa_hgvs[1:-1])
except ValueError:
# wierd error of p.E217>D*
self.is_valid = False
self.pos = None
self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '
'syntax: ' + aa_hgvs)
if self.initial == self.mutated:
# classify nonsense-to-nonsense mutations as synonymous
self.is_synonymous = True
self.is_non_silent = False
else:
self.is_valid = False # did not match any of the possible cases
self.logger.debug('(Parsing-Problem) Invalid HGVS Amino Acid '
'syntax: ' + aa_hgvs)
|
class AminoAcid(object):
''' The AminoAcid class represents aa changes in the Cosmic Database.
The AminoAcid class follows the syntax of HGVS
(http://www.hgvs.org/mutnomen/recs-prot.html). Although the parsing
generally follows the HGVS syntax, it does have slight variations where
the COSMIC database uses idiosyncratic conventions.
The AminoAcid class is only intended to be used in the following way::
>>> aa = AminoAcid('p.A267C')
>>> aa.pos
267
>>> aa.is_missense
True
Namely, the constructor parses the necessary HGVS string and then extracts
attributes that can be used.
'''
def __init__(self, hgvs='', occurrence=1):
pass
def set_mutation_type(self, mut_type=''):
'''Sets the mutation type attribute to a single label based on
attribute flags.
Kwargs:
mut_type (str): value to set self.mut_type
'''
pass
def set_occurrence(self, occur):
pass
def set_amino_acid(self, aa):
'''Set amino acid change and position.'''
pass
def __set_mutation_status(self):
pass
def __set_mutation_type(self, hgvs_string):
'''Interpret the mutation type (missense, etc.) and set appropriate flags.
Args:
hgvs_string (str): hgvs syntax with "p." removed
'''
pass
def __set_missense_status(self, hgvs_string):
'''Sets the self.is_missense flag.'''
pass
def __set_lost_start_status(self, hgvs_string):
'''Sets the self.is_lost_start flag.'''
pass
def __set_frame_shift_status(self):
'''Check for frame shift and set the self.is_frame_shift flag.'''
pass
def __set_lost_stop_status(self, hgvs_string):
'''Check if the stop codon was mutated to something other than
a stop codon.'''
pass
def __set_premature_stop_codon_status(self, hgvs_string):
'''Set whether there is a premature stop codon.'''
pass
def __set_indel_status(self):
'''Sets flags related to the mutation being an indel.'''
pass
def __set_unkown_effect(self, hgvs_string):
'''Sets a flag for unkown effect according to HGVS syntax. The
COSMIC database also uses unconventional questionmarks to denote
missing information.
Args:
hgvs_string (str): hgvs syntax with "p." removed
'''
pass
def __set_no_protein(self, hgvs_string):
'''Set a flag for no protein expected. ("p.0" or "p.0?")
Args:
hgvs_string (str): hgvs syntax with "p." removed
'''
pass
def __parse_hgvs_syntax(self, aa_hgvs):
'''Convert HGVS syntax for amino acid change into attributes.
Specific details of the mutation are stored in attributes like
self.intial (prior to mutation), sel.pos (mutation position),
self.mutated (mutation), and self.stop_pos (position of stop codon,
if any).
Args:
aa_hgvs (str): amino acid string following HGVS syntax
'''
pass
| 16 | 13 | 20 | 1 | 15 | 6 | 4 | 0.51 | 1 | 7 | 0 | 0 | 15 | 24 | 15 | 15 | 337 | 30 | 219 | 46 | 203 | 111 | 178 | 46 | 162 | 18 | 1 | 3 | 62 |
142,192 |
KarchinLab/probabilistic2020
|
KarchinLab_probabilistic2020/prob2020/python/bed_line.py
|
prob2020.python.bed_line.BedLine
|
class BedLine(object):
"""The BedLine class parses a single line in a BED file.
A BED file line is parsed into object attributes within the constructor.
Genomic positions can also be queried against the BedLine object to retreive
a relative position along the CDS.
Example
-------
>>> bline_str = "chr3 41240941 41281939 CTNNB1 0 + 41265559 41280833 0 16 220,61,228,254,239,202,145,104,339,159,120,151,122,61,221,630, 0,24570,25075,25503,25883,26209,27757,33890,34078,34688,36273,36898,37137,38565,39683,40368,"
>>> bed = BedLine(bline_str)
>>> bed.chrom
'chr3'
>>> bed.strand
'+'
>>> bed.query_position('+', 'chr3', 41265559)
0
"""
def __init__(self, line):
# make input a list of strings
if type(line) is str:
line = line.split('\t')
elif type(line) is list:
pass
else:
raise ValueError('Expected either a string or a list of strings')
# bed tuple maintains the orginal data from the bed line
tmp = dict(zip(BedTuple._fields, line))
self.bed_tuple = BedTuple(**tmp)
# convenience attributes
self.gene_name = self.bed_tuple.name
self.chrom = self.bed_tuple.chrom
self.chrom_start = int(self.bed_tuple.chromStart)
self.strand = self.bed_tuple.strand
# set exons
self._init_exons()
def _filter_utr(self, ex):
"""Filter out UTR regions from the exon list (ie retain only coding regions).
Coding regions are defined by the thickStart and thickEnd attributes.
Parameters
----------
ex : list of tuples
list of exon positions, [(ex1_start, ex1_end), ...]
Returns
-------
filtered_exons : list of tuples
exons with UTR regions "chopped" out
"""
# define coding region
coding_start = int(self.bed_tuple.thickStart)
coding_end = int(self.bed_tuple.thickEnd)
if (coding_end - coding_start) < 3:
# coding regions should have at least one codon, otherwise the
# region is invalid and does not indicate an actually coding region
logger.debug('{0} has an invalid coding region specified by thickStart '
'and thickEnd (only {1} bps long). This gene is possibly either '
'a non-coding transcript or a pseudo gene.'.format(self.gene_name,
coding_end-coding_start))
return []
filtered_exons = []
for exon in ex:
if exon[0] > coding_end and exon[1] > coding_end:
# exon has no coding region
pass
elif exon[0] < coding_start and exon[1] < coding_start:
# exon has no coding region
pass
elif exon[0] <= coding_start and exon[1] >= coding_end:
# coding region entirely contained within one exon
filtered_exons.append((coding_start, coding_end))
elif exon[0] <= coding_start and exon[1] < coding_end:
# only beginning of exon contains UTR
filtered_exons.append((coding_start, exon[1]))
elif exon[0] > coding_start and exon[1] >= coding_end:
# only end part of exon contains UTR
filtered_exons.append((exon[0], coding_end))
elif exon[0] > coding_start and exon[1] < coding_end:
# entire exon is coding
filtered_exons.append(exon)
else:
# exon is only a UTR
pass
return filtered_exons
def _init_exons(self):
"""Sets a list of position intervals for each exon.
Only coding regions as defined by thickStart and thickEnd are kept.
Exons are stored in the self.exons attribute.
"""
exon_starts = [self.chrom_start + int(s)
for s in self.bed_tuple.blockStarts.strip(',').split(',')]
exon_sizes = list(map(int, self.bed_tuple.blockSizes.strip(',').split(',')))
# get chromosome intervals
exons = [(exon_starts[i], exon_starts[i] + exon_sizes[i])
for i in range(len(exon_starts))]
no_utr_exons = self._filter_utr(exons)
self.exons = no_utr_exons
self.exon_lens = [e[1] - e[0] for e in self.exons]
self.num_exons = len(self.exons)
self.cds_len = sum(self.exon_lens)
self.five_ss_len = 2*(self.num_exons-1)
self.three_ss_len = 2*(self.num_exons-1)
self._init_splice_site_pos()
def _init_splice_site_pos(self):
# dictionary mapping internal position format to position
# in list of 5'/3' splice sites
self.pos2ss = {}
tmp_pos = self.cds_len
# init 5' splice site positions
for i in range(self.num_exons-1):
self.pos2ss[tmp_pos] = ("5'", i, 1)
self.pos2ss[tmp_pos+1] = ("5'", i, 2)
tmp_pos+=2
# init 3' splice site positions
for i in range(self.num_exons-1):
self.pos2ss[tmp_pos] = ("3'", i, 1)
self.pos2ss[tmp_pos+1] = ("3'", i, 2)
tmp_pos+=2
def get_exons(self):
"""Returns the list of exons that have UTR regions filtered out."""
return self.exons
def get_num_exons(self):
"""Returns the number of exons (not including UTR exons)."""
return self.num_exons
def init_genome_coordinates(self) :
"""Creates the self.seqpos2genome dictionary that converts positions
relative to the sequence to genome coordinates."""
self.seqpos2genome = {}
# record genome positions for each sequence position
seq_pos = 0
for estart, eend in self.exons:
for genome_pos in range(estart, eend):
if self.strand == '+':
self.seqpos2genome[seq_pos] = genome_pos
elif self.strand == '-':
tmp = self.cds_len - seq_pos - 1
self.seqpos2genome[tmp] = genome_pos
seq_pos += 1
# recode 5' splice site locations
for i in range(0, self.five_ss_len):
seq_pos = self.cds_len + i
ss_ix = i // 2 # the ss_ix'th 5'ss starting from upstream tx
pos_in_ss = i % 2 # whether first/second nuc in splice site
# determine genome coordinates for 5' splice site
if self.strand == '+':
self.seqpos2genome[seq_pos] = self.exons[ss_ix][1] + pos_in_ss
else:
exon_pos = -1 - ss_ix
self.seqpos2genome[seq_pos] = self.exons[exon_pos][0] - pos_in_ss - 1
# recode 3' splice site locations
for i in range(0, self.three_ss_len):
seq_pos = self.cds_len + self.five_ss_len + i
ss_ix = i // 2 # the ss_ix'th 3'ss starting from upstream tx
pos_in_ss = i % 2 # whether first/second nuc in splice site
# determine genome coordinates for 3' splice site
if self.strand == '+':
self.seqpos2genome[seq_pos] = self.exons[ss_ix+1][0] - 2 + pos_in_ss
else:
exon_pos = -1 - ss_ix
self.seqpos2genome[seq_pos] = self.exons[exon_pos-1][1] + 1 - pos_in_ss
def query_position(self, strand, chr, genome_coord):
"""Provides the relative position on the coding sequence for a given
genomic position.
Parameters
----------
chr : str
chromosome, provided to check validity of query
genome_coord : int
0-based position for mutation, actually used to get relative coding pos
Returns
-------
pos : int or None
position of mutation in coding sequence, returns None if mutation
does not match region found in self.exons
"""
# first check if valid
pos = None # initialize to invalid pos
if chr != self.chrom:
#logger.debug('Wrong chromosome queried. You provided {0} but gene is '
#'on {1}.'.format(chr, self.chrom))
# return pos
pass
if type(genome_coord) is list:
# handle case for indels
pos_left = self.query_position(strand, chr, genome_coord[0])
pos_right = self.query_position(strand, chr, genome_coord[1])
if pos_left is not None or pos_right is not None:
return [pos_left, pos_right]
else:
return None
# return position if contained within coding region or splice site
for i, (estart, eend) in enumerate(self.exons):
# in coding region
if estart <= genome_coord < eend:
if strand == '+':
prev_lens = sum(self.exon_lens[:i]) # previous exon lengths
pos = prev_lens + (genome_coord - estart)
elif strand == '-':
prev_lens = sum(self.exon_lens[:i]) # previous exon lengths
pos = prev_lens + (genome_coord - estart)
pos = self.cds_len - pos - 1 # flip coords because neg strand
return pos
# in splice site
elif (eend <= genome_coord < eend + 2) and i != self.num_exons-1:
if strand == '+':
pos = self.cds_len + 2*i + (genome_coord - eend)
elif strand == '-':
pos = self.cds_len + self.five_ss_len + 2*(self.num_exons-(i+2)) + (genome_coord - eend)
return pos
# in splice site
elif (estart - 2 <= genome_coord < estart) and i != 0:
if strand == '-':
pos = self.cds_len + 2*(self.num_exons-(i+2)) + (genome_coord - (estart - 2))
elif strand == '+':
pos = self.cds_len + self.five_ss_len + 2*(i-1) + (genome_coord - (estart - 2))
return pos
return pos
|
class BedLine(object):
'''The BedLine class parses a single line in a BED file.
A BED file line is parsed into object attributes within the constructor.
Genomic positions can also be queried against the BedLine object to retreive
a relative position along the CDS.
Example
-------
>>> bline_str = "chr3 41240941 41281939 CTNNB1 0 + 41265559 41280833 0 16 220,61,228,254,239,202,145,104,339,159,120,151,122,61,221,630, 0,24570,25075,25503,25883,26209,27757,33890,34078,34688,36273,36898,37137,38565,39683,40368,"
>>> bed = BedLine(bline_str)
>>> bed.chrom
'chr3'
>>> bed.strand
'+'
>>> bed.query_position('+', 'chr3', 41265559)
0
'''
def __init__(self, line):
pass
def _filter_utr(self, ex):
'''Filter out UTR regions from the exon list (ie retain only coding regions).
Coding regions are defined by the thickStart and thickEnd attributes.
Parameters
----------
ex : list of tuples
list of exon positions, [(ex1_start, ex1_end), ...]
Returns
-------
filtered_exons : list of tuples
exons with UTR regions "chopped" out
'''
pass
def _init_exons(self):
'''Sets a list of position intervals for each exon.
Only coding regions as defined by thickStart and thickEnd are kept.
Exons are stored in the self.exons attribute.
'''
pass
def _init_splice_site_pos(self):
pass
def get_exons(self):
'''Returns the list of exons that have UTR regions filtered out.'''
pass
def get_num_exons(self):
'''Returns the number of exons (not including UTR exons).'''
pass
def init_genome_coordinates(self) :
'''Creates the self.seqpos2genome dictionary that converts positions
relative to the sequence to genome coordinates.'''
pass
def query_position(self, strand, chr, genome_coord):
'''Provides the relative position on the coding sequence for a given
genomic position.
Parameters
----------
chr : str
chromosome, provided to check validity of query
genome_coord : int
0-based position for mutation, actually used to get relative coding pos
Returns
-------
pos : int or None
position of mutation in coding sequence, returns None if mutation
does not match region found in self.exons
'''
pass
| 9 | 7 | 27 | 3 | 17 | 9 | 5 | 0.67 | 1 | 10 | 0 | 0 | 8 | 13 | 8 | 8 | 247 | 33 | 133 | 46 | 124 | 89 | 111 | 46 | 102 | 14 | 1 | 3 | 41 |
142,193 |
KarchinLab/probabilistic2020
|
KarchinLab_probabilistic2020/prob2020/python/gene_sequence.py
|
prob2020.python.gene_sequence.GeneSequence
|
class GeneSequence(object):
def __init__(self, fasta_obj,
nuc_context=1.5):
self.fasta = fasta_obj
self.nuc_context = nuc_context
def set_gene(self, bed_line):
"""Updates gene sequence for a new gene (bed line).
Parameters
----------
bed_line : BedLine
BedLine object representing a single gene in a BED file
"""
self.bed = bed_line # gene that was specified as BED
self._reset_seq() # fetch sequence for bed line
def _reset_seq(self):
"""Updates attributes for gene represented in the self.bed attribute.
Sequences are always upper case.
"""
exon_seq_list, five_ss_seq_list, three_ss_seq_list = self._fetch_seq()
self.exon_seq = ''.join(exon_seq_list)
self.three_prime_seq = three_ss_seq_list
self.five_prime_seq = five_ss_seq_list
self._to_upper() # make sure all sequences are in upper case
def add_germline_variants(self, germline_nucs, coding_pos):
"""Add potential germline variants into the nucleotide sequence.
Sequenced individuals may potentially have a SNP at a somatic mutation position.
Therefore they may differ from the reference genome. This method updates the gene
germline gene sequence to match the actual individual.
Parameters
----------
germline_nucs : list of str
list of DNA nucleotides containing the germline letter
coding_pos : int
0-based nucleotide position in coding sequence
NOTE: the self.exon_seq attribute is updated, no return value
"""
if len(germline_nucs) != len(coding_pos):
raise ValueError('Each germline nucleotide should have a coding position')
es = list(self.exon_seq)
for i in range(len(germline_nucs)):
gl_nuc, cpos = germline_nucs[i].upper(), coding_pos[i]
if not utils.is_valid_nuc(gl_nuc):
raise ValueError('{0} is not a valid nucleotide'.format(gl_nuc))
if cpos >= 0:
es[cpos] = gl_nuc
self.exon_seq = ''.join(es)
def _to_upper(self):
"""Convert sequences to upper case."""
self.exon_seq = self.exon_seq.upper()
self.three_prime_seq = [s.upper() for s in self.three_prime_seq]
self.five_prime_seq = [s.upper() for s in self.five_prime_seq]
def _fetch_seq(self):
"""Fetches gene sequence from PySAM fasta object.
Returns
-------
exons : list of str
list of exon nucleotide sequences
five_prime_ss : list of str
list of 5' splice site sequences
three_prime_ss : list of str
list of 3' splice site sequences
"""
exons = []
three_prime_ss = []
five_prime_ss = []
num_exons = self.bed.get_num_exons()
for i in range(num_exons):
# add exon sequence
tmp_id = '{0};exon{1}'.format(self.bed.gene_name, i)
tmp_exon = self.fasta.fetch(reference=tmp_id)
exons.append(tmp_exon)
# add splice site sequence
tmp_id_3ss = '{0};3SS'.format(tmp_id)
tmp_id_5ss = '{0};5SS'.format(tmp_id)
if num_exons == 1:
pass
elif i == 0:
tmp_5ss = self.fasta.fetch(tmp_id_5ss)
five_prime_ss.append(tmp_5ss)
elif i == (num_exons - 1):
tmp_3ss = self.fasta.fetch(tmp_id_3ss)
three_prime_ss.append(tmp_3ss)
else:
tmp_3ss = self.fasta.fetch(tmp_id_3ss)
tmp_5ss = self.fasta.fetch(tmp_id_5ss)
three_prime_ss.append(tmp_3ss)
five_prime_ss.append(tmp_5ss)
return exons, five_prime_ss, three_prime_ss
|
class GeneSequence(object):
def __init__(self, fasta_obj,
nuc_context=1.5):
pass
def set_gene(self, bed_line):
'''Updates gene sequence for a new gene (bed line).
Parameters
----------
bed_line : BedLine
BedLine object representing a single gene in a BED file
'''
pass
def _reset_seq(self):
'''Updates attributes for gene represented in the self.bed attribute.
Sequences are always upper case.
'''
pass
def add_germline_variants(self, germline_nucs, coding_pos):
'''Add potential germline variants into the nucleotide sequence.
Sequenced individuals may potentially have a SNP at a somatic mutation position.
Therefore they may differ from the reference genome. This method updates the gene
germline gene sequence to match the actual individual.
Parameters
----------
germline_nucs : list of str
list of DNA nucleotides containing the germline letter
coding_pos : int
0-based nucleotide position in coding sequence
NOTE: the self.exon_seq attribute is updated, no return value
'''
pass
def _to_upper(self):
'''Convert sequences to upper case.'''
pass
def _fetch_seq(self):
'''Fetches gene sequence from PySAM fasta object.
Returns
-------
exons : list of str
list of exon nucleotide sequences
five_prime_ss : list of str
list of 5' splice site sequences
three_prime_ss : list of str
list of 3' splice site sequences
'''
pass
| 7 | 5 | 16 | 1 | 9 | 6 | 2 | 0.69 | 1 | 3 | 0 | 0 | 6 | 6 | 6 | 6 | 102 | 14 | 54 | 29 | 46 | 37 | 50 | 28 | 43 | 5 | 1 | 2 | 14 |
142,194 |
KarchinLab/probabilistic2020
|
KarchinLab_probabilistic2020/prob2020/python/sequence_context.py
|
prob2020.python.sequence_context.SequenceContext
|
class SequenceContext(object):
"""The SequenceContext class allows for deciphering sequence context
and for randomly permuting mutation positions while respecting sequence context.
"""
def __init__(self, gene_seq, seed=None):
self._init_context(gene_seq)
self.seed = seed # seed for random number generator
context_names = prob2020.python.mutation_context.get_all_context_names(gene_seq.nuc_context)
self.prng_dict = {
c: np.random.RandomState(seed=self.seed)
for c in context_names
}
self.prng_dict['N'] = np.random.RandomState(seed=self.seed)
def _init_context(self, gene_seq):
"""Initializes attributes defining mutation contexts and their position.
The self.context2pos and self.pos2context dictionaries map from
sequence context to sequence position and sequence position to
sequence context, respectively. These attributes allow for randomly
sampling of mutation positions while respecting sequence context in the
randomization-based test.
Parameters
----------
gene_seq : GeneSequence
GeneSequence object from the gene_sequence module
"""
self.context2pos, self.pos2context = {}, {}
gene_len = len(gene_seq.exon_seq) # get length of CDS
five_ss_len = 2*len(gene_seq.five_prime_seq) # total length of 5' splice sites
three_ss_len = 2*len(gene_seq.three_prime_seq) # total length of 3' splice sites
if gene_seq.nuc_context in [1, 2]:
# case where context matters
index_context = int(gene_seq.nuc_context) - 1 # subtract 1 since python is zero-based index
for i in range(index_context, gene_len):
nucs = gene_seq.exon_seq[i-index_context:i+1]
self.context2pos.setdefault(nucs, [])
self.context2pos[nucs].append(i)
self.pos2context[i] = nucs
# sequence context for five prime splice site
for i, five_ss in enumerate(gene_seq.five_prime_seq):
first_nucs = five_ss[1-index_context:1+1]
second_nucs = five_ss[2-index_context:2+1]
first_pos = 2*i + gene_len
second_pos = 2*i + gene_len + 1
self.context2pos.setdefault(first_nucs, [])
self.context2pos[first_nucs].append(first_pos)
self.context2pos.setdefault(second_nucs, [])
self.context2pos[second_nucs].append(second_pos)
self.pos2context[first_pos] = first_nucs
self.pos2context[second_pos] = second_nucs
# sequence context for three prime splice site
for i, three_ss in enumerate(gene_seq.three_prime_seq):
first_nucs = three_ss[1-index_context:1+1]
second_nucs = three_ss[2-index_context:2+1]
first_pos = 2*i + gene_len + five_ss_len
second_pos = 2*i + gene_len + five_ss_len + 1
self.context2pos.setdefault(first_nucs, [])
self.context2pos[first_nucs].append(first_pos)
self.context2pos.setdefault(second_nucs, [])
self.context2pos[second_nucs].append(second_pos)
self.pos2context[first_pos] = first_nucs
self.pos2context[second_pos] = second_nucs
# hack solution for context for first nuc
if gene_seq.exon_seq and gene_seq.nuc_context > 1:
self.pos2context[0] = gene_seq.exon_seq[0] * 2
self.context2pos.setdefault(gene_seq.exon_seq[0]*2, [])
self.context2pos[gene_seq.exon_seq[0]*2].append(0)
elif gene_seq.nuc_context in [1.5, 3]:
# use the nucleotide context from chasm if nuc
# context is 1.5 otherwise always use a three
# nucleotide context
ncontext = gene_seq.nuc_context
for i in range(1, len(gene_seq.exon_seq)-1):
nucs = gene_seq.exon_seq[i-1:i+2]
if ncontext == 1.5:
context = prob2020.python.mutation_context.get_chasm_context(nucs)
else:
context = nucs
self.context2pos.setdefault(context, [])
self.context2pos[context].append(i)
self.pos2context[i] = context
# sequence context for five prime splice site
for i, five_ss in enumerate(gene_seq.five_prime_seq):
first_nucs = five_ss[:3]
second_nucs = five_ss[1:4]
first_pos = 2*i + gene_len
second_pos = 2*i + gene_len + 1
if ncontext == 1.5:
first_context = prob2020.python.mutation_context.get_chasm_context(first_nucs)
second_context = prob2020.python.mutation_context.get_chasm_context(second_nucs)
else:
first_context = first_nucs
second_context = second_nucs
self.context2pos.setdefault(first_context, [])
self.context2pos[first_context].append(first_pos)
self.context2pos.setdefault(second_context, [])
self.context2pos[second_context].append(second_pos)
self.pos2context[first_pos] = first_context
self.pos2context[second_pos] = second_context
# sequence context for three prime splice site
for i, three_ss in enumerate(gene_seq.three_prime_seq):
first_nucs = three_ss[:3]
second_nucs = three_ss[1:4]
first_pos = 2*i + gene_len + five_ss_len
second_pos = 2*i + gene_len + five_ss_len + 1
if ncontext == 1.5:
first_context = prob2020.python.mutation_context.get_chasm_context(first_nucs)
second_context = prob2020.python.mutation_context.get_chasm_context(second_nucs)
else:
first_context = first_nucs
second_context = second_nucs
self.context2pos.setdefault(first_context, [])
self.context2pos[first_context].append(first_pos)
self.context2pos.setdefault(second_context, [])
self.context2pos[second_context].append(second_pos)
self.pos2context[first_pos] = first_context
self.pos2context[second_pos] = second_context
# hack solution for context for first nuc
if gene_seq.exon_seq:
first_nuc = gene_seq.exon_seq[0] + gene_seq.exon_seq[:2]
if ncontext == 1.5:
first_context = prob2020.python.mutation_context.get_chasm_context(first_nuc)
else:
first_context = first_nuc
self.pos2context[0] = first_context
self.context2pos.setdefault(first_context, [])
self.context2pos[first_context].append(0)
last_nuc = gene_seq.exon_seq[-2:] + gene_seq.exon_seq[-1]
if ncontext == 1.5:
last_context = prob2020.python.mutation_context.get_chasm_context(last_nuc)
else:
last_context = last_nuc
last_pos = len(gene_seq.exon_seq) - 1
self.pos2context[last_pos] = first_context
self.context2pos.setdefault(last_context, [])
self.context2pos[last_context].append(last_pos)
else:
# case where there is no context,
# mutations occur with uniform probability at each
# position
for i in range(gene_len + five_ss_len + three_ss_len):
self.pos2context[i] = 'None'
self.context2pos['None'] = range(gene_len + five_ss_len + three_ss_len)
def is_valid_context(self, ctxt):
"""Checks if provided context is valid (previously seen).
Parameters
----------
ctxt : str
mutation context
"""
return ctxt in self.context2pos
def random_context_pos(self, num, num_permutations, context):
"""Samples with replacement available positions matching the
sequence context.
Note: this method does random sampling only for an individual
sequence context.
Parameters
----------
num : int
Number of positions to sample for each permutation. This
is the number of actually observed mutations having the
matching sequence context for this gene.
num_permutations : int
Number of permutations for permutation test.
context : str
Sequence context.
Returns
-------
random_pos : np.array
num_permutations X num sized array that represents the
randomly sampled positions for a specific context.
"""
# make sure provide context is valid
if not self.is_valid_context(context):
error_msg = 'Context ({0}) was never seen in sequence.'.format(context)
raise ValueError(error_msg)
# make sure sampling is a positive integer
if num < 1:
error_msg = ('There must be at least one sample (specified {0}) '
'for a context'.format(num))
raise ValueError(error_msg)
# randomly select from available positions that fit the specified context
available_pos = self.context2pos[context]
random_pos = self.prng_dict[context].choice(available_pos, (num_permutations, num))
return random_pos
def random_pos(self, context_iterable, num_permutations):
"""Obtains random positions w/ replacement which match sequence context.
Parameters
----------
context_iterable: iterable containing two element tuple
Records number of mutations in each context. context_iterable
should be something like [('AA', 5), ...].
num_permutations : int
Number of permutations used in the permutation test.
Returns
-------
position_list : list
Contains context string and the randomly chosen positions
for that context.
"""
position_list = []
for contxt, n in context_iterable:
pos_array = self.random_context_pos(n, num_permutations, contxt)
position_list.append([contxt, pos_array])
return position_list
|
class SequenceContext(object):
'''The SequenceContext class allows for deciphering sequence context
and for randomly permuting mutation positions while respecting sequence context.
'''
def __init__(self, gene_seq, seed=None):
pass
def _init_context(self, gene_seq):
'''Initializes attributes defining mutation contexts and their position.
The self.context2pos and self.pos2context dictionaries map from
sequence context to sequence position and sequence position to
sequence context, respectively. These attributes allow for randomly
sampling of mutation positions while respecting sequence context in the
randomization-based test.
Parameters
----------
gene_seq : GeneSequence
GeneSequence object from the gene_sequence module
'''
pass
def is_valid_context(self, ctxt):
'''Checks if provided context is valid (previously seen).
Parameters
----------
ctxt : str
mutation context
'''
pass
def random_context_pos(self, num, num_permutations, context):
'''Samples with replacement available positions matching the
sequence context.
Note: this method does random sampling only for an individual
sequence context.
Parameters
----------
num : int
Number of positions to sample for each permutation. This
is the number of actually observed mutations having the
matching sequence context for this gene.
num_permutations : int
Number of permutations for permutation test.
context : str
Sequence context.
Returns
-------
random_pos : np.array
num_permutations X num sized array that represents the
randomly sampled positions for a specific context.
'''
pass
def random_pos(self, context_iterable, num_permutations):
'''Obtains random positions w/ replacement which match sequence context.
Parameters
----------
context_iterable: iterable containing two element tuple
Records number of mutations in each context. context_iterable
should be something like [('AA', 5), ...].
num_permutations : int
Number of permutations used in the permutation test.
Returns
-------
position_list : list
Contains context string and the randomly chosen positions
for that context.
'''
pass
| 6 | 5 | 43 | 3 | 27 | 14 | 5 | 0.56 | 1 | 4 | 0 | 0 | 5 | 4 | 5 | 5 | 224 | 20 | 134 | 36 | 128 | 75 | 123 | 36 | 117 | 17 | 1 | 3 | 24 |
142,195 |
KarrLab/nose2unitth
|
KarrLab_nose2unitth/nose2unitth/core.py
|
nose2unitth.core.Converter
|
class Converter(object):
""" Convert nose-style test reports to UnitTH-style test reports by splitting modules into separate XML files """
@staticmethod
def run(in_file_nose, out_dir_unitth):
""" Convert nose-style test reports to UnitTH-style test reports by splitting modules into separate XML files
Args:
in_file_nose (:obj:`str`): path to nose-style test report
out_file_unitth (:obj:`str`): path to save UnitTH-style test reports
"""
suites = Converter.read_nose(in_file_nose)
Converter.write_unitth(suites, out_dir_unitth)
@staticmethod
def read_nose(in_file):
""" Parse nose-style test reports into a `dict`
Args:
in_file (:obj:`str`): path to nose-style test report
Returns:
:obj:`dict`: dictionary of test suites
"""
suites = {}
doc_xml = minidom.parse(in_file)
suite_xml = doc_xml.getElementsByTagName("testsuite")[0]
for case_xml in suite_xml.getElementsByTagName('testcase'):
classname = case_xml.getAttribute('classname')
if classname not in suites:
suites[classname] = []
case = {
'name': case_xml.getAttribute('name'),
'time': float(case_xml.getAttribute('time')),
}
skipped_xml = case_xml.getElementsByTagName('skipped')
if skipped_xml:
if skipped_xml[0].hasAttribute('type'):
type = skipped_xml[0].getAttribute('type')
else:
type = ''
case['skipped'] = {
'type': type,
'message': skipped_xml[0].getAttribute('message'),
'text': "".join([child.nodeValue for child in skipped_xml[0].childNodes]),
}
failure_xml = case_xml.getElementsByTagName('failure')
if failure_xml:
if failure_xml[0].hasAttribute('type'):
type = failure_xml[0].getAttribute('type')
else:
type = ''
case['failure'] = {
'type': type,
'message': failure_xml[0].getAttribute('message'),
'text': "".join([child.nodeValue for child in failure_xml[0].childNodes]),
}
error_xml = case_xml.getElementsByTagName('error')
if error_xml:
if error_xml[0].hasAttribute('type'):
type = error_xml[0].getAttribute('type')
else:
type = ''
case['error'] = {
'type': type,
'message': error_xml[0].getAttribute('message'),
'text': "".join([child.nodeValue for child in error_xml[0].childNodes]),
}
suites[classname].append(case)
return suites
@staticmethod
def write_unitth(suites, out_dir):
""" Write UnitTH-style test reports
Args:
suites (:obj:`dict`): dictionary of test suites
out_dir (:obj:`str`): path to save UnitTH-style test reports
"""
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
for classname, cases in suites.items():
doc_xml = minidom.Document()
suite_xml = doc_xml.createElement('testsuite')
suite_xml.setAttribute('name', classname)
suite_xml.setAttribute('tests', str(len(cases)))
suite_xml.setAttribute('errors', str(sum('error' in case for case in cases)))
suite_xml.setAttribute('failures', str(sum('failure' in case for case in cases)))
suite_xml.setAttribute('skipped', str(sum('skipped' in case for case in cases)))
suite_xml.setAttribute('time', '{:.3f}'.format(sum(case['time'] for case in cases)))
doc_xml.appendChild(suite_xml)
for case in cases:
case_xml = doc_xml.createElement('testcase')
case_xml.setAttribute('classname', classname)
case_xml.setAttribute('name', case['name'])
case_xml.setAttribute('time', '{:.3f}'.format(case['time']))
suite_xml.appendChild(case_xml)
if 'skipped' in case:
skipped_xml = doc_xml.createElement('skipped')
skipped_xml.setAttribute('type', case['skipped']['type'])
skipped_xml.setAttribute('message', case['skipped']['message'])
case_xml.appendChild(skipped_xml)
skipped_text_xml = doc_xml.createCDATASection(case['skipped']['text'])
skipped_xml.appendChild(skipped_text_xml)
if 'failure' in case:
failure_xml = doc_xml.createElement('failure')
failure_xml.setAttribute('type', case['failure']['type'])
failure_xml.setAttribute('message', case['failure']['message'])
case_xml.appendChild(failure_xml)
failure_text_xml = doc_xml.createCDATASection(case['failure']['text'])
failure_xml.appendChild(failure_text_xml)
if 'error' in case:
error_xml = doc_xml.createElement('error')
error_xml.setAttribute('type', case['error']['type'])
error_xml.setAttribute('message', case['error']['message'])
case_xml.appendChild(error_xml)
error_text_xml = doc_xml.createCDATASection(case['error']['text'])
error_xml.appendChild(error_text_xml)
with open(os.path.join(out_dir, '{}.xml'.format(classname)), 'w') as output:
doc_xml.writexml(output, encoding='utf-8', addindent='', newl="")
doc_xml.unlink()
|
class Converter(object):
''' Convert nose-style test reports to UnitTH-style test reports by splitting modules into separate XML files '''
@staticmethod
def run(in_file_nose, out_dir_unitth):
''' Convert nose-style test reports to UnitTH-style test reports by splitting modules into separate XML files
Args:
in_file_nose (:obj:`str`): path to nose-style test report
out_file_unitth (:obj:`str`): path to save UnitTH-style test reports
'''
pass
@staticmethod
def read_nose(in_file):
''' Parse nose-style test reports into a `dict`
Args:
in_file (:obj:`str`): path to nose-style test report
Returns:
:obj:`dict`: dictionary of test suites
'''
pass
@staticmethod
def write_unitth(suites, out_dir):
''' Write UnitTH-style test reports
Args:
suites (:obj:`dict`): dictionary of test suites
out_dir (:obj:`str`): path to save UnitTH-style test reports
'''
pass
| 7 | 4 | 43 | 6 | 31 | 5 | 6 | 0.18 | 1 | 3 | 0 | 0 | 0 | 0 | 3 | 3 | 136 | 22 | 97 | 29 | 90 | 17 | 76 | 25 | 72 | 9 | 1 | 3 | 17 |
142,196 |
KarrLab/nose2unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_nose2unitth/nose2unitth/__main__.py
|
nose2unitth.__main__.App.Meta
|
class Meta:
label = 'nose2unitth'
base_controller = 'base'
handlers = [BaseController]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
142,197 |
KarrLab/nose2unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_nose2unitth/nose2unitth/__main__.py
|
nose2unitth.__main__.BaseController
|
class BaseController(cement.Controller):
""" Base controller for command line application """
class Meta:
label = 'base'
description = "Convert nose-style test reports into UnitTH-style test reports"
arguments = [
(['-v', '--version'], dict(action='version',
version=nose2unitth.__version__)),
(['in_file_nose'], dict(type=str,
help='path to nose test report that should be converted')),
(['out_dir_unitth'], dict(type=str,
help='path where converted test report should be saved')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
Converter.run(args.in_file_nose, args.out_dir_unitth)
|
class BaseController(cement.Controller):
''' Base controller for command line application '''
class Meta:
@cement.ex(hide=True)
def _default(self):
pass
| 4 | 1 | 3 | 0 | 3 | 0 | 1 | 0.08 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 16 | 2 | 13 | 8 | 9 | 1 | 8 | 7 | 5 | 1 | 1 | 0 | 1 |
142,198 |
KarrLab/nose2unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_nose2unitth/nose2unitth/__main__.py
|
nose2unitth.__main__.BaseController.Meta
|
class Meta:
label = 'base'
description = "Convert nose-style test reports into UnitTH-style test reports"
arguments = [
(['-v', '--version'], dict(action='version',
version=nose2unitth.__version__)),
(['in_file_nose'], dict(type=str,
help='path to nose test report that should be converted')),
(['out_dir_unitth'], dict(type=str,
help='path where converted test report should be saved')),
]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0 | 8 | 4 | 7 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
142,199 |
KarrLab/nose2unitth
|
KarrLab_nose2unitth/tests/test_nose2unitth.py
|
test_nose2unitth.TestNose2UnitTH
|
class TestNose2UnitTH(unittest.TestCase):
def test_read_nose(self):
report = Converter.read_nose(NOSE_FIXTURE_FILE_NAME)
for key, val in NOSE_FIXTURE_OBJ.items():
self.assertEqual(NOSE_FIXTURE_OBJ[key], report[key])
self.assertEqual(NOSE_FIXTURE_OBJ, report)
def test_write_unitth(self):
out_dir = tempfile.mkdtemp()
shutil.rmtree(out_dir)
Converter.write_unitth(NOSE_FIXTURE_OBJ, out_dir)
# 1 XML file per suite
self.assertEqual(set(NOSE_FIXTURE_OBJ.keys()), set([x.replace('.xml', '') for x in os.listdir(out_dir)]))
# XML files have tests
for suite_name in NOSE_FIXTURE_OBJ:
with open(os.path.join(out_dir, '{}.xml'.format(suite_name)), 'r') as report:
self.assertEqual(NOSE_FIXTURE_XML[suite_name], report.read())
# cleanup
shutil.rmtree(out_dir)
def test_cli(self):
out_dir = tempfile.mkdtemp()
with nose2unitth_cli(argv=[NOSE_FIXTURE_FILE_NAME, out_dir]) as app:
app.run()
# 1 XML file per suite
self.assertEqual(set(NOSE_FIXTURE_OBJ.keys()), set([x.replace('.xml', '') for x in os.listdir(out_dir)]))
# XML files have tests
for suite_name in NOSE_FIXTURE_OBJ:
with open(os.path.join(out_dir, '{}.xml'.format(suite_name)), 'r') as report:
self.assertEqual(NOSE_FIXTURE_XML[suite_name], report.read())
# cleanup
shutil.rmtree(out_dir)
def test_raw_cli(self):
with mock.patch('sys.argv', ['nose2unitth', '--help']):
with self.assertRaises(SystemExit) as context:
nose2unitth.__main__.main()
self.assertRegex(context.Exception, 'usage: nose2unitth')
def test_api(self):
self.assertIsInstance(nose2unitth.Converter, type)
|
class TestNose2UnitTH(unittest.TestCase):
def test_read_nose(self):
pass
def test_write_unitth(self):
pass
def test_cli(self):
pass
def test_raw_cli(self):
pass
def test_api(self):
pass
| 6 | 0 | 9 | 2 | 6 | 1 | 2 | 0.19 | 1 | 4 | 1 | 0 | 5 | 0 | 5 | 77 | 51 | 14 | 31 | 16 | 25 | 6 | 31 | 12 | 25 | 2 | 2 | 2 | 8 |
142,200 |
KarrLab/unitth
|
KarrLab_unitth/tests/test_unitth.py
|
test_unitth.TestDummy
|
class TestDummy(unittest.TestCase):
def test_dummy_test(self):
pass
|
class TestDummy(unittest.TestCase):
def test_dummy_test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 4 | 1 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,201 |
KarrLab/unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_unitth/unitth/__main__.py
|
unitth.__main__.App.Meta
|
class Meta:
label = 'unitth'
base_controller = 'base'
handlers = [BaseController]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0 | 4 | 4 | 3 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
142,202 |
KarrLab/unitth
|
KarrLab_unitth/unitth/core.py
|
unitth.core.UnitTH
|
class UnitTH(object):
""" UnitTH python interface """
@staticmethod
def run(xml_report_dir, xml_report_filter='TEST-', html_report_path='.',
generate_exec_time_graphs=True, html_report_dir='report.th',
initial_java_heap_size=None, maximum_java_heap_size=None):
""" Use UnitTH to generate a test history report
Args:
xml_report_dir (:obj:`str`): Parent directory of XML reports of individual builds to generate a history report of
xml_report_filter (:obj:`str`, optional): Starts-with filter for individual reports with `xml_report_dir` that should
be included in the history report. Set `xml_report_filter` to '' to include all files/subdirectories in the history
report.
html_report_path (:obj:`str`, optional): Directory of HTML reports of individual builds (relative to XML directories of
individual builds)
generate_exec_time_graphs (:obj:`bool`, optional): Whether execution time graphs shall be generated
html_report_dir (:obj:`str`, optional): directory to store generated HTML history report
initial_java_heap_size (:obj:`str`, optional): initial Java heap size
maximum_java_heap_size (:obj:`str`, optional): maximum Java heap size
"""
cmd = []
cmd.append('java')
if initial_java_heap_size:
cmd.append('-Xms{}'.format(initial_java_heap_size))
if maximum_java_heap_size:
cmd.append('-Xmx{}'.format(maximum_java_heap_size))
cmd.append('-Dunitth.xml.report.filter={}'.format(xml_report_filter))
cmd.append('-Dunitth.html.report.path={}'.format(html_report_path))
cmd.append('-Dunitth.generate.exectimegraphs={}'.format('{}'.format(generate_exec_time_graphs).lower()))
cmd.append('-Dunitth.report.dir={}'.format(html_report_dir))
cmd.append('-jar')
cmd.append('"{}"'.format(resource_filename('unitth', 'lib/unitth/unitth.jar')))
cmd.append(xml_report_dir)
subprocess.check_call(' '.join(cmd), shell=True)
|
class UnitTH(object):
''' UnitTH python interface '''
@staticmethod
def run(xml_report_dir, xml_report_filter='TEST-', html_report_path='.',
generate_exec_time_graphs=True, html_report_dir='report.th',
initial_java_heap_size=None, maximum_java_heap_size=None):
''' Use UnitTH to generate a test history report
Args:
xml_report_dir (:obj:`str`): Parent directory of XML reports of individual builds to generate a history report of
xml_report_filter (:obj:`str`, optional): Starts-with filter for individual reports with `xml_report_dir` that should
be included in the history report. Set `xml_report_filter` to '' to include all files/subdirectories in the history
report.
html_report_path (:obj:`str`, optional): Directory of HTML reports of individual builds (relative to XML directories of
individual builds)
generate_exec_time_graphs (:obj:`bool`, optional): Whether execution time graphs shall be generated
html_report_dir (:obj:`str`, optional): directory to store generated HTML history report
initial_java_heap_size (:obj:`str`, optional): initial Java heap size
maximum_java_heap_size (:obj:`str`, optional): maximum Java heap size
'''
pass
| 3 | 2 | 35 | 5 | 17 | 13 | 3 | 0.74 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 39 | 6 | 19 | 6 | 14 | 14 | 16 | 3 | 14 | 3 | 1 | 1 | 3 |
142,203 |
KarrLab/unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_unitth/unitth/__main__.py
|
unitth.__main__.BaseController
|
class BaseController(cement.Controller):
""" Base controller for command line application """
class Meta:
label = 'base'
description = "Generate HTML unit test history report"
arguments = [
(['-v', '--version'], dict(action='version', version=unitth.__version__)),
(['xml_report_dir'], dict(type=str,
help='Parent directory of XML reports of individual builds to generate a history report of')),
(['--xml-report-filter'], dict(type=str, default='TEST-', nargs='?',
help='Starts-with filter for individual reports with `xml-report-dir` that should be included in the history report. Set `xml-report-filter` to '' to include all files/subdirectories in the history report.')),
(['--html-report-path'], dict(type=str, default='.',
help='Directory of HTML reports of individual builds(relative to XML directories of individual builds)')),
(['--generate-exec-time-graphs'], dict(type=bool, default=True,
help='Whether execution time graphs shall be generated')),
(['--html-report-dir'], dict(type=str, default='report.th',
help='directory to store generated HTML history report')),
(['--initial_java_heap_size'], dict(type=str, default=None,
help='Initial Java heap size')),
(['--maximum_java_heap_size'], dict(type=str, default=None,
help='Maximum Java heap size')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
UnitTH.run(args.xml_report_dir,
xml_report_filter=args.xml_report_filter or '',
html_report_path=args.html_report_path,
generate_exec_time_graphs=args.generate_exec_time_graphs,
html_report_dir=args.html_report_dir,
initial_java_heap_size=args.initial_java_heap_size,
maximum_java_heap_size=args.maximum_java_heap_size,
)
|
class BaseController(cement.Controller):
''' Base controller for command line application '''
class Meta:
@cement.ex(hide=True)
def _default(self):
pass
| 4 | 1 | 10 | 0 | 10 | 0 | 1 | 0.03 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 1 | 35 | 2 | 32 | 8 | 28 | 1 | 8 | 7 | 5 | 1 | 1 | 0 | 1 |
142,204 |
KarrLab/unitth
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KarrLab_unitth/unitth/__main__.py
|
unitth.__main__.BaseController.Meta
|
class Meta:
label = 'base'
description = "Generate HTML unit test history report"
arguments = [
(['-v', '--version'], dict(action='version', version=unitth.__version__)),
(['xml_report_dir'], dict(type=str,
help='Parent directory of XML reports of individual builds to generate a history report of')),
(['--xml-report-filter'], dict(type=str, default='TEST-', nargs='?',
help='Starts-with filter for individual reports with `xml-report-dir` that should be included in the history report. Set `xml-report-filter` to '' to include all files/subdirectories in the history report.')),
(['--html-report-path'], dict(type=str, default='.',
help='Directory of HTML reports of individual builds(relative to XML directories of individual builds)')),
(['--generate-exec-time-graphs'], dict(type=bool, default=True,
help='Whether execution time graphs shall be generated')),
(['--html-report-dir'], dict(type=str, default='report.th',
help='directory to store generated HTML history report')),
(['--initial_java_heap_size'], dict(type=str, default=None,
help='Initial Java heap size')),
(['--maximum_java_heap_size'], dict(type=str, default=None,
help='Maximum Java heap size')),
]
|
class Meta:
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20 | 0 | 20 | 4 | 19 | 0 | 4 | 4 | 3 | 0 | 0 | 0 | 0 |
142,205 |
KarrLab/unitth
|
KarrLab_unitth/tests/test_unitth.py
|
test_unitth.TestUnitTH
|
class TestUnitTH(unittest.TestCase):
def setUp(self):
report_dir = tempfile.mkdtemp()
nose_dir = os.path.join(report_dir, 'nose')
unitth_dir = os.path.join(report_dir, 'unitth')
html_dir = os.path.join(report_dir, 'html')
os.mkdir(nose_dir)
os.mkdir(unitth_dir)
os.mkdir(html_dir)
if not nose.run(argv=['nosetests', 'tests/test_unitth.py:TestDummy.test_dummy_test',
'--with-xunit', '--xunit-file', os.path.join(nose_dir, '1.xml')]):
sys.exit(1)
shutil.copyfile(os.path.join(nose_dir, '1.xml'), os.path.join(nose_dir, '2.xml'))
nose2unitth.run(os.path.join(nose_dir, '1.xml'), os.path.join(unitth_dir, '1'))
nose2unitth.run(os.path.join(nose_dir, '2.xml'), os.path.join(unitth_dir, '2'))
with open(os.path.join(os.path.join(unitth_dir, '1', 'index.html')), 'w') as html_file:
html_file.write(JunitParser(os.path.join(nose_dir, '1.xml')).html())
with open(os.path.join(os.path.join(unitth_dir, '2', 'index.html')), 'w') as html_file:
html_file.write(JunitParser(os.path.join(nose_dir, '2.xml')).html())
self._report_dir = report_dir
self._nose_dir = nose_dir
self._unitth_dir = unitth_dir
self._html_dir = html_dir
def tearDown(self):
shutil.rmtree(self._report_dir)
def test_api(self):
UnitTH.run(os.path.join(self._unitth_dir, '*'), xml_report_filter='', html_report_dir=self._html_dir)
self.assertTrue(os.path.isfile(os.path .join(self._html_dir, 'index.html')))
def test_cli(self):
argv = [
os.path.join(self._unitth_dir, '*'),
'--xml-report-filter', '',
'--html-report-dir', self._html_dir,
]
with UnitThCli(argv=argv) as app:
app.run()
self.assertTrue(os.path.isfile(os.path.join(self._html_dir, 'index.html')))
def test_raw_cli(self):
with mock.patch('sys.argv', ['unitth', '--help']):
with self.assertRaises(SystemExit) as context:
unitth.__main__.main()
self.assertRegex(context.Exception, 'usage: unitth')
def test_low_memory(self):
UnitTH.run(os.path.join(self._unitth_dir, '*'), xml_report_filter='', html_report_dir=self._html_dir,
initial_java_heap_size='32m', maximum_java_heap_size='64m')
self.assertTrue(os.path.isfile(os.path .join(self._html_dir, 'index.html')))
def test_api(self):
self.assertIsInstance(unitth.UnitTH, type)
|
class TestUnitTH(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_api(self):
pass
def test_cli(self):
pass
def test_raw_cli(self):
pass
def test_low_memory(self):
pass
def test_api(self):
pass
| 8 | 0 | 8 | 1 | 7 | 0 | 1 | 0 | 1 | 3 | 1 | 0 | 7 | 4 | 7 | 79 | 61 | 13 | 48 | 20 | 40 | 0 | 42 | 17 | 34 | 2 | 2 | 2 | 8 |
142,206 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/tests.py
|
tests.DuolingoLoginTest
|
class DuolingoLoginTest(unittest.TestCase):
lingo = None
@classmethod
def setUpClass(cls):
cls.lingo = duolingo.Duolingo(USERNAME, PASSWORD)
cls.lang = cls.lingo.user_data.learning_language
@classmethod
def tearDownClass(cls):
if cls.lingo:
cls.lingo.session.close()
def test_get_user_info(self):
response = self.lingo.get_user_info()
assert isinstance(response, dict)
assert "avatar" in response
assert "id" in response
assert "location" in response
assert "learning_language_string" in response
def test_get_settings(self):
response = self.lingo.get_settings()
assert isinstance(response, dict)
assert "deactivated" in response
def test_get_languages(self):
response1 = self.lingo.get_languages(abbreviations=False)
assert isinstance(response1, list)
for lang in response1:
assert isinstance(lang, str)
response2 = self.lingo.get_languages(abbreviations=True)
assert isinstance(response2, list)
for lang in response2:
assert isinstance(lang, str)
assert len(response1) == len(response2)
def test_get_friends(self):
response = self.lingo.get_friends()
assert isinstance(response, list)
for friend in response:
assert "username" in friend
assert "points" in friend
assert isinstance(friend['points'], int)
assert "languages" in friend
assert isinstance(friend['languages'], list)
for lang in friend['languages']:
assert isinstance(lang, str)
def test_get_calendar(self):
response1 = self.lingo.get_calendar()
response2 = self.lingo.get_calendar(self.lang)
for response in [response1, response2]:
assert isinstance(response, list)
for item in response:
assert "skill_id" in item
assert "improvement" in item
assert "event_type" in item
assert "datetime" in item
assert isinstance(item['datetime'], int)
def test_get_streak_info(self):
response = self.lingo.get_streak_info()
assert isinstance(response, dict)
assert "site_streak" in response
assert "daily_goal" in response
assert "streak_extended_today" in response
def test_get_leaderboard(self):
response1 = self.lingo.get_leaderboard('week', datetime.now())
response2 = self.lingo.get_leaderboard('month', datetime.now())
for response in [response1, response2]:
assert isinstance(response, list)
for item in response:
assert "points" in item
assert "unit" in item
assert "id" in item
assert "username" in item
def test_get_language_details(self):
language = self.lingo.get_language_from_abbr(self.lang)
response = self.lingo.get_language_details(language)
assert isinstance(response, dict)
assert "current_learning" in response
assert "language" in response
assert "language_string" in response
assert "learning" in response
assert "level" in response
assert "points" in response
assert "streak" in response
def test_get_language_progress(self):
response = self.lingo.get_language_progress(self.lang)
assert isinstance(response, dict)
assert "language" in response
assert "language_string" in response
assert "level_left" in response
assert "level_percent" in response
assert "level_points" in response
assert "level_progress" in response
assert "next_level" in response
assert "num_skills_learned" in response
assert "points" in response
assert "points_rank" in response
assert "streak" in response
def test_get_known_topics(self):
response = self.lingo.get_known_topics(self.lang)
assert isinstance(response, list)
for topic in response:
assert isinstance(topic, str)
def test_get_unknown_topics(self):
response = self.lingo.get_unknown_topics(self.lang)
assert isinstance(response, list)
for topic in response:
assert isinstance(topic, str)
def test_get_golden_topics(self):
response = self.lingo.get_golden_topics(self.lang)
assert isinstance(response, list)
for topic in response:
assert isinstance(topic, str)
def test_get_reviewable_topics(self):
response = self.lingo.get_reviewable_topics(self.lang)
assert isinstance(response, list)
for topic in response:
assert isinstance(topic, str)
def test_get_known_words(self):
response = self.lingo.get_known_words(self.lang)
assert isinstance(response, list)
for word in response:
assert isinstance(word, str)
def test_get_related_words(self):
# Setup
word = _example_word(self.lang)
# Get value
response = self.lingo.get_related_words(word)
# Check
assert isinstance(response, list)
def test_get_learned_skills(self):
response = self.lingo.get_learned_skills(self.lang)
assert isinstance(response, list)
for skill in response:
assert "language_string" in skill
assert "id" in skill
assert "title" in skill
assert "explanation" in skill
assert "progress_percent" in skill
assert "words" in skill
assert "name" in skill
def test_get_language_from_abbr(self):
response = self.lingo.get_language_from_abbr(self.lang)
assert isinstance(response, str)
def test_get_abbreviation_of(self):
response = self.lingo.get_abbreviation_of('french')
assert isinstance(response, str)
def test_get_translations(self):
response1 = self.lingo.get_translations('e')
response2 = self.lingo.get_translations('e', self.lang)
response3 = self.lingo.get_translations('e', self.lang, 'fr')
for response in [response1, response2, response3]:
assert isinstance(response, dict)
assert "e" in response
assert isinstance(response['e'], list)
response = self.lingo.get_translations(['e', 'a'])
assert isinstance(response, dict)
assert "e" in response
assert isinstance(response['e'], list)
assert "a" in response
assert isinstance(response['a'], list)
def test_segment_translation_word_list(self):
# Nothing should happen to short list
short_list = ["a", "e", "i", "o", "u"]
result = self.lingo._segment_translations_list(short_list)
assert result == [short_list]
# Just under count limit
just_under_count = ["a"] * 1999
result = self.lingo._segment_translations_list(just_under_count)
assert result == [just_under_count]
# Just over count limit
just_over_count = ["a"] * 2000
result = self.lingo._segment_translations_list(just_over_count)
assert result != [just_over_count]
assert result == [["a"] * 1999, ["a"]]
# Just under json length limit
just_under_length = ["aaaaaaaa"] * 1066
result = self.lingo._segment_translations_list(just_under_length)
assert result == [just_under_length]
# Just over json length limit
just_over_length = ["aaaaaaaa"] * 1067
result = self.lingo._segment_translations_list(just_over_length)
assert result != [just_over_length]
assert result == [["aaaaaaaa"] * 1066, ["aaaaaaaa"]]
def test_get_vocabulary(self):
response1 = self.lingo.get_vocabulary()
response2 = self.lingo.get_vocabulary(self.lang)
for response in [response1, response2]:
assert isinstance(response, dict)
assert response['language_string']
assert "language_string" in response
assert "learning_language" in response
assert response["learning_language"] == self.lang
assert "from_language" in response
assert "language_information" in response
assert "vocab_overview" in response
assert isinstance(response["vocab_overview"], list)
def test_get_audio_url(self):
# Setup
word = _example_word(self.lang)
# Test
response = self.lingo.get_audio_url(word)
assert isinstance(response, str)
response = self.lingo.get_audio_url(word, self.lang)
assert isinstance(response, str)
response = self.lingo.get_audio_url("zz")
assert response is None
def test_get_word_definition_by_id(self):
response = self.lingo.get_word_definition_by_id("52383869a8feb3e5cf83dbf7fab9a018")
assert isinstance(response, dict)
keys = ["alternative_forms", "translations", "learning_language_name", "from_language_name", "word"]
for key in keys:
assert key in response
def test_get_daily_xp_progress(self):
response = self.lingo.get_daily_xp_progress()
assert isinstance(response['xp_goal'], int)
assert isinstance(response['xp_today'], int)
assert isinstance(response['lessons_today'], list)
|
class DuolingoLoginTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def test_get_user_info(self):
pass
def test_get_settings(self):
pass
def test_get_languages(self):
pass
def test_get_friends(self):
pass
def test_get_calendar(self):
pass
def test_get_streak_info(self):
pass
def test_get_leaderboard(self):
pass
def test_get_language_details(self):
pass
def test_get_language_progress(self):
pass
def test_get_known_topics(self):
pass
def test_get_unknown_topics(self):
pass
def test_get_golden_topics(self):
pass
def test_get_reviewable_topics(self):
pass
def test_get_known_words(self):
pass
def test_get_related_words(self):
pass
def test_get_learned_skills(self):
pass
def test_get_language_from_abbr(self):
pass
def test_get_abbreviation_of(self):
pass
def test_get_translations(self):
pass
def test_segment_translation_word_list(self):
pass
def test_get_vocabulary(self):
pass
def test_get_audio_url(self):
pass
def test_get_word_definition_by_id(self):
pass
def test_get_daily_xp_progress(self):
pass
| 29 | 0 | 8 | 0 | 8 | 0 | 2 | 0.05 | 1 | 6 | 1 | 1 | 24 | 0 | 26 | 98 | 240 | 26 | 204 | 85 | 175 | 10 | 202 | 83 | 175 | 3 | 2 | 2 | 44 |
142,207 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/tests.py
|
tests.DuolingoOtherUsernameTest
|
class DuolingoOtherUsernameTest(DuolingoLoginTest):
@classmethod
def setUpClass(cls):
cls.lingo = duolingo.Duolingo(USERNAME, PASSWORD)
cls.lingo.set_username(USERNAME2)
cls.lang = cls.lingo.user_data.learning_language
def test_get_daily_xp_progress(self):
try:
self.lingo.get_daily_xp_progress()
assert False, "Should have failed to read daily XP progress."
except duolingo.DuolingoException as e:
assert USERNAME2 in str(e)
assert "Could not get daily XP progress for user" in str(e)
def test_get_vocabulary(self):
try:
self.lingo.get_vocabulary()
assert False, "Should have failed to get vocabulary."
except duolingo.OtherUserException as e:
assert "Vocab cannot be listed when the user has been switched" in str(e)
def test_get_related_words(self):
try:
word = _example_word(self.lang)
self.lingo.get_related_words(word)
assert False, "Should have failed to get related words."
except duolingo.OtherUserException as e:
assert "Vocab cannot be listed when the user has been switched" in str(e)
|
class DuolingoOtherUsernameTest(DuolingoLoginTest):
@classmethod
def setUpClass(cls):
pass
def test_get_daily_xp_progress(self):
pass
def test_get_vocabulary(self):
pass
def test_get_related_words(self):
pass
| 6 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 4 | 3 | 0 | 3 | 0 | 4 | 102 | 30 | 4 | 26 | 10 | 20 | 0 | 25 | 6 | 20 | 2 | 3 | 1 | 7 |
142,208 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.Struct
|
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
|
class Struct:
def __init__(self, **entries):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
142,209 |
KartikTalwar/Duolingo
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/KartikTalwar_Duolingo/tests.py
|
tests.DuolingoTest
|
class DuolingoTest(unittest.TestCase):
@patch("duolingo.Duolingo._get_data")
def test_password_jwt_or_file_needed(self, mock_data):
with self.assertRaises(duolingo.DuolingoException):
duolingo.Duolingo(USERNAME)
mock_data.assert_not_called()
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_password_only_calls_login(self, mock_login, mock_data):
duolingo.Duolingo(USERNAME, PASSWORD)
mock_login.assert_called_once_with()
mock_data.assert_called_once_with()
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_jwt_only_calls_login(self, mock_login, mock_data):
duolingo.Duolingo(USERNAME, jwt="jwt-example")
mock_login.assert_called_once_with()
mock_data.assert_called_once_with()
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_file_only_calls_login(self, mock_login, mock_data):
duolingo.Duolingo(USERNAME, session_file="temp/filename.json")
mock_login.assert_called_once_with()
mock_data.assert_called_once_with()
|
class DuolingoTest(unittest.TestCase):
@patch("duolingo.Duolingo._get_data")
def test_password_jwt_or_file_needed(self, mock_data):
pass
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_password_only_calls_login(self, mock_login, mock_data):
pass
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_jwt_only_calls_login(self, mock_login, mock_data):
pass
@patch("duolingo.Duolingo._login")
@patch("duolingo.Duolingo._get_data")
def test_file_only_calls_login(self, mock_login, mock_data):
pass
| 12 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 4 | 0 | 4 | 76 | 28 | 4 | 24 | 9 | 12 | 0 | 17 | 5 | 12 | 1 | 2 | 1 | 4 |
142,210 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.InsufficientFundsException
|
class InsufficientFundsException(DuolingoException):
pass
|
class InsufficientFundsException(DuolingoException):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
142,211 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.AlreadyHaveStoreItemException
|
class AlreadyHaveStoreItemException(DuolingoException):
pass
|
class AlreadyHaveStoreItemException(DuolingoException):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
142,212 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.CaptchaException
|
class CaptchaException(DuolingoException):
pass
|
class CaptchaException(DuolingoException):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
142,213 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.OtherUserException
|
class OtherUserException(DuolingoException):
"""
This exception is raised when set_username() has been called to get info on another user, but a method has then
been used which cannot give data on that new user.
"""
pass
|
class OtherUserException(DuolingoException):
'''
This exception is raised when set_username() has been called to get info on another user, but a method has then
been used which cannot give data on that new user.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
142,214 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.Duolingo
|
class Duolingo(object):
USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 " \
"Safari/537.36"
def __init__(self, username, password=None, *, jwt=None, session_file=None):
"""
:param username: Username to use for duolingo
:param password: Password to authenticate as user.
:param jwt: Duolingo login token. Will be checked and used if it is valid.
:param session_file: File path to a file that the session token can be stored in, to save repeated login
requests.
"""
self.username = username
self._original_username = username
self.password = password
self.session_file = session_file
self.session = requests.Session()
self.leader_data = None
self.jwt = jwt
if password or jwt or session_file:
self._login()
else:
raise DuolingoException("Password, jwt, or session_file must be specified in order to authenticate.")
self.user_data = Struct(**self._get_data())
self.voice_url_dict = None
def _make_req(self, url, data=None):
headers = {}
if self.jwt is not None:
headers['Authorization'] = 'Bearer ' + self.jwt
headers['User-Agent'] = self.USER_AGENT
req = requests.Request('POST' if data else 'GET',
url,
json=data,
headers=headers,
cookies=self.session.cookies)
prepped = req.prepare()
resp = self.session.send(prepped)
if resp.status_code == 403 and resp.json().get("blockScript") is not None:
raise CaptchaException(
"Request to URL: {}, using user agent {}, was blocked, and requested a captcha to be solved. "
"Try changing the user agent and logging in again.".format(url, self.USER_AGENT)
)
return resp
def _login(self):
"""
Authenticate through ``https://www.duolingo.com/login``.
"""
if self.jwt is None:
self._load_session_from_file()
if self._check_login():
return True
self.jwt = None
login_url = "https://www.duolingo.com/login"
data = {"login": self.username, "password": self.password}
request = self._make_req(login_url, data)
attempt = request.json()
if "failure" not in attempt:
self.jwt = request.headers['jwt']
self._save_session_to_file()
return True
raise DuolingoException("Login failed")
def _load_session_from_file(self):
if self.session_file is None:
return
try:
with open(self.session_file, "r") as f:
self.jwt = json.load(f).get("jwt_session")
except (OSError, JSONDecodeError):
return
def _save_session_to_file(self):
if self.session_file is not None:
with open(self.session_file, "w") as f:
json.dump({"jwt_session": self.jwt}, f)
def _check_login(self):
resp = self._make_req(self.get_user_url())
return resp.status_code == 200
def get_user_url_by_id(self, fields=None):
if fields is None:
fields = []
url = 'https://www.duolingo.com/2017-06-30/users/{}'.format(self.user_data.id)
fields_params = requests.utils.requote_uri(','.join(fields))
if fields_params:
url += '?fields={}'.format(fields_params)
return url
def get_user_url(self):
return "https://duolingo.com/users/%s" % self.username
def set_username(self, username):
self.username = username
self.user_data = Struct(**self._get_data())
def get_leaderboard(self, unit, before):
"""
Get user's rank in the week in descending order, stream from
``https://www.duolingo.com/friendships/leaderboard_activity?unit=week&_=time
:param unit: maybe week or month
:type unit: str
:param before: Datetime in format '2015-07-06 05:42:24'
:type before: Union[datetime, str]
:rtype: List
"""
if not unit:
raise ValueError('Needs unit as argument (week or month)')
if not before:
raise ValueError('Needs str in Datetime format "%Y.%m.%d %H:%M:%S"')
if isinstance(before, datetime):
before = before.strftime("%Y.%m.%d %H:%M:%S")
url = 'https://www.duolingo.com/friendships/leaderboard_activity?unit={}&_={}'
url = url.format(unit, before)
self.leader_data = self._make_req(url).json()
data = []
for result in self.get_friends():
for value in self.leader_data['ranking']:
if result['id'] == int(value):
temp = {'points': int(self.leader_data['ranking'][value]),
'unit': unit,
'id': result['id'],
'username': result['username']}
data.append(temp)
return sorted(data, key=lambda user: user['points'], reverse=True)
def buy_item(self, item_name, abbr):
url = 'https://www.duolingo.com/2017-06-30/users/{}/shop-items'
url = url.format(self.user_data.id)
data = {'itemName': item_name, 'learningLanguage': abbr}
request = self._make_req(url, data)
"""
status code '200' indicates that the item was purchased
returns a text like: {"streak_freeze":"2017-01-10 02:39:59.594327"}
"""
if request.status_code == 400:
resp_json = request.json()
if resp_json.get("error") == "ALREADY_HAVE_STORE_ITEM":
raise AlreadyHaveStoreItemException("Already equipped with {}.".format(item_name))
if resp_json.get("error") == "INSUFFICIENT_FUNDS":
raise InsufficientFundsException("Insufficient funds to purchase {}.".format(item_name))
raise DuolingoException(
"Duolingo returned an unknown error while trying to purchase {}: {}".format(
item_name, resp_json.get("error")
)
)
if not request.ok:
# any other error:
raise DuolingoException("Not possible to buy item.")
def buy_streak_freeze(self):
"""
figure out the users current learning language
use this one as parameter for the shop
"""
lang = self.get_abbreviation_of(self.get_user_info()['learning_language_string'])
if lang is None:
raise DuolingoException('No learning language found')
try:
self.buy_item('streak_freeze', lang)
return True
except AlreadyHaveStoreItemException:
return False
def buy_weekend_amulet(self):
"""
figure out the users current learning language
use this one as parameter for the shop
"""
lang = self.get_abbreviation_of(self.get_user_info()['learning_language_string'])
if lang is None:
raise DuolingoException('No learning language found')
try:
self.buy_item('weekend_amulet', lang)
return True
except AlreadyHaveStoreItemException:
return False
def _switch_language(self, lang):
"""
Change the learned language with
``https://www.duolingo.com/switch_language``.
:param lang: Wanted language abbreviation (example: ``'fr'``)
:type lang: str
"""
data = {"learning_language": lang}
url = "https://www.duolingo.com/switch_language"
request = self._make_req(url, data)
try:
parse = request.json()['tracking_properties']
if parse['learning_language'] == lang:
self.user_data = Struct(**self._get_data())
except ValueError:
raise DuolingoException('Failed to switch language')
def get_data_by_user_id(self, fields=None):
"""
Get user's data from ``https://www.duolingo.com/2017-06-30/users/<user_id>``.
"""
if fields is None:
fields = []
get = self._make_req(self.get_user_url_by_id(fields))
if get.status_code == 404:
raise DuolingoException('User not found')
else:
return get.json()
def _get_data(self):
"""
Get user's data from ``https://www.duolingo.com/users/<username>``.
"""
get = self._make_req(self.get_user_url())
if get.status_code == 404:
raise Exception('User not found')
else:
return get.json()
@staticmethod
def _make_dict(keys, array):
data = {}
for key in keys:
if type(array) == dict:
data[key] = array[key]
else:
data[key] = getattr(array, key, None)
return data
@staticmethod
def _compute_dependency_order_func(skills):
# Create dictionary:
skills_dict = {}
for skill in skills:
skills_dict[skill['name']] = skill
# Get ordinal for all dependencies
for skill in skills:
skill['dependency_order'] = Duolingo._get_skill_ordinal(skills_dict, skill, [])
@staticmethod
def _get_skill_ordinal(skills_dict, skill, breadcrumbs):
# If name is already in breadcrumbs, we've found a loop
if skill['name'] in breadcrumbs:
raise DuolingoException("Loop encountered: {}".format(breadcrumbs + [skill['name']]))
# If order already set for this skill, return it
if "dependency_order" in skill:
return skill["dependency_order"]
# If no dependencies, set order on this skill to 1
if not skill['dependencies_name']:
skill['dependency_order'] = 1
return 1
# Calculate order based on order of dependencies
new_breadcrumbs = breadcrumbs + [skill['name']]
order = 1 + max(
[
Duolingo._get_skill_ordinal(
skills_dict,
skills_dict[name],
new_breadcrumbs
)
for name in skill['dependencies_name']
]
)
skill["dependency_order"] = order
return order
def get_settings(self):
"""Get user settings."""
keys = ['notify_comment', 'deactivated', 'is_follower_by',
'is_following']
return self._make_dict(keys, self.user_data)
def get_languages(self, abbreviations=False):
"""
Get practiced languages.
:param abbreviations: Get language as abbreviation or not
:type abbreviations: bool
:return: List of languages
:rtype: list of str
"""
data = []
for lang in self.user_data.languages:
if lang['learning']:
if abbreviations:
data.append(lang['language'])
else:
data.append(lang['language_string'])
return data
def get_language_from_abbr(self, abbr):
"""Get language full name from abbreviation."""
for language in self.user_data.languages:
if language['language'] == abbr:
return language['language_string']
return None
def get_abbreviation_of(self, name):
"""Get abbreviation of a language."""
for language in self.user_data.languages:
if language['language_string'].lower() == name.lower():
return language['language']
return None
def get_language_details(self, language):
"""Get user's status about a language."""
for lang in self.user_data.languages:
if language == lang['language_string']:
return lang
return {}
def get_user_info(self):
"""Get user's informations."""
fields = ['username', 'bio', 'id', 'num_following', 'cohort',
'language_data', 'num_followers', 'learning_language_string',
'created', 'contribution_points', 'gplus_id', 'twitter_id',
'admin', 'invites_left', 'location', 'fullname', 'avatar',
'ui_language']
return self._make_dict(fields, self.user_data)
def get_streak_info(self):
"""Get user's streak informations."""
fields = ['daily_goal', 'site_streak', 'streak_extended_today']
return self._make_dict(fields, self.user_data)
def _is_current_language(self, abbr):
"""Get if user is learning a language."""
return abbr in self.user_data.language_data.keys()
def get_calendar(self, language_abbr=None):
"""Get user's last actions."""
if language_abbr:
if not self._is_current_language(language_abbr):
self._switch_language(language_abbr)
return self.user_data.language_data[language_abbr]['calendar']
else:
return self.user_data.calendar
def get_language_progress(self, lang):
"""Get informations about user's progression in a language."""
if not self._is_current_language(lang):
self._switch_language(lang)
fields = ['streak', 'language_string', 'level_progress',
'num_skills_learned', 'level_percent', 'level_points',
'points_rank', 'next_level', 'level_left', 'language',
'points', 'fluency_score', 'level']
return self._make_dict(fields, self.user_data.language_data[lang])
def get_friends(self):
"""Get user's friends."""
for k, v in self.user_data.language_data.items():
data = []
for friend in v['points_ranking_data']:
temp = {'username': friend['username'],
'id': friend['id'],
'points': friend['points_data']['total'],
'languages': [i['language_string'] for i in
friend['points_data']['languages']]}
data.append(temp)
return data
def get_known_words(self, lang):
"""Get a list of all words learned by user in a language."""
words = []
for topic in self.user_data.language_data[lang]['skills']:
if topic['learned']:
words += topic['words']
return list(set(words))
def get_learned_skills(self, lang):
"""
Return the learned skill objects sorted by the order they were learned
in.
"""
skills = [
skill for skill in self.user_data.language_data[lang]['skills']
]
self._compute_dependency_order_func(skills)
return [skill for skill in
sorted(skills, key=lambda skill: skill['dependency_order'])
if skill['learned']]
def get_known_topics(self, lang):
"""Return the topics learned by a user in a language."""
return [topic['title']
for topic in self.user_data.language_data[lang]['skills']
if topic['learned']]
def get_unknown_topics(self, lang):
"""Return the topics remaining to learn by a user in a language."""
return [topic['title']
for topic in self.user_data.language_data[lang]['skills']
if not topic['learned']]
def get_golden_topics(self, lang):
"""Return the topics mastered ("golden") by a user in a language."""
return [topic['title']
for topic in self.user_data.language_data[lang]['skills']
if topic['learned'] and topic['strength'] == 1.0]
def get_reviewable_topics(self, lang):
"""Return the topics learned but not golden by a user in a language."""
return [topic['title']
for topic in self.user_data.language_data[lang]['skills']
if topic['learned'] and topic['strength'] < 1.0]
def get_translations(self, words, source=None, target=None):
"""
Get words' translations from
``https://d2.duolingo.com/api/1/dictionary/hints/<source>/<target>?tokens=``<words>``
:param words: A single word or a list
:type: str or list of str
:param source: Source language as abbreviation
:type source: str
:param target: Destination language as abbreviation
:type target: str
:return: Dict with words as keys and translations as values
"""
if not source:
source = self.user_data.ui_language
if not target:
target = list(self.user_data.language_data.keys())[0]
list_segments = self._segment_translations_list(words)
results = dict()
for segment in list_segments:
results = {**results, **self._get_raw_translations(segment, source, target)}
return results
def _segment_translations_list(self, words):
# These seem to be the length limits before Duolingo's API rejects the request
word_count_limit = 2000
word_json_limit = 12800
# Handy internal function
def is_word_list_valid(word_list):
return (
len(word_list) < word_count_limit
and len(json.dumps(word_list)) < word_json_limit
)
# Fast return for simple lists
if is_word_list_valid(words):
return [words]
# Start building segments until they trip the limits
segments = []
segment = []
for word in words:
if not is_word_list_valid(segment + [word]):
segments.append(segment)
segment = []
segment.append(word)
segments.append(segment)
return segments
def _get_raw_translations(self, words, target, source):
word_parameter = json.dumps(words, separators=(',', ':'))
url = "https://d2.duolingo.com/api/1/dictionary/hints/{}/{}?tokens={}" \
.format(target, source, word_parameter)
request = self.session.get(url)
try:
return request.json()
except ValueError:
raise DuolingoException('Could not get translations')
def get_vocabulary(self, language_abbr=None):
"""Get overview of user's vocabulary in a language."""
if self.username != self._original_username:
raise OtherUserException("Vocab cannot be listed when the user has been switched.")
if language_abbr and not self._is_current_language(language_abbr):
self._switch_language(language_abbr)
overview_url = "https://www.duolingo.com/vocabulary/overview"
overview_request = self._make_req(overview_url)
overview = overview_request.json()
return overview
_cloudfront_server_url = None
_homepage_text = None
@property
def _homepage(self):
if self._homepage_text:
return self._homepage_text
homepage_url = "https://www.duolingo.com"
request = self._make_req(homepage_url)
self._homepage_text = request.text
return self._homepage
@property
def _cloudfront_server(self):
if self._cloudfront_server_url:
return self._cloudfront_server_url
server_list = re.search(r'//.+\.cloudfront\.net', self._homepage)
self._cloudfront_server_url = "https:{}".format(server_list.group(0))
return self._cloudfront_server_url
_tts_voices = None
def _process_tts_voices(self):
voices_js = re.search(r'duo\.tts_multi_voices = {.+};',
self._homepage).group(0)
voices = voices_js[voices_js.find("{"):voices_js.find("}") + 1]
self._tts_voices = json.loads(voices)
def get_language_voices(self, language_abbr=None):
if not language_abbr:
language_abbr = list(self.user_data.language_data.keys())[0]
voices = []
if not self._tts_voices:
self._process_tts_voices()
for voice in self._tts_voices[language_abbr]:
if voice == language_abbr:
voices.append('default')
else:
voices.append(voice.replace('{}/'.format(language_abbr), ''))
return voices
def get_audio_url(self, word, language_abbr=None, rand=True, voice=None):
# Check word is in vocab
if word is None:
raise DuolingoException('A word must be specified to use this function')
word = word.lower()
# Get default language abbr
if not language_abbr:
language_abbr = list(self.user_data.language_data.keys())[0]
if language_abbr not in self.user_data.language_data:
raise DuolingoException("This language is not one you are studying")
# Populate voice url dict
if self.voice_url_dict is None or language_abbr not in self.voice_url_dict:
self._populate_voice_url_dictionary(language_abbr)
# If no audio exists for a word, return None
if word not in self.voice_url_dict[language_abbr]:
return None
# Get word audio links
word_links = list(self.voice_url_dict[language_abbr][word])
# If a voice is specified, get that one or None
if voice:
for word_link in word_links:
if "/{}/".format(voice) in word_link:
return word_link
return None
# If random, shuffle
if rand:
return random.choice(word_links)
return word_links[0]
def _populate_voice_url_dictionary(self, lang_abbr):
if self.voice_url_dict is None:
self.voice_url_dict = {}
self.voice_url_dict[lang_abbr] = {}
# Get skill IDs
skill_ids = []
for skill in self.user_data.language_data[lang_abbr]['skills']:
skill_ids.append(skill['id'])
# Scrape all sessions and create voice url dictionary
for skill_id in skill_ids:
req_data = {
"fromLanguage": "en" if lang_abbr != "en" else "de",
"learningLanguage": lang_abbr,
"challengeTypes": ["definition", "translate"],
"skillId": skill_id,
"type": "SKILL_PRACTICE",
"juicy": True,
"smartTipsVersion": 2
}
resp = self._make_req("https://www.duolingo.com/2017-06-30/sessions", req_data)
if resp.status_code != 200:
continue
resp_data = resp.json()
for challenge in resp_data['challenges']:
if "prompt" in challenge and "tts" in challenge:
self._add_to_voice_url_dict(lang_abbr, challenge['prompt'], challenge['tts'])
if challenge.get("metadata") and challenge['metadata'].get("non_character_tts"):
for word, url in challenge['metadata']['non_character_tts']['tokens'].items():
self._add_to_voice_url_dict(lang_abbr, word, url)
if "tokens" in challenge:
self._add_token_list_to_voice_url_dict(lang_abbr, challenge["tokens"])
def _add_token_list_to_voice_url_dict(self, lang_abbr, token_list):
for token in token_list:
if isinstance(token, list):
self._add_token_list_to_voice_url_dict(lang_abbr, token)
if isinstance(token, dict) and token.get("tts") and token.get("value"):
self._add_to_voice_url_dict(lang_abbr, token['value'], token['tts'])
def _add_to_voice_url_dict(self, lang_abbr, word, url):
word = word.lower()
if word not in self.voice_url_dict[lang_abbr]:
self.voice_url_dict[lang_abbr][word] = set()
self.voice_url_dict[lang_abbr][word].add(url)
def get_related_words(self, word, language_abbr=None):
overview = self.get_vocabulary(language_abbr)
for word_data in overview['vocab_overview']:
if word_data['normalized_string'] == word.lower():
related_lexemes = word_data['related_lexemes']
return [w for w in overview['vocab_overview']
if w['lexeme_id'] in related_lexemes]
def get_word_definition_by_id(self, lexeme_id):
"""
Get the dictionary entry from
``https://www.duolingo.com/api/1/dictionary_page?lexeme_id=``<lexeme_id>``
:param lexeme_id: Identifier of the word
:type: str
:return: The dictionary entry for the given word
"""
url = "https://www.duolingo.com/api/1/dictionary_page?lexeme_id=%s" % lexeme_id
request = self.session.get(url)
try:
return request.json()
except:
raise Exception('Could not get word definition')
def get_daily_xp_progress(self):
daily_progress = self.get_data_by_user_id(["xpGoal", "xpGains", "streakData"])
if not daily_progress:
raise DuolingoException(
"Could not get daily XP progress for user \"{}\". Are you logged in as that user?".format(self.username)
)
# xpGains lists the lessons completed on the last day where lessons were done.
# We use the streakData.updatedTimestamp to get the last "midnight", and get lessons after that.
reported_timestamp = daily_progress['streakData']['updatedTimestamp']
reported_midnight = datetime.fromtimestamp(reported_timestamp)
midnight = datetime.fromordinal(datetime.today().date().toordinal())
# Sometimes the update is marked into the future. When this is the case
# we fall back on the system time for midnight.
time_discrepancy = min(midnight - reported_midnight, timedelta(0))
update_cutoff = round((reported_midnight + time_discrepancy).timestamp())
lessons = [lesson for lesson in daily_progress['xpGains'] if
lesson['time'] > update_cutoff]
return {
"xp_goal": daily_progress['xpGoal'],
"lessons_today": lessons,
"xp_today": sum(x['xp'] for x in lessons)
}
|
class Duolingo(object):
def __init__(self, username, password=None, *, jwt=None, session_file=None):
'''
:param username: Username to use for duolingo
:param password: Password to authenticate as user.
:param jwt: Duolingo login token. Will be checked and used if it is valid.
:param session_file: File path to a file that the session token can be stored in, to save repeated login
requests.
'''
pass
def _make_req(self, url, data=None):
pass
def _login(self):
'''
Authenticate through ``https://www.duolingo.com/login``.
'''
pass
def _load_session_from_file(self):
pass
def _save_session_to_file(self):
pass
def _check_login(self):
pass
def get_user_url_by_id(self, fields=None):
pass
def get_user_url_by_id(self, fields=None):
pass
def set_username(self, username):
pass
def get_leaderboard(self, unit, before):
'''
Get user's rank in the week in descending order, stream from
``https://www.duolingo.com/friendships/leaderboard_activity?unit=week&_=time
:param unit: maybe week or month
:type unit: str
:param before: Datetime in format '2015-07-06 05:42:24'
:type before: Union[datetime, str]
:rtype: List
'''
pass
def buy_item(self, item_name, abbr):
pass
def buy_streak_freeze(self):
'''
figure out the users current learning language
use this one as parameter for the shop
'''
pass
def buy_weekend_amulet(self):
'''
figure out the users current learning language
use this one as parameter for the shop
'''
pass
def _switch_language(self, lang):
'''
Change the learned language with
``https://www.duolingo.com/switch_language``.
:param lang: Wanted language abbreviation (example: ``'fr'``)
:type lang: str
'''
pass
def get_data_by_user_id(self, fields=None):
'''
Get user's data from ``https://www.duolingo.com/2017-06-30/users/<user_id>``.
'''
pass
def _get_data(self):
'''
Get user's data from ``https://www.duolingo.com/users/<username>``.
'''
pass
@staticmethod
def _make_dict(keys, array):
pass
@staticmethod
def _compute_dependency_order_func(skills):
pass
@staticmethod
def _get_skill_ordinal(skills_dict, skill, breadcrumbs):
pass
def get_settings(self):
'''Get user settings.'''
pass
def get_languages(self, abbreviations=False):
'''
Get practiced languages.
:param abbreviations: Get language as abbreviation or not
:type abbreviations: bool
:return: List of languages
:rtype: list of str
'''
pass
def get_language_from_abbr(self, abbr):
'''Get language full name from abbreviation.'''
pass
def get_abbreviation_of(self, name):
'''Get abbreviation of a language.'''
pass
def get_language_details(self, language):
'''Get user's status about a language.'''
pass
def get_user_info(self):
'''Get user's informations.'''
pass
def get_streak_info(self):
'''Get user's streak informations.'''
pass
def _is_current_language(self, abbr):
'''Get if user is learning a language.'''
pass
def get_calendar(self, language_abbr=None):
'''Get user's last actions.'''
pass
def get_language_progress(self, lang):
'''Get informations about user's progression in a language.'''
pass
def get_friends(self):
'''Get user's friends.'''
pass
def get_known_words(self, lang):
'''Get a list of all words learned by user in a language.'''
pass
def get_learned_skills(self, lang):
'''
Return the learned skill objects sorted by the order they were learned
in.
'''
pass
def get_known_topics(self, lang):
'''Return the topics learned by a user in a language.'''
pass
def get_unknown_topics(self, lang):
'''Return the topics remaining to learn by a user in a language.'''
pass
def get_golden_topics(self, lang):
'''Return the topics mastered ("golden") by a user in a language.'''
pass
def get_reviewable_topics(self, lang):
'''Return the topics learned but not golden by a user in a language.'''
pass
def get_translations(self, words, source=None, target=None):
'''
Get words' translations from
``https://d2.duolingo.com/api/1/dictionary/hints/<source>/<target>?tokens=``<words>``
:param words: A single word or a list
:type: str or list of str
:param source: Source language as abbreviation
:type source: str
:param target: Destination language as abbreviation
:type target: str
:return: Dict with words as keys and translations as values
'''
pass
def _segment_translations_list(self, words):
pass
def is_word_list_valid(word_list):
pass
def _get_raw_translations(self, words, target, source):
pass
def get_vocabulary(self, language_abbr=None):
'''Get overview of user's vocabulary in a language.'''
pass
@property
def _homepage(self):
pass
@property
def _cloudfront_server(self):
pass
def _process_tts_voices(self):
pass
def get_language_voices(self, language_abbr=None):
pass
def get_audio_url(self, word, language_abbr=None, rand=True, voice=None):
pass
def _populate_voice_url_dictionary(self, lang_abbr):
pass
def _add_token_list_to_voice_url_dict(self, lang_abbr, token_list):
pass
def _add_to_voice_url_dict(self, lang_abbr, word, url):
pass
def get_related_words(self, word, language_abbr=None):
pass
def get_word_definition_by_id(self, lexeme_id):
'''
Get the dictionary entry from
``https://www.duolingo.com/api/1/dictionary_page?lexeme_id=``<lexeme_id>``
:param lexeme_id: Identifier of the word
:type: str
:return: The dictionary entry for the given word
'''
pass
def get_daily_xp_progress(self):
pass
| 58 | 28 | 12 | 1 | 9 | 2 | 3 | 0.24 | 1 | 19 | 6 | 0 | 48 | 9 | 51 | 51 | 681 | 102 | 467 | 167 | 409 | 112 | 386 | 160 | 333 | 11 | 1 | 4 | 148 |
142,215 |
KartikTalwar/Duolingo
|
KartikTalwar_Duolingo/duolingo.py
|
duolingo.DuolingoException
|
class DuolingoException(Exception):
pass
|
class DuolingoException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 4 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
142,216 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadLegendOfZelda
|
class ShouldReadLegendOfZelda(ShouldReadROMHeaderTestCase, TestCase):
"""Check The Legend Of Zelda ROM."""
rom_name = 'the-legend-of-zelda.nes'
prg_rom_size = 128
chr_rom_size = 0
prg_ram_size = 8
mapper = 1
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = True
is_vertical_mirroring = False
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 128 * 2**10
chr_rom_start = 16 + 128 * 2**10
chr_rom_stop = (16 + 128 * 2**10) + 0
|
class ShouldReadLegendOfZelda(ShouldReadROMHeaderTestCase, TestCase):
'''Check The Legend Of Zelda ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,217 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadExcitebike
|
class ShouldReadExcitebike(ShouldReadROMHeaderTestCase, TestCase):
"""Check the Excitebike ROM."""
rom_name = 'excitebike.nes'
prg_rom_size = 16
chr_rom_size = 8
prg_ram_size = 8
mapper = 0
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = False
is_vertical_mirroring = True
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 16 * 2**10
chr_rom_start = 16 + 16 * 2**10
chr_rom_stop = (16 + 16 * 2**10) + (8 * 2**10)
|
class ShouldReadExcitebike(ShouldReadROMHeaderTestCase, TestCase):
'''Check the Excitebike ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,218 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldNotCreateInstanceOfROMWithoutPath
|
class ShouldNotCreateInstanceOfROMWithoutPath(TestCase):
def test(self):
self.assertRaises(TypeError, ROM)
|
class ShouldNotCreateInstanceOfROMWithoutPath(TestCase):
def test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,219 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldNotCreateInstanceOfROMWithInvaldROMFile
|
class ShouldNotCreateInstanceOfROMWithInvaldROMFile(TestCase):
def test(self):
empty = rom_file_abs_path('empty.nes')
self.assertRaises(ValueError, lambda: ROM(empty))
|
class ShouldNotCreateInstanceOfROMWithInvaldROMFile(TestCase):
def test(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 4 | 0 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 2 | 0 | 1 |
142,220 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseValueErrorOnNoniNES_ROMPath
|
class ShouldRaiseValueErrorOnNoniNES_ROMPath(TestCase):
def test(self):
self.assertRaises(ValueError, NESEnv, rom_file_abs_path('blank'))
|
class ShouldRaiseValueErrorOnNoniNES_ROMPath(TestCase):
def test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,221 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldStepEnvBackupRestore
|
class ShouldStepEnvBackupRestore(TestCase):
def test(self):
done = True
env = create_smb1_instance()
for _ in range(250):
if done:
state = env.reset()
done = False
state, _, done, _ = env.step(0)
backup = state.copy()
env._backup()
for _ in range(250):
if done:
state = env.reset()
done = False
state, _, done, _ = env.step(0)
self.assertFalse(np.array_equal(backup, state))
env._restore()
self.assertTrue(np.array_equal(backup, env.screen))
env.close()
|
class ShouldStepEnvBackupRestore(TestCase):
def test(self):
pass
| 2 | 0 | 24 | 5 | 19 | 0 | 5 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 73 | 25 | 5 | 20 | 7 | 18 | 0 | 20 | 7 | 18 | 5 | 2 | 2 | 5 |
142,222 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldReadAndWriteMemory
|
class ShouldReadAndWriteMemory(TestCase):
def test(self):
env = create_smb1_instance()
env.reset()
for _ in range(90):
env.step(8)
env.step(0)
self.assertEqual(129, env.ram[0x0776])
env.ram[0x0776] = 0
self.assertEqual(0, env.ram[0x0776])
env.close()
|
class ShouldReadAndWriteMemory(TestCase):
def test(self):
pass
| 2 | 0 | 10 | 0 | 10 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 73 | 11 | 0 | 11 | 4 | 9 | 0 | 11 | 4 | 9 | 2 | 2 | 1 | 2 |
142,223 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldStepEnv
|
class ShouldStepEnv(TestCase):
def test(self):
env = create_smb1_instance()
done = True
for _ in range(500):
if done:
# reset the environment and check the output value
state = env.reset()
self.assertIsInstance(state, np.ndarray)
# sample a random action and check it
action = env.action_space.sample()
self.assertIsInstance(action, int)
# take a step and check the outputs
output = env.step(action)
self.assertIsInstance(output, tuple)
self.assertEqual(4, len(output))
# check each output
state, reward, done, info = output
self.assertIsInstance(state, np.ndarray)
self.assertIsInstance(reward, float)
self.assertIsInstance(done, bool)
self.assertIsInstance(info, dict)
# check the render output
render = env.render('rgb_array')
self.assertIsInstance(render, np.ndarray)
env.reset()
env.close()
|
class ShouldStepEnv(TestCase):
def test(self):
pass
| 2 | 0 | 26 | 0 | 21 | 5 | 3 | 0.23 | 1 | 6 | 0 | 0 | 1 | 0 | 1 | 73 | 27 | 0 | 22 | 10 | 20 | 5 | 22 | 10 | 20 | 3 | 2 | 2 | 3 |
142,224 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldResetAndCloseEnv
|
class ShouldResetAndCloseEnv(TestCase):
def test(self):
env = create_smb1_instance()
env.reset()
env.close()
# trying to close again should raise an error
self.assertRaises(ValueError, env.close)
|
class ShouldResetAndCloseEnv(TestCase):
def test(self):
pass
| 2 | 0 | 6 | 0 | 5 | 1 | 1 | 0.17 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 73 | 7 | 0 | 6 | 3 | 4 | 1 | 6 | 3 | 4 | 1 | 2 | 0 | 1 |
142,225 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadSuperMarioBros2
|
class ShouldReadSuperMarioBros2(ShouldReadROMHeaderTestCase, TestCase):
"""Check the Super Mario Bros 2 ROM."""
rom_name = 'super-mario-bros-2.nes'
prg_rom_size = 128
chr_rom_size = 128
prg_ram_size = 8
mapper = 4
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = False
is_vertical_mirroring = False
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 128 * 2**10
chr_rom_start = 16 + 128 * 2**10
chr_rom_stop = (16 + 128 * 2**10) + (128 * 2**10)
|
class ShouldReadSuperMarioBros2(ShouldReadROMHeaderTestCase, TestCase):
'''Check the Super Mario Bros 2 ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,226 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadSuperMarioBros
|
class ShouldReadSuperMarioBros(ShouldReadROMHeaderTestCase, TestCase):
"""Check the Super Mario Bros 1 ROM."""
rom_name = 'super-mario-bros-1.nes'
prg_rom_size = 32
chr_rom_size = 8
prg_ram_size = 8
mapper = 0
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = False
is_vertical_mirroring = True
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 32 * 2**10
chr_rom_start = 16 + 32 * 2**10
chr_rom_stop = (16 + 32 * 2**10) + (8 * 2**10)
|
class ShouldReadSuperMarioBros(ShouldReadROMHeaderTestCase, TestCase):
'''Check the Super Mario Bros 1 ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,227 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadROMHeaderTestCase
|
class ShouldReadROMHeaderTestCase(object):
"""The general form of a test case to check the header of a ROM."""
# the name of the ROM to test the header of
rom_name = None
# the amount of program memory in the ROM (KB)
prg_rom_size = None
# the amount of character map memory in the ROM (KB)
chr_rom_size = None
# the amount of program RAM in the ROM (KB)
prg_ram_size = None
# the number of the mapper this ROM uses
mapper = None
# whether to ignore the mirroring setting bit
is_ignore_mirroring = None
# whether the ROM contains a trainer block
has_trainer = None
# whether the cartridge of the ROM has a battery-backed RAM module
has_battery_backed_ram = None
# the mirroring mode used by the ROM
is_vertical_mirroring = None
# whether the ROM uses PlayChoice-10 (8KB of Hint Screen after CHR data)
has_play_choice_10 = None
# whether the ROM uses VS Unisystem
has_vs_unisystem = None
# the TV system the ROM is designed for
is_pal = None
# the address the trainer ROM starts at
trainer_rom_start = None
# the address the trainer ROM stops at
trainer_rom_stop = None
# the address the PRG ROM starts at
prg_rom_start = None
# the address the PRG ROM stops at
prg_rom_stop = None
# the address the CHR ROM starts at
chr_rom_start = None
# the address the CHR ROM stops at
chr_rom_stop = None
def setUp(self):
"""Perform setup before each test."""
rom_path = rom_file_abs_path(self.rom_name)
self.rom = ROM(rom_path)
def test_header_length(self):
"""Check the length of the header."""
self.assertEqual(16, len(self.rom.header))
def test_prg_rom_size(self):
"""Check the PRG ROM size."""
self.assertEqual(self.prg_rom_size, self.rom.prg_rom_size)
def test_chr_rom_size(self):
"""Check the CHR ROM size."""
self.assertEqual(self.chr_rom_size, self.rom.chr_rom_size)
def test_prg_ram_size(self):
"""Check the PRG RAM size."""
self.assertEqual(self.prg_ram_size, self.rom.prg_ram_size)
def test_mapper(self):
"""Check the mapper number."""
self.assertEqual(self.mapper, self.rom.mapper)
def test_is_ignore_mirroring(self):
"""Check whether the ROM is ignoring the mirroring mode."""
expected = self.is_ignore_mirroring
actual = self.rom.is_ignore_mirroring
self.assertEqual(expected, actual)
def test_has_trainer(self):
"""Check whether the ROM has a trainer block or not."""
self.assertEqual(self.has_trainer, self.rom.has_trainer)
def test_has_battery_backed_ram(self):
"""Check whether the ROM has battery-backed RAM."""
expected = self.has_battery_backed_ram
actual = self.rom.has_battery_backed_ram
self.assertEqual(expected, actual)
def test_is_vertical_mirroring(self):
"""Check the mirroring mode of the ROM."""
self.assertEqual(self.is_vertical_mirroring, self.rom.is_vertical_mirroring)
def test_has_play_choice_10(self):
"""Check whether the ROM uses PlayChoice-10."""
self.assertEqual(self.has_play_choice_10, self.rom.has_play_choice_10)
def test_has_vs_unisystem(self):
"""Check whether the ROM uses a VS Unisystem."""
self.assertEqual(self.has_vs_unisystem, self.rom.has_vs_unisystem)
def test_is_pal(self):
"""Check which TV mode the ROM is designed for."""
self.assertEqual(self.is_pal, self.rom.is_pal)
def test_trainer_rom_start(self):
"""Check the starting address of trainer ROM."""
self.assertEqual(self.trainer_rom_start, self.rom.trainer_rom_start)
def test_trainer_rom_stop(self):
"""Check the stopping address of trainer ROM."""
self.assertEqual(self.trainer_rom_stop, self.rom.trainer_rom_stop)
def test_trainer_rom(self):
"""Check the trainer ROM."""
size = self.trainer_rom_stop - self.trainer_rom_start
self.assertEqual(size, len(self.rom.trainer_rom))
def test_prg_rom_start(self):
"""Check the starting address of PRG ROM."""
self.assertEqual(self.prg_rom_start, self.rom.prg_rom_start)
def test_prg_rom_stop(self):
"""Check the stopping address of PRG ROM."""
self.assertEqual(self.prg_rom_stop, self.rom.prg_rom_stop)
def test_prg_rom(self):
"""Check the PRG ROM."""
size = (self.prg_rom_stop - self.prg_rom_start)
self.assertEqual(size, len(self.rom.prg_rom))
def test_chr_rom_start(self):
"""Check the starting address of CHR ROM."""
self.assertEqual(self.chr_rom_start, self.rom.chr_rom_start)
def test_chr_rom_stop(self):
"""Check the stopping address of CHR ROM."""
self.assertEqual(self.chr_rom_stop, self.rom.chr_rom_stop)
def test_chr_rom(self):
"""Check the CHR ROM."""
size = (self.chr_rom_stop - self.chr_rom_start)
self.assertEqual(size, len(self.rom.chr_rom))
|
class ShouldReadROMHeaderTestCase(object):
'''The general form of a test case to check the header of a ROM.'''
def setUp(self):
'''Perform setup before each test.'''
pass
def test_header_length(self):
'''Check the length of the header.'''
pass
def test_prg_rom_size(self):
'''Check the PRG ROM size.'''
pass
def test_chr_rom_size(self):
'''Check the CHR ROM size.'''
pass
def test_prg_ram_size(self):
'''Check the PRG RAM size.'''
pass
def test_mapper(self):
'''Check the mapper number.'''
pass
def test_is_ignore_mirroring(self):
'''Check whether the ROM is ignoring the mirroring mode.'''
pass
def test_has_trainer(self):
'''Check whether the ROM has a trainer block or not.'''
pass
def test_has_battery_backed_ram(self):
'''Check whether the ROM has battery-backed RAM.'''
pass
def test_is_vertical_mirroring(self):
'''Check the mirroring mode of the ROM.'''
pass
def test_has_play_choice_10(self):
'''Check whether the ROM uses PlayChoice-10.'''
pass
def test_has_vs_unisystem(self):
'''Check whether the ROM uses a VS Unisystem.'''
pass
def test_is_pal(self):
'''Check which TV mode the ROM is designed for.'''
pass
def test_trainer_rom_start(self):
'''Check the starting address of trainer ROM.'''
pass
def test_trainer_rom_stop(self):
'''Check the stopping address of trainer ROM.'''
pass
def test_trainer_rom_start(self):
'''Check the trainer ROM.'''
pass
def test_prg_rom_start(self):
'''Check the starting address of PRG ROM.'''
pass
def test_prg_rom_stop(self):
'''Check the stopping address of PRG ROM.'''
pass
def test_prg_rom_size(self):
'''Check the PRG ROM.'''
pass
def test_chr_rom_start(self):
'''Check the starting address of CHR ROM.'''
pass
def test_chr_rom_stop(self):
'''Check the stopping address of CHR ROM.'''
pass
def test_chr_rom_size(self):
'''Check the CHR ROM.'''
pass
| 23 | 23 | 3 | 0 | 2 | 1 | 1 | 0.58 | 1 | 1 | 1 | 6 | 22 | 1 | 22 | 22 | 152 | 40 | 71 | 50 | 48 | 41 | 71 | 50 | 48 | 1 | 1 | 0 | 22 |
142,228 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldNotCreateInstanceOfROMWithInvaldPath
|
class ShouldNotCreateInstanceOfROMWithInvaldPath(TestCase):
def test(self):
self.assertRaises(TypeError, lambda: ROM(5))
self.assertRaises(ValueError, lambda: ROM('not a path'))
|
class ShouldNotCreateInstanceOfROMWithInvaldPath(TestCase):
def test(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 3 | 1 | 0 | 1 | 0 | 1 | 73 | 4 | 0 | 4 | 2 | 2 | 0 | 4 | 2 | 2 | 1 | 2 | 0 | 1 |
142,229 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseErrorOnStepBeforeReset
|
class ShouldRaiseErrorOnStepBeforeReset(TestCase):
def test(self):
env = NESEnv(rom_file_abs_path('super-mario-bros-1.nes'))
self.assertRaises(ValueError, env.step, 0)
|
class ShouldRaiseErrorOnStepBeforeReset(TestCase):
def test(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 4 | 0 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 2 | 0 | 1 |
142,230 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_multiple_makes.py
|
nes_py.tests.test_multiple_makes.ShouldMakeMultipleEnvironmentsParallel
|
class ShouldMakeMultipleEnvironmentsParallel(object):
"""An abstract test case to make environments in parallel."""
# the class to the parallel initializer (Thread, Process, etc.)
parallel_initializer = lambda target, args: None
# the number of parallel executions
num_execs = 4
# the number of steps to take per environment
steps = 10
def test(self):
procs = [None] * self.num_execs
args = (self.steps, )
# spawn the parallel instances
for idx in range(self.num_execs):
procs[idx] = self.parallel_initializer(target=play, args=args)
procs[idx].start()
# join the parallel instances
for proc in procs:
proc.join()
|
class ShouldMakeMultipleEnvironmentsParallel(object):
'''An abstract test case to make environments in parallel.'''
def test(self):
pass
| 2 | 1 | 10 | 0 | 8 | 2 | 3 | 0.5 | 1 | 1 | 0 | 2 | 1 | 0 | 1 | 1 | 22 | 4 | 12 | 9 | 10 | 6 | 12 | 9 | 10 | 3 | 1 | 1 | 3 |
142,231 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadSuperMarioBros3
|
class ShouldReadSuperMarioBros3(ShouldReadROMHeaderTestCase, TestCase):
"""Check the Super Mario Bros 3 ROM."""
rom_name = 'super-mario-bros-3.nes'
prg_rom_size = 256
chr_rom_size = 128
prg_ram_size = 8
mapper = 4
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = False
is_vertical_mirroring = False
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 256 * 2**10
chr_rom_start = 16 + 256 * 2**10
chr_rom_stop = (16 + 256 * 2**10) + (128 * 2**10)
|
class ShouldReadSuperMarioBros3(ShouldReadROMHeaderTestCase, TestCase):
'''Check the Super Mario Bros 3 ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,232 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/_image_viewer.py
|
nes_py._image_viewer.ImageViewer
|
class ImageViewer(object):
"""A simple class for viewing images using pyglet."""
def __init__(self, caption, height, width,
monitor_keyboard=False,
relevant_keys=None
):
"""
Initialize a new image viewer.
Args:
caption (str): the caption/title for the window
height (int): the height of the window
width (int): the width of the window
monitor_keyboard: whether to monitor events from the keyboard
relevant_keys: the relevant keys to monitor events from
Returns:
None
"""
# detect if rendering from python threads and fail
import threading
if threading.current_thread() is not threading.main_thread():
msg = 'rendering from python threads is not supported'
raise RuntimeError(msg)
# import pyglet within class scope to resolve issues with how pyglet
# interacts with OpenGL while using multiprocessing
import pyglet
self.pyglet = pyglet
# a mapping from pyglet key identifiers to native identifiers
self.KEY_MAP = {
self.pyglet.window.key.ENTER: ord('\r'),
self.pyglet.window.key.SPACE: ord(' '),
}
self.caption = caption
self.height = height
self.width = width
self.monitor_keyboard = monitor_keyboard
self.relevant_keys = relevant_keys
self._window = None
self._pressed_keys = []
self._is_escape_pressed = False
@property
def is_open(self):
"""Return a boolean determining if this window is open."""
return self._window is not None
@property
def is_escape_pressed(self):
"""Return True if the escape key is pressed."""
return self._is_escape_pressed
@property
def pressed_keys(self):
"""Return a sorted list of the pressed keys."""
return tuple(sorted(self._pressed_keys))
def _handle_key_event(self, symbol, is_press):
"""
Handle a key event.
Args:
symbol: the symbol in the event
is_press: whether the event is a press or release
Returns:
None
"""
# remap the key to the expected domain
symbol = self.KEY_MAP.get(symbol, symbol)
# check if the symbol is the escape key
if symbol == self.pyglet.window.key.ESCAPE:
self._is_escape_pressed = is_press
return
# make sure the symbol is relevant
if self.relevant_keys is not None and symbol not in self.relevant_keys:
return
# handle the press / release by appending / removing the key to pressed
if is_press:
self._pressed_keys.append(symbol)
else:
self._pressed_keys.remove(symbol)
def on_key_press(self, symbol, modifiers):
"""Respond to a key press on the keyboard."""
self._handle_key_event(symbol, True)
def on_key_release(self, symbol, modifiers):
"""Respond to a key release on the keyboard."""
self._handle_key_event(symbol, False)
def open(self):
"""Open the window."""
# create a window for this image viewer instance
self._window = self.pyglet.window.Window(
caption=self.caption,
height=self.height,
width=self.width,
vsync=False,
resizable=True,
)
# add keyboard event monitors if enabled
if self.monitor_keyboard:
self._window.event(self.on_key_press)
self._window.event(self.on_key_release)
def close(self):
"""Close the window."""
if self.is_open:
self._window.close()
self._window = None
def show(self, frame):
"""
Show an array of pixels on the window.
Args:
frame (numpy.ndarray): the frame to show on the window
Returns:
None
"""
# check that the frame has the correct dimensions
if len(frame.shape) != 3:
raise ValueError('frame should have shape with only 3 dimensions')
# open the window if it isn't open already
if not self.is_open:
self.open()
# prepare the window for the next frame
self._window.clear()
self._window.switch_to()
self._window.dispatch_events()
# create an image data object
image = self.pyglet.image.ImageData(
frame.shape[1],
frame.shape[0],
'RGB',
frame.tobytes(),
pitch=frame.shape[1]*-3
)
# send the image to the window
image.blit(0, 0, width=self._window.width, height=self._window.height)
self._window.flip()
|
class ImageViewer(object):
'''A simple class for viewing images using pyglet.'''
def __init__(self, caption, height, width,
monitor_keyboard=False,
relevant_keys=None
):
'''
Initialize a new image viewer.
Args:
caption (str): the caption/title for the window
height (int): the height of the window
width (int): the width of the window
monitor_keyboard: whether to monitor events from the keyboard
relevant_keys: the relevant keys to monitor events from
Returns:
None
'''
pass
@property
def is_open(self):
'''Return a boolean determining if this window is open.'''
pass
@property
def is_escape_pressed(self):
'''Return True if the escape key is pressed.'''
pass
@property
def pressed_keys(self):
'''Return a sorted list of the pressed keys.'''
pass
def _handle_key_event(self, symbol, is_press):
'''
Handle a key event.
Args:
symbol: the symbol in the event
is_press: whether the event is a press or release
Returns:
None
'''
pass
def on_key_press(self, symbol, modifiers):
'''Respond to a key press on the keyboard.'''
pass
def on_key_release(self, symbol, modifiers):
'''Respond to a key release on the keyboard.'''
pass
def open(self):
'''Open the window.'''
pass
def close(self):
'''Close the window.'''
pass
def show(self, frame):
'''
Show an array of pixels on the window.
Args:
frame (numpy.ndarray): the frame to show on the window
Returns:
None
'''
pass
| 14 | 11 | 13 | 1 | 8 | 5 | 2 | 0.62 | 1 | 3 | 0 | 0 | 10 | 10 | 10 | 10 | 146 | 18 | 79 | 31 | 60 | 49 | 57 | 25 | 44 | 4 | 1 | 1 | 18 |
142,233 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/_rom.py
|
nes_py._rom.ROM
|
class ROM(object):
"""An abstraction of the NES Read-Only Memory (ROM)."""
# the magic bytes expected at the first four bytes of the header.
# It spells "NES<END>"
_MAGIC = np.array([0x4E, 0x45, 0x53, 0x1A])
def __init__(self, rom_path):
"""
Initialize a new ROM.
Args:
rom_path (str): the path to the ROM file
Returns:
None
"""
# make sure the rom path is a string
if not isinstance(rom_path, str):
raise TypeError('rom_path must be of type: str.')
# make sure the rom path exists
if not os.path.exists(rom_path):
msg = 'rom_path points to non-existent file: {}.'.format(rom_path)
raise ValueError(msg)
# read the binary data in the .nes ROM file
self.raw_data = np.fromfile(rom_path, dtype='uint8')
# ensure the first 4 bytes are 0x4E45531A (NES<EOF>)
if not np.array_equal(self._magic, self._MAGIC):
raise ValueError('ROM missing magic number in header.')
if self._zero_fill != 0:
raise ValueError("ROM header zero fill bytes are not zero.")
#
# MARK: Header
#
@property
def header(self):
"""Return the header of the ROM file as bytes."""
return self.raw_data[:16]
@property
def _magic(self):
"""Return the magic bytes in the first 4 bytes."""
return self.header[:4]
@property
def prg_rom_size(self):
"""Return the size of the PRG ROM in KB."""
return 16 * self.header[4]
@property
def chr_rom_size(self):
"""Return the size of the CHR ROM in KB."""
return 8 * self.header[5]
@property
def flags_6(self):
"""Return the flags at the 6th byte of the header."""
return '{:08b}'.format(self.header[6])
@property
def flags_7(self):
"""Return the flags at the 7th byte of the header."""
return '{:08b}'.format(self.header[7])
@property
def prg_ram_size(self):
"""Return the size of the PRG RAM in KB."""
size = self.header[8]
# size becomes 8 when it's zero for compatibility
if size == 0:
size = 1
return 8 * size
@property
def flags_9(self):
"""Return the flags at the 9th byte of the header."""
return '{:08b}'.format(self.header[9])
@property
def flags_10(self):
"""
Return the flags at the 10th byte of the header.
Notes:
- these flags are not part of official specification.
- ignored in this emulator
"""
return '{:08b}'.format(self.header[10])
@property
def _zero_fill(self):
"""Return the zero fill bytes at the end of the header."""
return self.header[11:].sum()
#
# MARK: Header Flags
#
@property
def mapper(self):
"""Return the mapper number this ROM uses."""
# the high nibble is in flags 7, the low nibble is in flags 6
return int(self.flags_7[:4] + self.flags_6[:4], 2)
@property
def is_ignore_mirroring(self):
"""Return a boolean determining if the ROM ignores mirroring."""
return bool(int(self.flags_6[4]))
@property
def has_trainer(self):
"""Return a boolean determining if the ROM has a trainer block."""
return bool(int(self.flags_6[5]))
@property
def has_battery_backed_ram(self):
"""Return a boolean determining if the ROM has a battery-backed RAM."""
return bool(int(self.flags_6[6]))
@property
def is_vertical_mirroring(self):
"""Return the mirroring mode this ROM uses."""
return bool(int(self.flags_6[7]))
@property
def has_play_choice_10(self):
"""
Return whether this cartridge uses PlayChoice-10.
Note:
- Play-Choice 10 uses different color palettes for a different PPU
- ignored in this emulator
"""
return bool(int(self.flags_7[6]))
@property
def has_vs_unisystem(self):
"""
Return whether this cartridge has VS Uni-system.
Note:
VS Uni-system is for ROMs that have a coin slot (Arcades).
- ignored in this emulator
"""
return bool(int(self.flags_7[7]))
@property
def is_pal(self):
"""Return the TV system this ROM supports."""
return bool(int(self.flags_9[7]))
#
# MARK: ROM
#
@property
def trainer_rom_start(self):
"""The inclusive starting index of the trainer ROM."""
return 16
@property
def trainer_rom_stop(self):
"""The exclusive stopping index of the trainer ROM."""
if self.has_trainer:
return 16 + 512
else:
return 16
@property
def trainer_rom(self):
"""Return the trainer ROM of the ROM file."""
return self.raw_data[self.trainer_rom_start:self.trainer_rom_stop]
@property
def prg_rom_start(self):
"""The inclusive starting index of the PRG ROM."""
return self.trainer_rom_stop
@property
def prg_rom_stop(self):
"""The exclusive stopping index of the PRG ROM."""
return self.prg_rom_start + self.prg_rom_size * 2**10
@property
def prg_rom(self):
"""Return the PRG ROM of the ROM file."""
try:
return self.raw_data[self.prg_rom_start:self.prg_rom_stop]
except IndexError:
raise ValueError('failed to read PRG-ROM on ROM.')
@property
def chr_rom_start(self):
"""The inclusive starting index of the CHR ROM."""
return self.prg_rom_stop
@property
def chr_rom_stop(self):
"""The exclusive stopping index of the CHR ROM."""
return self.chr_rom_start + self.chr_rom_size * 2**10
@property
def chr_rom(self):
"""Return the CHR ROM of the ROM file."""
try:
return self.raw_data[self.chr_rom_start:self.chr_rom_stop]
except IndexError:
raise ValueError('failed to read CHR-ROM on ROM.')
|
class ROM(object):
'''An abstraction of the NES Read-Only Memory (ROM).'''
def __init__(self, rom_path):
'''
Initialize a new ROM.
Args:
rom_path (str): the path to the ROM file
Returns:
None
'''
pass
@property
def header(self):
'''Return the header of the ROM file as bytes.'''
pass
@property
def _magic(self):
'''Return the magic bytes in the first 4 bytes.'''
pass
@property
def prg_rom_size(self):
'''Return the size of the PRG ROM in KB.'''
pass
@property
def chr_rom_size(self):
'''Return the size of the CHR ROM in KB.'''
pass
@property
def flags_6(self):
'''Return the flags at the 6th byte of the header.'''
pass
@property
def flags_7(self):
'''Return the flags at the 7th byte of the header.'''
pass
@property
def prg_ram_size(self):
'''Return the size of the PRG RAM in KB.'''
pass
@property
def flags_9(self):
'''Return the flags at the 9th byte of the header.'''
pass
@property
def flags_10(self):
'''
Return the flags at the 10th byte of the header.
Notes:
- these flags are not part of official specification.
- ignored in this emulator
'''
pass
@property
def _zero_fill(self):
'''Return the zero fill bytes at the end of the header.'''
pass
@property
def mapper(self):
'''Return the mapper number this ROM uses.'''
pass
@property
def is_ignore_mirroring(self):
'''Return a boolean determining if the ROM ignores mirroring.'''
pass
@property
def has_trainer(self):
'''Return a boolean determining if the ROM has a trainer block.'''
pass
@property
def has_battery_backed_ram(self):
'''Return a boolean determining if the ROM has a battery-backed RAM.'''
pass
@property
def is_vertical_mirroring(self):
'''Return the mirroring mode this ROM uses.'''
pass
@property
def has_play_choice_10(self):
'''
Return whether this cartridge uses PlayChoice-10.
Note:
- Play-Choice 10 uses different color palettes for a different PPU
- ignored in this emulator
'''
pass
@property
def has_vs_unisystem(self):
'''
Return whether this cartridge has VS Uni-system.
Note:
VS Uni-system is for ROMs that have a coin slot (Arcades).
- ignored in this emulator
'''
pass
@property
def is_pal(self):
'''Return the TV system this ROM supports.'''
pass
@property
def trainer_rom_start(self):
'''The inclusive starting index of the trainer ROM.'''
pass
@property
def trainer_rom_stop(self):
'''The exclusive stopping index of the trainer ROM.'''
pass
@property
def trainer_rom_start(self):
'''Return the trainer ROM of the ROM file.'''
pass
@property
def prg_rom_start(self):
'''The inclusive starting index of the PRG ROM.'''
pass
@property
def prg_rom_stop(self):
'''The exclusive stopping index of the PRG ROM.'''
pass
@property
def prg_rom_size(self):
'''Return the PRG ROM of the ROM file.'''
pass
@property
def chr_rom_start(self):
'''The inclusive starting index of the CHR ROM.'''
pass
@property
def chr_rom_stop(self):
'''The exclusive stopping index of the CHR ROM.'''
pass
@property
def chr_rom_size(self):
'''Return the CHR ROM of the ROM file.'''
pass
| 56 | 29 | 5 | 0 | 3 | 2 | 1 | 0.63 | 1 | 6 | 0 | 0 | 28 | 1 | 28 | 28 | 215 | 42 | 106 | 60 | 50 | 67 | 78 | 33 | 49 | 5 | 1 | 1 | 36 |
142,234 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_multiple_makes.py
|
nes_py.tests.test_multiple_makes.ProcessTest
|
class ProcessTest(ShouldMakeMultipleEnvironmentsParallel, TestCase):
"""Test that processes (true multi-threading) work."""
parallel_initializer = Process
|
class ProcessTest(ShouldMakeMultipleEnvironmentsParallel, TestCase):
'''Test that processes (true multi-threading) work.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 73 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 2 | 0 | 0 |
142,235 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_multiple_makes.py
|
nes_py.tests.test_multiple_makes.ShouldMakeMultipleEnvironmentsSingleThread
|
class ShouldMakeMultipleEnvironmentsSingleThread(TestCase):
"""Test making 4 environments in a single code stream."""
# the number of environments to spawn
num_envs = 4
# the number of steps to take per environment
steps = 10
def test(self):
path = rom_file_abs_path('super-mario-bros-1.nes')
envs = [NESEnv(path) for _ in range(self.num_envs)]
dones = [True] * self.num_envs
for _ in range(self.steps):
for idx in range(self.num_envs):
if dones[idx]:
_ = envs[idx].reset()
action = envs[idx].action_space.sample()
_, _, dones[idx], _ = envs[idx].step(action)
|
class ShouldMakeMultipleEnvironmentsSingleThread(TestCase):
'''Test making 4 environments in a single code stream.'''
def test(self):
pass
| 2 | 1 | 11 | 1 | 10 | 0 | 4 | 0.23 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 20 | 4 | 13 | 9 | 11 | 3 | 13 | 9 | 11 | 4 | 2 | 3 | 4 |
142,236 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_multiple_makes.py
|
nes_py.tests.test_multiple_makes.ThreadTest
|
class ThreadTest(ShouldMakeMultipleEnvironmentsParallel, TestCase):
"""Test that threads (internal parallelism) work"""
parallel_initializer = Thread
|
class ThreadTest(ShouldMakeMultipleEnvironmentsParallel, TestCase):
'''Test that threads (internal parallelism) work'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.5 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 73 | 3 | 0 | 2 | 2 | 1 | 1 | 2 | 2 | 1 | 0 | 2 | 0 | 0 |
142,237 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_rom.py
|
nes_py.tests.test_rom.ShouldReadSMBLostLevels
|
class ShouldReadSMBLostLevels(ShouldReadROMHeaderTestCase, TestCase):
"""Check the Super Mario Bros Lost Levels ROM."""
rom_name = 'super-mario-bros-lost-levels.nes'
prg_rom_size = 32
chr_rom_size = 8
prg_ram_size = 8
mapper = 0
is_ignore_mirroring = False
has_trainer = False
has_battery_backed_ram = False
is_vertical_mirroring = True
has_play_choice_10 = False
has_vs_unisystem = False
is_pal = False
trainer_rom_start = 16
trainer_rom_stop = 16
prg_rom_start = 16
prg_rom_stop = 16 + 32 * 2**10
chr_rom_start = 16 + 32 * 2**10
chr_rom_stop = (16 + 32 * 2**10) + (8 * 2**10)
|
class ShouldReadSMBLostLevels(ShouldReadROMHeaderTestCase, TestCase):
'''Check the Super Mario Bros Lost Levels ROM.'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.05 | 2 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 21 | 1 | 19 | 19 | 18 | 1 | 19 | 19 | 18 | 0 | 2 | 0 | 0 |
142,238 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldCreateInstanceOfNESEnv
|
class ShouldCreateInstanceOfNESEnv(TestCase):
def test(self):
env = NESEnv(rom_file_abs_path('super-mario-bros-1.nes'))
self.assertIsInstance(env, gym.Env)
env.close()
|
class ShouldCreateInstanceOfNESEnv(TestCase):
def test(self):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 1 | 73 | 5 | 0 | 5 | 3 | 3 | 0 | 5 | 3 | 3 | 1 | 2 | 0 | 1 |
142,239 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/wrappers/joypad_space.py
|
nes_py.wrappers.joypad_space.JoypadSpace
|
class JoypadSpace(Wrapper):
"""An environment wrapper to convert binary to discrete action space."""
# a mapping of buttons to binary values
_button_map = {
'right': 0b10000000,
'left': 0b01000000,
'down': 0b00100000,
'up': 0b00010000,
'start': 0b00001000,
'select': 0b00000100,
'B': 0b00000010,
'A': 0b00000001,
'NOOP': 0b00000000,
}
@classmethod
def buttons(cls) -> list:
"""Return the buttons that can be used as actions."""
return list(cls._button_map.keys())
def __init__(self, env: Env, actions: list):
"""
Initialize a new binary to discrete action space wrapper.
Args:
env: the environment to wrap
actions: an ordered list of actions (as lists of buttons).
The index of each button list is its discrete coded value
Returns:
None
"""
super().__init__(env)
# create the new action space
self.action_space = gym.spaces.Discrete(len(actions))
# create the action map from the list of discrete actions
self._action_map = {}
self._action_meanings = {}
# iterate over all the actions (as button lists)
for action, button_list in enumerate(actions):
# the value of this action's bitmap
byte_action = 0
# iterate over the buttons in this button list
for button in button_list:
byte_action |= self._button_map[button]
# set this action maps value to the byte action value
self._action_map[action] = byte_action
self._action_meanings[action] = ' '.join(button_list)
def step(self, action):
"""
Take a step using the given action.
Args:
action (int): the discrete action to perform
Returns:
a tuple of:
- (numpy.ndarray) the state as a result of the action
- (float) the reward achieved by taking the action
- (bool) a flag denoting whether the episode has ended
- (dict) a dictionary of extra information
"""
# take the step and record the output
return self.env.step(self._action_map[action])
def reset(self):
"""Reset the environment and return the initial observation."""
return self.env.reset()
def get_keys_to_action(self):
"""Return the dictionary of keyboard keys to actions."""
# get the old mapping of keys to actions
old_keys_to_action = self.env.unwrapped.get_keys_to_action()
# invert the keys to action mapping to lookup key combos by action
action_to_keys = {v: k for k, v in old_keys_to_action.items()}
# create a new mapping of keys to actions
keys_to_action = {}
# iterate over the actions and their byte values in this mapper
for action, byte in self._action_map.items():
# get the keys to press for the action
keys = action_to_keys[byte]
# set the keys value in the dictionary to the current discrete act
keys_to_action[keys] = action
return keys_to_action
def get_action_meanings(self):
"""Return a list of actions meanings."""
actions = sorted(self._action_meanings.keys())
return [self._action_meanings[action] for action in actions]
|
class JoypadSpace(Wrapper):
'''An environment wrapper to convert binary to discrete action space.'''
@classmethod
def buttons(cls) -> list:
'''Return the buttons that can be used as actions.'''
pass
def __init__(self, env: Env, actions: list):
'''
Initialize a new binary to discrete action space wrapper.
Args:
env: the environment to wrap
actions: an ordered list of actions (as lists of buttons).
The index of each button list is its discrete coded value
Returns:
None
'''
pass
def step(self, action):
'''
Take a step using the given action.
Args:
action (int): the discrete action to perform
Returns:
a tuple of:
- (numpy.ndarray) the state as a result of the action
- (float) the reward achieved by taking the action
- (bool) a flag denoting whether the episode has ended
- (dict) a dictionary of extra information
'''
pass
def reset(self):
'''Reset the environment and return the initial observation.'''
pass
def get_keys_to_action(self):
'''Return the dictionary of keyboard keys to actions.'''
pass
def get_action_meanings(self):
'''Return a list of actions meanings.'''
pass
| 8 | 7 | 12 | 1 | 5 | 6 | 2 | 0.95 | 1 | 3 | 0 | 0 | 5 | 3 | 6 | 6 | 94 | 14 | 41 | 21 | 33 | 39 | 30 | 20 | 23 | 3 | 1 | 2 | 9 |
142,240 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseTypeErrorOnInvalidROMPathType
|
class ShouldRaiseTypeErrorOnInvalidROMPathType(TestCase):
def test(self):
self.assertRaises(TypeError, NESEnv, 0)
|
class ShouldRaiseTypeErrorOnInvalidROMPathType(TestCase):
def test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,241 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseValueErrorOnInvalidiNES_ROMPath
|
class ShouldRaiseValueErrorOnInvalidiNES_ROMPath(TestCase):
def test(self):
self.assertRaises(ValueError, NESEnv, rom_file_abs_path('empty.nes'))
|
class ShouldRaiseValueErrorOnInvalidiNES_ROMPath(TestCase):
def test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,242 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseValueErrorOnMissingNonexistentROMFile
|
class ShouldRaiseValueErrorOnMissingNonexistentROMFile(TestCase):
def test(self):
path = rom_file_abs_path('missing.nes')
self.assertRaises(ValueError, NESEnv, path)
|
class ShouldRaiseValueErrorOnMissingNonexistentROMFile(TestCase):
def test(self):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 4 | 0 | 4 | 3 | 2 | 0 | 4 | 3 | 2 | 1 | 2 | 0 | 1 |
142,243 |
Kautenja/nes-py
|
Kautenja_nes-py/nes_py/tests/test_nes_env.py
|
nes_py.tests.test_nes_env.ShouldRaiseValueErrorOnNonexistentFile
|
class ShouldRaiseValueErrorOnNonexistentFile(TestCase):
def test(self):
self.assertRaises(ValueError, NESEnv, 'not_a_file.nes')
|
class ShouldRaiseValueErrorOnNonexistentFile(TestCase):
def test(self):
pass
| 2 | 0 | 2 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 1 | 0 | 1 | 0 | 1 | 73 | 3 | 0 | 3 | 2 | 1 | 0 | 3 | 2 | 1 | 1 | 2 | 0 | 1 |
142,244 |
Keda87/python-quran-odoa
|
Keda87_python-quran-odoa/odoa.py
|
odoa.ODOAException
|
class ODOAException(Exception):
pass
|
class ODOAException(Exception):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
142,245 |
Keda87/python-quran-odoa
|
Keda87_python-quran-odoa/odoa.py
|
odoa.Quran
|
class Quran(object):
__slots__ = ['ayah', 'desc', 'translate', 'sound']
def __init__(self, ayah: str, desc: str, translate: str, sound: str):
self.ayah = ayah
self.desc = desc
self.translate = translate
self.sound = sound
def __repr__(self):
return f'<{self.__class__.__name__}: {self.desc}>'
|
class Quran(object):
def __init__(self, ayah: str, desc: str, translate: str, sound: str):
pass
def __repr__(self):
pass
| 3 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 2 | 4 | 2 | 2 | 11 | 2 | 9 | 8 | 6 | 0 | 9 | 8 | 6 | 1 | 1 | 0 | 2 |
142,246 |
Keda87/python-quran-odoa
|
Keda87_python-quran-odoa/tests.py
|
tests.ODOATest
|
class ODOATest(unittest.TestCase):
def setUp(self) -> None:
self.odoa = ODOA()
def test_get_surah(self):
coro = self.odoa.get_random_surah()
surah = LOOP.run_until_complete(coro)
self.assertIsNotNone(surah)
def test_get_surah_english(self):
coro = self.odoa.get_random_surah(lang='en')
surah = LOOP.run_until_complete(coro)
self.assertIsNotNone(surah)
def test_not_supported_language(self):
with self.assertRaises(ODOAException):
coro = self.odoa.get_random_surah('fr')
LOOP.run_until_complete(coro)
|
class ODOATest(unittest.TestCase):
def setUp(self) -> None:
pass
def test_get_surah(self):
pass
def test_get_surah_english(self):
pass
def test_not_supported_language(self):
pass
| 5 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 2 | 2 | 0 | 4 | 1 | 4 | 76 | 19 | 4 | 15 | 11 | 10 | 0 | 15 | 11 | 10 | 1 | 2 | 1 | 4 |
142,247 |
Keda87/python-quran-odoa
|
Keda87_python-quran-odoa/odoa.py
|
odoa.ODOA
|
class ODOA(object):
__slots__ = ['__TOTAL_SURAH', '__BASE_API', '__SUPPORTED_LANGUAGES']
def __init__(self) -> None:
self.__TOTAL_SURAH = 114 # https://en.wikipedia.org/wiki/List_of_surahs_in_the_Quran
self.__BASE_API = 'https://raw.githubusercontent.com/Keda87/quranjson/master/source'
self.__SUPPORTED_LANGUAGES = ['id', 'en']
async def get_random_surah(self, lang: str = 'id') -> Quran:
if lang not in self.__SUPPORTED_LANGUAGES:
message = 'Currently your selected language not supported yet.'
raise ODOAException(message)
rand_surah = random.randint(1, self.__TOTAL_SURAH)
surah_url = f'{self.__BASE_API}/surah/surah_{rand_surah}.json'
try:
response = await self.__fetch(surah_url)
data = response.json()
except IOError:
raise ODOAException
else:
random_ayah = random.randint(1, int(data.get('count')))
ayah_key = f'verse_{random_ayah}'
ayah = data['verse'][ayah_key]
surah_index = data.get('index')
surah_name = data.get('name')
translation = await self.__get_translation(surah_index, ayah_key, lang)
sound = self.__get_sound(surah_index, random_ayah)
desc = f'{surah_name}:{random_ayah}'
return Quran(ayah, desc, translation, sound)
async def __get_translation(self, surah: int, ayah, lang: str) -> str:
url = f'{self.__BASE_API}/translations/{lang}/{lang}_translation_{int(surah)}.json'
try:
response = await self.__fetch(url)
data = response.json()
return data['verse'][ayah]
except ODOAException as e:
raise e
def __get_sound(self, surah: int, ayah: int) -> str:
format_ayah = str(ayah).zfill(3)
return f'{self.__BASE_API}/sounds/{surah}/{format_ayah}.mp3'
@staticmethod
async def __fetch(url: str) -> Response:
async with httpx.AsyncClient() as client:
return await client.get(url)
def __repr__(self):
return f'<{self.__class__.__name__}>'
|
class ODOA(object):
def __init__(self) -> None:
pass
async def get_random_surah(self, lang: str = 'id') -> Quran:
pass
async def __get_translation(self, surah: int, ayah, lang: str) -> str:
pass
def __get_sound(self, surah: int, ayah: int) -> str:
pass
@staticmethod
async def __fetch(url: str) -> Response:
pass
def __repr__(self):
pass
| 8 | 0 | 7 | 0 | 7 | 0 | 2 | 0.02 | 1 | 5 | 2 | 0 | 5 | 3 | 6 | 6 | 52 | 8 | 44 | 31 | 36 | 1 | 43 | 28 | 36 | 3 | 1 | 1 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.