id
int64 0
843k
| repository_name
stringlengths 7
55
| file_path
stringlengths 9
332
| class_name
stringlengths 3
290
| human_written_code
stringlengths 12
4.36M
| class_skeleton
stringlengths 19
2.2M
| total_program_units
int64 1
9.57k
| total_doc_str
int64 0
4.2k
| AvgCountLine
float64 0
7.89k
| AvgCountLineBlank
float64 0
300
| AvgCountLineCode
float64 0
7.89k
| AvgCountLineComment
float64 0
7.89k
| AvgCyclomatic
float64 0
130
| CommentToCodeRatio
float64 0
176
| CountClassBase
float64 0
48
| CountClassCoupled
float64 0
589
| CountClassCoupledModified
float64 0
581
| CountClassDerived
float64 0
5.37k
| CountDeclInstanceMethod
float64 0
4.2k
| CountDeclInstanceVariable
float64 0
299
| CountDeclMethod
float64 0
4.2k
| CountDeclMethodAll
float64 0
4.2k
| CountLine
float64 1
115k
| CountLineBlank
float64 0
9.01k
| CountLineCode
float64 0
94.4k
| CountLineCodeDecl
float64 0
46.1k
| CountLineCodeExe
float64 0
91.3k
| CountLineComment
float64 0
27k
| CountStmt
float64 1
93.2k
| CountStmtDecl
float64 0
46.1k
| CountStmtExe
float64 0
90.2k
| MaxCyclomatic
float64 0
759
| MaxInheritanceTree
float64 0
16
| MaxNesting
float64 0
34
| SumCyclomatic
float64 0
6k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8,000 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/generic.py
|
aomi.model.generic.VarFile
|
class VarFile(Generic):
    """Generic VarFile resource, backed by a single var file on disk."""
    required_fields = ['path', 'mount', 'var_file']
    resource_key = 'var_file'

    def __init__(self, obj, opt):
        super(VarFile, self).__init__(obj, opt)
        # Both the secret identifier and the source filename come from
        # the var_file entry.
        self.secret = obj['var_file']
        self.filename = obj['var_file']

    def secrets(self):
        """Return the single secret tracked by this resource."""
        return [self.secret]

    def obj(self):
        """Load the var file, applying template variables from options."""
        src = hard_path(self.filename, self.opt.secrets)
        secret_file(src)
        return load_var_file(src, load_vars(self.opt))
|
class VarFile(Generic):
'''Generic VarFile'''
def secrets(self):
pass
def __init__(self, obj, opt):
pass
def obj(self):
pass
| 4 | 1 | 4 | 0 | 4 | 0 | 1 | 0.07 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 25 | 18 | 3 | 14 | 10 | 10 | 1 | 14 | 10 | 10 | 1 | 4 | 0 | 3 |
8,001 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/resource.py
|
aomi.model.resource.Latent
|
class Latent(Resource):
    """Latent Secret
    A latent secret is tracked only within icefiles. It will never be
    used as part of interactions with HCVault"""
    required_fields = []
    resource_key = 'latent_file'
    config_key = 'secrets'
    no_resource = True

    def __init__(self, obj, opt):
        super(Latent, self).__init__(obj, opt)
        self.secret = obj['latent_file']

    def secrets(self):
        """Return the single latent secret."""
        return [self.secret]

    def obj(self):
        """Read the latent file from disk (binary-safe)."""
        src = hard_path(self.secret, self.opt.secrets)
        secret_file(src)
        return open_maybe_binary(src)
|
class Latent(Resource):
'''Latent Secret
A latent secret is tracked only within icefiles. It will never be
used as part of interactions with HCVault'''
def secrets(self):
pass
def __init__(self, obj, opt):
pass
def obj(self):
pass
| 4 | 1 | 3 | 0 | 3 | 0 | 1 | 0.21 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 24 | 20 | 3 | 14 | 10 | 10 | 3 | 14 | 10 | 10 | 1 | 2 | 0 | 3 |
8,002 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/generic.py
|
aomi.model.generic.Generated
|
class Generated(Generic):
    """Generic Generated secret resource."""
    required_fields = ['mount', 'path', 'keys']
    resource_key = 'generated'

    # why are generated generics stored slight differently
    def __init__(self, obj, opt):
        spec = obj['generated']
        super(Generated, self).__init__(spec, opt)
        for entry in spec['keys']:
            check_obj(['name', 'method'], 'generated secret entry', entry)
        self.keys = spec['keys']

    def generate_obj(self):
        """Generates the secret object, respecting existing information
        and user specified options"""
        secret_obj = deepcopy(self.existing) if self.existing else {}
        for entry in self.keys:
            key_name = entry['name']
            # Keep an already-present value unless overwrite was requested.
            keep = (self.existing and
                    key_name in self.existing and
                    not entry.get('overwrite'))
            if keep:
                LOG.debug("Not overwriting %s/%s", self.path, key_name)
            else:
                secret_obj[key_name] = generated_key(entry)
        return secret_obj

    def diff(self, obj=None):
        """Classify the pending change for this generated secret."""
        if self.present and not self.existing:
            return aomi.model.resource.ADD
        if self.existing and not self.present:
            return aomi.model.resource.DEL
        if self.present and self.existing:
            if any(entry.get('overwrite') for entry in self.keys):
                return aomi.model.resource.OVERWRITE
        return aomi.model.resource.NOOP

    def sync(self, vault_client):
        """Generate the secret payload, then defer to Generic sync."""
        self._obj = self.generate_obj()
        super(Generated, self).sync(vault_client)
|
class Generated(Generic):
'''Generic Generated'''
def __init__(self, obj, opt):
pass
def generate_obj(self):
'''Generates the secret object, respecting existing information
and user specified options'''
pass
def diff(self, obj=None):
pass
def sync(self, vault_client):
pass
| 5 | 2 | 10 | 1 | 8 | 1 | 3 | 0.11 | 1 | 1 | 0 | 0 | 4 | 2 | 4 | 26 | 47 | 7 | 36 | 15 | 31 | 4 | 31 | 15 | 26 | 5 | 4 | 2 | 12 |
8,003 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/generic.py
|
aomi.model.generic.Files
|
class Files(Generic):
    """Generic File secret resource.

    Maps named secrets to source files on disk; binary content is
    stored base64 encoded with the secret_format flagged."""
    required_fields = ['path', 'mount', 'files']
    resource_key = 'files'

    def secrets(self):
        """Every secret value tracked by this resource."""
        return [v for _k, v in iteritems(self._obj)]

    def __init__(self, obj, opt):
        super(Files, self).__init__(obj, opt)
        s_obj = {}
        for sfile in obj['files']:
            s_obj[sfile['name']] = sfile['source']
        self._obj = s_obj

    def export(self, directory):
        """Write each existing secret to its destination under directory."""
        for name, filename in iteritems(self._obj):
            dest_file = "%s/%s" % (directory, filename)
            dest_dir = os.path.dirname(dest_file)
            if not os.path.isdir(dest_dir):
                os.mkdir(dest_dir, 0o700)
            # Context manager closes the handle even if write() raises;
            # the original leaked the handle on error.
            with open(dest_file, 'w') as secret_h:
                secret_h.write(self.existing[name])

    def obj(self):
        """Load each secret file, base64 encoding anything non-text."""
        s_obj = {}
        for name, filename in iteritems(self._obj):
            actual_file = hard_path(filename, self.opt.secrets)
            secret_file(actual_file)
            data = open_maybe_binary(actual_file)
            try:
                is_unicode_string(data)
                s_obj[name] = data
            except aomi.exceptions.Validation:
                # Not valid text: store encoded and mark the format.
                s_obj[name] = portable_b64encode(data)
                self.secret_format = 'binary'
        return s_obj

    def validate(self, obj):
        """Ensure every file entry carries source and name."""
        super(Files, self).validate(obj)
        for fileobj in obj['files']:
            check_obj(['source', 'name'], self.name(), fileobj)
|
class Files(Generic):
'''Generic File'''
def secrets(self):
pass
def __init__(self, obj, opt):
pass
def export(self, directory):
pass
def obj(self):
pass
def validate(self, obj):
pass
| 6 | 1 | 7 | 1 | 7 | 0 | 2 | 0.03 | 1 | 2 | 1 | 0 | 5 | 2 | 5 | 27 | 46 | 8 | 37 | 22 | 31 | 1 | 37 | 21 | 31 | 3 | 4 | 2 | 11 |
8,004 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/context.py
|
aomi.model.context.Context
|
class Context(object):
    """The overall context of an aomi session"""
    @staticmethod
    def load(config, opt):
        """Loads and returns a full context object based on the Secretfile"""
        ctx = Context(opt)
        seed_map = py_resources()
        # Unique config keys, ordered by resource_sort so dependent
        # resource types are instantiated in a stable order.
        seed_keys = sorted(set([m[0] for m in seed_map]), key=resource_sort)
        for config_key in seed_keys:
            if config_key not in config:
                continue
            for resource_config in config[config_key]:
                mod = find_model(config_key, resource_config, seed_map)
                if not mod:
                    LOG.warning("unable to find mod for %s", resource_config)
                    continue
                ctx.add(mod(resource_config, opt))
        # Warn about Secretfile sections with no registered model
        # ('pgp_keys' is handled elsewhere and deliberately skipped).
        for config_key in config.keys():
            if config_key != 'pgp_keys' and \
               config_key not in seed_keys:
                LOG.warning("missing model for %s", config_key)
        return filtered_context(ctx)
    def thaw(self, tmp_dir):
        """Will thaw every secret into an appropriate temporary location"""
        for resource in self.resources():
            if resource.present:
                resource.thaw(tmp_dir)
    def freeze(self, dest_dir):
        """Freezes every resource within a context"""
        for resource in self.resources():
            if resource.present:
                resource.freeze(dest_dir)
    def __init__(self, opt):
        # Backend wrappers are tracked separately from the resources
        # that reference them.
        self._mounts = []
        self._resources = []
        self._auths = []
        self._logs = []
        self.opt = opt
    def mounts(self):
        """Secret backends within context"""
        return self._mounts
    def logs(self):
        """Audit log backends within context"""
        return self._logs
    def auths(self):
        """Authentication backends within context"""
        return self._auths
    def resources(self):
        """Vault resources within context"""
        # Resources may expand to child resources; flatten them here.
        res = []
        for resource in self._resources:
            res = res + resource.resources()
        return res
    def add(self, resource):
        """Add a resource to the context"""
        if isinstance(resource, Resource):
            # Every secret (other than cubbyhole) implies a secret
            # backend; mounts/auths/logs get their own backend wrappers.
            if isinstance(resource, Secret) and \
               resource.mount != 'cubbyhole':
                ensure_backend(resource,
                               SecretBackend,
                               self._mounts,
                               self.opt,
                               False)
            elif isinstance(resource, Mount):
                ensure_backend(resource, SecretBackend, self._mounts, self.opt)
            elif isinstance(resource, Auth):
                ensure_backend(resource, AuthBackend, self._auths, self.opt)
            elif isinstance(resource, AuditLog):
                ensure_backend(resource, LogBackend, self._logs, self.opt)
            self._resources.append(resource)
        else:
            msg = "Unknown resource %s being " \
                  "added to context" % resource.__class__
            raise aomi_excep.AomiError(msg)
    def remove(self, resource):
        """Removes a resource from the context"""
        if isinstance(resource, Resource):
            self._resources.remove(resource)
    def sync_policies(self, vault_client):
        """Synchronizes policies only"""
        p_resources = [x for x in self.resources()
                       if isinstance(x, Policy)]
        for resource in p_resources:
            resource.sync(vault_client)
        # Return everything that still needs syncing.
        return [x for x in self.resources()
                if not isinstance(x, Policy)]
    def sync_auth(self, vault_client, resources):
        """Synchronizes auth mount wrappers. These happen
        early in the cycle, to ensure that user backends
        are proper. They may also be used to set mount
        tuning"""
        for auth in self.auths():
            auth.sync(vault_client)
        auth_resources = [x for x in resources
                          if isinstance(x, (LDAP, UserPass))]
        for resource in auth_resources:
            resource.sync(vault_client)
        return [x for x in resources
                if not isinstance(x, (LDAP, UserPass, AuditLog))]
    def actually_mount(self, vault_client, resource, active_mounts):
        """Handle the actual (potential) mounting of a secret backend.
        This is called in multiple contexts, but the action will always
        be the same. If we were not aware of the mountpoint at the start
        and it has not already been mounted, then mount it."""
        a_mounts = list(active_mounts)
        # cubbyhole is always available; nothing to mount.
        if isinstance(resource, Secret) and resource.mount == 'cubbyhole':
            return a_mounts
        active_mount = find_backend(resource.mount, active_mounts)
        if not active_mount:
            actual_mount = find_backend(resource.mount, self._mounts)
            a_mounts.append(actual_mount)
            actual_mount.sync(vault_client)
        return a_mounts
    def sync_mounts(self, active_mounts, resources, vault_client):
        """Synchronizes mount points. Removes things before
        adding new."""
        # Create a resource set that is only explicit mounts
        # and sort so removals are first
        mounts = [x for x in resources
                  if isinstance(x, (Mount, AWS))]
        s_resources = sorted(mounts, key=absent_sort)
        # Iterate over explicit mounts only
        for resource in s_resources:
            active_mounts = self.actually_mount(vault_client,
                                                resource,
                                                active_mounts)
        # OK Now iterate over everything but make sure it is clear
        # that ad-hoc mountpoints are deprecated as per
        # https://github.com/Autodesk/aomi/issues/110
        for resource in [x for x in resources
                         if isinstance(x, Secret)]:
            n_mounts = self.actually_mount(vault_client,
                                           resource,
                                           active_mounts)
            # A longer list means actually_mount added an ad-hoc mount.
            if len(n_mounts) != len(active_mounts):
                LOG.warning("Ad-Hoc mount with %s. Please specify"
                            " explicit mountpoints.", resource)
            active_mounts = n_mounts
        return active_mounts, [x for x in resources
                               if not isinstance(x, (Mount))]
    def sync(self, vault_client, opt):
        """Synchronizes the context to the Vault server. This
        has the effect of updating every resource which is
        in the context and has changes pending."""
        active_mounts = []
        for audit_log in self.logs():
            audit_log.sync(vault_client)
        # Handle policies only on the first pass. This allows us
        # to ensure that ACL's are in place prior to actually
        # making any changes.
        not_policies = self.sync_policies(vault_client)
        # Handle auth wrapper resources on the next path. The resources
        # may update a path on their own. They may also provide mount
        # tuning information.
        not_auth = self.sync_auth(vault_client, not_policies)
        # Handle mounts only on the next pass. This allows us to
        # ensure that everything is in order prior to actually
        # provisioning secrets. Note we handle removals before
        # anything else, allowing us to address mount conflicts.
        active_mounts, not_mounts = self.sync_mounts(active_mounts,
                                                     not_auth,
                                                     vault_client)
        # Now handle everything else. If "best practices" are being
        # adhered to then every generic mountpoint should exist by now.
        # We handle "child" resources after the first batch
        sorted_resources = sorted(not_mounts, key=childless_first)
        for resource in sorted_resources:
            resource.sync(vault_client)
        # Any known mount that was never touched this run gets unmounted.
        for mount in self.mounts():
            if not find_backend(mount.path, active_mounts):
                mount.unmount(vault_client)
        if opt.remove_unknown:
            self.prune(vault_client)
    def prune(self, vault_client):
        """Will remove any mount point which is not actually defined
        in this context. """
        existing = getattr(vault_client,
                           SecretBackend.list_fun)()['data'].items()
        for mount_name, _values in existing:
            # ignore system paths and cubbyhole
            mount_path = normalize_vault_path(mount_name)
            if mount_path.startswith('sys') or mount_path == 'cubbyhole':
                continue
            exists = [resource.path
                      for resource in self.mounts()
                      if normalize_vault_path(resource.path) == mount_path]
            if not exists:
                LOG.info("removed unknown mount %s", mount_path)
                getattr(vault_client, SecretBackend.unmount_fun)(mount_path)
    def fetch(self, vault_client):
        """Updates the context based on the contents of the Vault
        server. Note that some resources can not be read after
        they have been written to and it is up to those classes
        to handle that case properly."""
        # Refresh backend state first; resources below consult it.
        backends = [(self.mounts, SecretBackend),
                    (self.auths, AuthBackend),
                    (self.logs, LogBackend)]
        for b_list, b_class in backends:
            backend_list = b_list()
            if backend_list:
                existing = getattr(vault_client, b_class.list_fun)()
                for backend in backend_list:
                    backend.fetch(vault_client, existing)
        for rsc in self.resources():
            if issubclass(type(rsc), Secret):
                # Only fetch secrets whose backend actually exists
                # (cubbyhole always exists).
                nc_exists = (rsc.mount != 'cubbyhole' and
                             find_backend(rsc.mount, self._mounts).existing)
                if nc_exists or rsc.mount == 'cubbyhole':
                    rsc.fetch(vault_client)
            elif issubclass(type(rsc), Auth):
                if find_backend(rsc.mount, self._auths).existing:
                    rsc.fetch(vault_client)
            elif issubclass(type(rsc), Mount):
                rsc.existing = find_backend(rsc.mount,
                                            self._mounts).existing
            else:
                rsc.fetch(vault_client)
        return self
|
class Context(object):
'''The overall context of an aomi session'''
@staticmethod
def load(config, opt):
'''Loads and returns a full context object based on the Secretfile'''
pass
def thaw(self, tmp_dir):
'''Will thaw every secret into an appropriate temporary location'''
pass
def freeze(self, dest_dir):
'''Freezes every resource within a context'''
pass
def __init__(self, opt):
pass
def mounts(self):
'''Secret backends within context'''
pass
def logs(self):
'''Audit log backends within context'''
pass
def auths(self):
'''Authentication backends within context'''
pass
def resources(self):
'''Vault resources within context'''
pass
def add(self, resource):
'''Add a resource to the context'''
pass
def remove(self, resource):
'''Removes a resource from the context'''
pass
def sync_policies(self, vault_client):
'''Synchronizes policies only'''
pass
def sync_auth(self, vault_client, resources):
'''Synchronizes auth mount wrappers. These happen
early in the cycle, to ensure that user backends
are proper. They may also be used to set mount
tuning'''
pass
def actually_mount(self, vault_client, resource, active_mounts):
'''Handle the actual (potential) mounting of a secret backend.
This is called in multiple contexts, but the action will always
be the same. If we were not aware of the mountpoint at the start
and it has not already been mounted, then mount it.'''
pass
def sync_mounts(self, active_mounts, resources, vault_client):
'''Synchronizes mount points. Removes things before
adding new.'''
pass
def sync(self, vault_client, opt):
'''Synchronizes the context to the Vault server. This
has the effect of updating every resource which is
in the context and has changes pending.'''
pass
def prune(self, vault_client):
'''Will remove any mount point which is not actually defined
in this context. '''
pass
def fetch(self, vault_client):
'''Updates the context based on the contents of the Vault
server. Note that some resources can not be read after
they have been written to and it is up to those classes
to handle that case properly.'''
pass
| 19 | 17 | 14 | 1 | 10 | 3 | 3 | 0.3 | 1 | 16 | 13 | 0 | 16 | 5 | 17 | 17 | 256 | 38 | 168 | 66 | 149 | 50 | 132 | 65 | 114 | 10 | 1 | 3 | 59 |
8,005 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/backend.py
|
aomi.model.backend.VaultBackend
|
class VaultBackend(object):
    """The abstract concept of a Vault backend"""
    # Subclasses set these to the hvac client method names used for
    # listing/mounting/unmounting this kind of backend.
    backend = None
    list_fun = None
    mount_fun = None
    unmount_fun = None
    # Path prefix used when reading/writing sys/mounts tune endpoints;
    # None disables tuning entirely (see LogBackend).
    tune_prefix = ""
    description = None
    def __str__(self):
        if self.backend == self.path:
            return self.backend
        return "%s %s" % (self.backend, self.path)
    def __init__(self, resource, opt, managed=True):
        """Build backend state from a resource's mount, presence and
        (optionally) its tune dictionary. Raises AomiData when a
        tunable has the wrong type."""
        self.path = sanitize_mount(resource.mount)
        self.backend = resource.backend
        self.existing = dict()
        self.present = resource.present
        self.config = dict()
        self.managed = managed
        if hasattr(resource, 'tune') and isinstance(resource.tune, dict):
            for tunable in MOUNT_TUNABLES:
                # MOUNT_TUNABLES entries are (key, expected_type) pairs.
                tunable_key = tunable[0]
                tunable_type = tunable[1]
                if tunable_key in resource.tune and \
                   not isinstance(resource.tune[tunable_key], tunable_type):
                    e_msg = "Mount tunable %s on %s must be of type %s" % \
                            (tunable_key, self.path, tunable_type)
                    raise aomi_excep.AomiData(e_msg)
                map_val(self.config, resource.tune, tunable_key)
            if 'description' in resource.tune:
                self.config['description'] = resource.tune['description']
        self.opt = opt
    def diff(self):
        """Determines if changes are needed for the Vault backend"""
        if not self.present:
            if self.existing:
                return DEL
            return NOOP
        is_diff = NOOP
        if self.present and self.existing:
            a_obj = self.config.copy()
            if self.config and diff_dict(a_obj, self.existing, True):
                is_diff = CHANGED
            # Description changes require a remount, hence CONFLICT.
            if self.description != self.existing.get('description'):
                is_diff = CONFLICT
        elif self.present and not self.existing:
            is_diff = ADD
        return is_diff
    def sync(self, vault_client):
        """Synchronizes the local and remote Vault resources. Has the net
        effect of adding backend if needed"""
        if self.present:
            if not self.existing:
                LOG.info("Mounting %s backend on %s",
                         self.backend, self.path)
                self.actually_mount(vault_client)
            else:
                LOG.info("%s backend already mounted on %s",
                         self.backend, self.path)
        else:
            if self.existing:
                LOG.info("Unmounting %s backend on %s",
                         self.backend, self.path)
                self.unmount(vault_client)
            else:
                LOG.info("%s backend already unmounted on %s",
                         self.backend, self.path)
        # Tunables can only be applied when the server reports a version.
        if self.present and vault_client.version:
            self.sync_tunables(vault_client)
    def sync_tunables(self, vault_client):
        """Synchtonizes any tunables we have set"""
        if not self.config:
            return
        a_prefix = self.tune_prefix
        if self.tune_prefix:
            a_prefix = "%s/" % self.tune_prefix
        v_path = "sys/mounts/%s%s/tune" % (a_prefix, self.path)
        a_obj = self.config.copy()
        # description is not a tunable; it is handled at mount time.
        if 'description' in a_obj:
            del a_obj['description']
        t_resp = vault_client.write(v_path, **a_obj)
        if t_resp and 'errors' in t_resp and t_resp['errors']:
            e_msg = "Unable to update tuning info for %s" % self
            raise aomi_excep.VaultData(e_msg)
    def fetch(self, vault_client, backends):
        """Updates local resource with context on whether this
        backend is actually mounted and available"""
        if not is_mounted(self.backend, self.path, backends) or \
           self.tune_prefix is None:
            return
        backend_details = get_backend(self.backend, self.path, backends)
        self.existing = backend_details['config']
        if backend_details['description']:
            self.existing['description'] = backend_details['description']
        if vault_client.version is None:
            return
        if not self.managed:
            return
        a_prefix = self.tune_prefix
        if self.tune_prefix:
            a_prefix = "%s/" % self.tune_prefix
        v_path = "sys/mounts/%s%s/tune" % (a_prefix, self.path)
        t_resp = vault_client.read(v_path)
        if 'data' not in t_resp:
            e_msg = "Unable to retrieve tuning info for %s" % self
            raise aomi_excep.VaultData(e_msg)
        # Replace existing with the tune data, folding in the
        # description from the backend listing when available.
        e_obj = t_resp['data']
        e_obj['description'] = None
        n_path = normalize_vault_path(self.path)
        if n_path in backends:
            a_mount = backends[n_path]
            if 'description' in a_mount and a_mount['description']:
                e_obj['description'] = a_mount['description']
        self.existing = e_obj
    def unmount(self, client):
        """Unmounts a backend within Vault"""
        getattr(client, self.unmount_fun)(mount_point=self.path)
    def actually_mount(self, client):
        """Actually mount something in Vault"""
        a_obj = self.config.copy()
        if 'description' in a_obj:
            del a_obj['description']
        try:
            # Only pass description/config kwargs that are actually set.
            m_fun = getattr(client, self.mount_fun)
            if self.description and a_obj:
                m_fun(self.backend,
                      mount_point=self.path,
                      description=self.description,
                      config=a_obj)
            elif self.description:
                m_fun(self.backend,
                      mount_point=self.path,
                      description=self.description)
            elif a_obj:
                m_fun(self.backend,
                      mount_point=self.path,
                      config=a_obj)
            else:
                m_fun(self.backend,
                      mount_point=self.path)
        except hvac.exceptions.InvalidRequest as exception:
            # Translate hvac's mount conflict into a friendlier error.
            match = re.match('existing mount at (?P<path>.+)', str(exception))
            if match:
                e_msg = "%s has a mountpoint conflict with %s" % \
                        (self.path, match.group('path'))
                raise aomi_excep.VaultConstraint(e_msg)
            else:
                raise
|
class VaultBackend(object):
'''The abstract concept of a Vault backend'''
def __str__(self):
pass
def __init__(self, resource, opt, managed=True):
pass
def diff(self):
'''Determines if changes are needed for the Vault backend'''
pass
def sync(self, vault_client):
'''Synchronizes the local and remote Vault resources. Has the net
effect of adding backend if needed'''
pass
def sync_tunables(self, vault_client):
'''Synchtonizes any tunables we have set'''
pass
def fetch(self, vault_client, backends):
'''Updates local resource with context on whether this
backend is actually mounted and available'''
pass
def unmount(self, client):
'''Unmounts a backend within Vault'''
pass
def actually_mount(self, client):
'''Actually mount something in Vault'''
pass
| 9 | 7 | 20 | 3 | 17 | 1 | 5 | 0.06 | 1 | 5 | 3 | 3 | 8 | 6 | 8 | 8 | 178 | 30 | 139 | 45 | 130 | 9 | 115 | 44 | 106 | 9 | 1 | 3 | 41 |
8,006 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/backend.py
|
aomi.model.backend.SecretBackend
|
class SecretBackend(VaultBackend):
    """Secret Backends for actual Vault resources"""
    # hvac client method names consumed generically by VaultBackend
    list_fun = 'list_secret_backends'
    mount_fun = 'enable_secret_backend'
    unmount_fun = 'disable_secret_backend'
|
class SecretBackend(VaultBackend):
'''Secret Backends for actual Vault resources'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 5 | 0 | 4 | 4 | 3 | 1 | 4 | 4 | 3 | 0 | 2 | 0 | 0 |
8,007 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/backend.py
|
aomi.model.backend.LogBackend
|
class LogBackend(VaultBackend):
    """Audit Log backends"""
    list_fun = 'list_audit_backends'
    mount_fun = 'enable_audit_backend'
    unmount_fun = 'disable_audit_backend'
    # Audit backends have no sys/mounts tune endpoint; None disables
    # tuning in VaultBackend.fetch/sync_tunables.
    tune_prefix = None
    def __init__(self, resource, opt, managed=True):
        super(LogBackend, self).__init__(resource, opt, managed)
        # Audit backends pass their resource options at enable time.
        self.obj = resource.obj()
    def actually_mount(self, client):
        """Enable the audit backend with the resource's options."""
        client.enable_audit_backend(self.backend, **self.obj)
    def unmount(self, client):
        """Disable the audit backend."""
        client.disable_audit_backend(self.backend)
|
class LogBackend(VaultBackend):
'''Audit Log backends'''
def __init__(self, resource, opt, managed=True):
pass
def actually_mount(self, client):
pass
def unmount(self, client):
pass
| 4 | 1 | 2 | 0 | 2 | 0 | 1 | 0.08 | 1 | 1 | 0 | 0 | 3 | 1 | 3 | 11 | 16 | 3 | 12 | 9 | 8 | 1 | 12 | 9 | 8 | 1 | 2 | 0 | 3 |
8,008 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/backend.py
|
aomi.model.backend.AuthBackend
|
class AuthBackend(VaultBackend):
    """Authentication backends for Vault access"""
    list_fun = 'list_auth_backends'
    mount_fun = 'enable_auth_backend'
    unmount_fun = 'disable_auth_backend'
    tune_prefix = '/auth'

    def actually_mount(self, client):
        """Enable the auth backend, passing a description when one is
        known. Unlike secret backends, auth backends take no config
        object at mount time."""
        m_fun = getattr(client, self.mount_fun)
        if self.description and self.config and 'description' in self.config:
            # A tune-supplied description wins over the class attribute.
            m_fun(self.backend,
                  mount_point=self.path,
                  description=self.config['description'])
        elif self.description:
            # Bug fix: this branch previously read
            # self.config['description'], which raises KeyError whenever
            # a description is set but no tune config carries one.
            m_fun(self.backend,
                  mount_point=self.path,
                  description=self.description)
        else:
            m_fun(self.backend,
                  mount_point=self.path)
|
class AuthBackend(VaultBackend):
'''Authentication backends for Vault access'''
def actually_mount(self, client):
pass
| 2 | 1 | 13 | 0 | 13 | 0 | 3 | 0.06 | 1 | 0 | 0 | 0 | 1 | 2 | 1 | 9 | 20 | 1 | 18 | 9 | 16 | 1 | 11 | 7 | 9 | 3 | 2 | 1 | 3 |
8,009 |
Autodesk/aomi
|
Autodesk_aomi/aomi/model/aws.py
|
aomi.model.aws.AWSTTL
|
class AWSTTL(Resource):
    """AWS Lease configuration resource."""
    # Marked as a child so it syncs after its parent AWS mount.
    child = True

    def __init__(self, mount, obj, opt):
        super(AWSTTL, self).__init__(obj, opt)
        self._obj = obj
        self.path = "%s/config/lease" % mount
|
class AWSTTL(Resource):
'''AWS Lease'''
def __init__(self, mount, obj, opt):
pass
| 2 | 1 | 4 | 0 | 4 | 0 | 1 | 0.17 | 1 | 1 | 0 | 0 | 1 | 2 | 1 | 22 | 8 | 1 | 6 | 5 | 4 | 1 | 6 | 5 | 4 | 1 | 2 | 0 | 1 |
8,010 |
Autodesk/aomi
|
Autodesk_aomi/tests/test_model_auth.py
|
test_model_auth.GeneratedTokenRoleTest
|
class GeneratedTokenRoleTest(unittest.TestCase):
    """Exercises TokenRole construction and diff classification."""

    def test_new_tokenrole(self):
        model = aomi.model.auth
        obj = generate_tokenrole_object()
        role = model.TokenRole(obj, {})
        assert role.role_name == obj['name']
        assert role.path == ("auth/token/roles/%s" % obj['name'])

    def test_diff_tokenrole(self):
        model = aomi.model.auth
        NOOP = 0
        CHANGED = 1
        ADD = 2
        DEL = 3
        OVERWRITE = 4
        obj_a = generate_tokenrole_object()
        role = model.TokenRole(obj_a, {})
        # fresh role with nothing existing should be an add
        assert role.existing == None
        assert role.diff() == ADD
        # identical existing data should be a no-op
        del obj_a['name']
        role.existing = obj_a
        assert role.diff() == NOOP
        # a different payload should register as changed
        obj_b = generate_tokenrole_object()
        del obj_b['name']
        role._obj = obj_b  # mutate token role
        assert role.diff() == CHANGED
        # absent role with existing data should be a delete
        role._obj = {}
        role.present = False
        assert role.diff() == DEL
|
class GeneratedTokenRoleTest(unittest.TestCase):
'''GeneratedTokenRoleTest'''
def test_new_tokenrole(self):
pass
def test_diff_tokenrole(self):
pass
| 3 | 1 | 20 | 5 | 14 | 3 | 1 | 0.21 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 74 | 44 | 11 | 28 | 15 | 25 | 6 | 28 | 15 | 25 | 1 | 2 | 0 | 2 |
8,011 |
Autodesk/aomi
|
Autodesk_aomi/tests/test_helpers.py
|
test_helpers.IsTaggedTest
|
class IsTaggedTest(unittest.TestCase):
    # Covers aomi.helpers.is_tagged(a, b). From the cases below, the
    # second argument appears to be the tags attached to a resource and
    # the first the tags requested by the user -- TODO confirm against
    # the helper's definition.
    def test_happy_path(self):
        # No tags requested, none present: trivially tagged.
        self.assertTrue(aomi.helpers.is_tagged([], []))
    def test_exclusion(self):
        self.assertFalse(aomi.helpers.is_tagged([], ['foo']))
        self.assertFalse(aomi.helpers.is_tagged(['foo'], ['bar']))
        self.assertFalse(aomi.helpers.is_tagged(['foo', 'bar'], ['foo']))
    def test_inclusion(self):
        self.assertTrue(aomi.helpers.is_tagged(['foo'], ['foo']))
        self.assertTrue(aomi.helpers.is_tagged(['foo'], ['foo', 'bar']))
|
class IsTaggedTest(unittest.TestCase):
def test_happy_path(self):
pass
def test_exclusion(self):
pass
def test_inclusion(self):
pass
| 4 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 75 | 12 | 2 | 10 | 4 | 6 | 0 | 10 | 4 | 6 | 1 | 2 | 0 | 3 |
8,012 |
Autodesk/cryptorito
|
Autodesk_cryptorito/tests/test_keybase.py
|
test_keybase.KeybaseTest
|
class KeybaseTest(unittest.TestCase):
    # Uses requests_mock to stub the Keybase user lookup HTTP call so
    # no network access happens during the test.
    def test_happy_path(self):
        with requests_mock.Mocker() as m:
            m.get(cryptorito.keybase_lookup_url(TEST_USER),
                  text=load_fixture(TEST_USER))
            keys = cryptorito.key_from_keybase(TEST_USER)
            assert keys['fingerprint'] == TEST_FINGERPRINT
|
class KeybaseTest(unittest.TestCase):
def test_happy_path(self):
pass
| 2 | 0 | 6 | 0 | 6 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 73 | 7 | 0 | 7 | 4 | 5 | 0 | 6 | 3 | 4 | 1 | 2 | 1 | 1 |
8,013 |
Autodesk/cryptorito
|
Autodesk_cryptorito/tests/test_gpg.py
|
test_gpg.HasKeys
|
class HasKeys(unittest.TestCase):
    # Stubs subprocess.check_output via flexmock so has_gpg_key never
    # shells out to a real gpg binary.
    def test_happypath(self):
        flexmock(subprocess) \
            .should_receive('check_output') \
            .replace_with(key_resp)
        assert(cryptorito.has_gpg_key(TEST_KEY))
    def test_unhappypath(self):
        # key_blank presumably simulates gpg reporting no keys.
        flexmock(subprocess) \
            .should_receive('check_output') \
            .replace_with(key_blank)
        self.assertFalse(cryptorito.has_gpg_key(TEST_KEY))
|
class HasKeys(unittest.TestCase):
def test_happypath(self):
pass
def test_unhappypath(self):
pass
| 3 | 0 | 6 | 1 | 5 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 74 | 14 | 3 | 11 | 3 | 8 | 0 | 7 | 3 | 4 | 1 | 2 | 0 | 2 |
8,014 |
Autodesk/cryptorito
|
Autodesk_cryptorito/tests/test_default.py
|
test_default.NotAStringHelperTest
|
class NotAStringHelperTest(unittest.TestCase):
    # not_a_string should be truthy for non-string values of any type.
    def test_happy_path(self):
        assert cryptorito.not_a_string(42)
        assert cryptorito.not_a_string(False)
        assert cryptorito.not_a_string(dict())
|
class NotAStringHelperTest(unittest.TestCase):
def test_happy_path(self):
pass
| 2 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 73 | 5 | 0 | 5 | 2 | 3 | 0 | 5 | 2 | 3 | 1 | 2 | 0 | 1 |
8,015 |
Autodesk/cryptorito
|
Autodesk_cryptorito/tests/test_default.py
|
test_default.HelperTest
|
class HelperTest(unittest.TestCase):
    def setUp(self):
        # Snapshot the environment; test_passphrase_file mutates it.
        self.og_environ = os.environ.copy()
    def tearDown(self):
        # Restore the environment so tests stay isolated.
        os.environ = self.og_environ
    def test_without_passphrase_file(self):
        assert cryptorito.passphrase_file() == []
    def test_passphrase_file(self):
        # Stub the gpg version check so no real binary is needed.
        flexmock(subprocess) \
            .should_receive('check_output') \
            .replace_with(mock_gpg_vsn)
        # Any readable file works as the passphrase file; use this one.
        os.environ['CRYPTORITO_PASSPHRASE_FILE'] = __file__
        assert cryptorito.passphrase_file()
|
class HelperTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_without_passphrase_file(self):
pass
def test_passphrase_file(self):
pass
| 5 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 4 | 1 | 4 | 76 | 17 | 4 | 13 | 6 | 8 | 0 | 11 | 6 | 6 | 1 | 2 | 0 | 4 |
8,016 |
Autodesk/cryptorito
|
Autodesk_cryptorito/tests/test_base64.py
|
test_base64.StringTests
|
class StringTests(unittest.TestCase):
    # Fixture data: a multi-byte UTF-8 sequence and a blob of random
    # binary, both decoded from base64 at class-definition time.
    ghost_emoji = portable_b64decode('8J+Ruwo=')
    some_binary = portable_b64decode('uRo/OptvvkT790yaPjql5OItfFUBSM2tM42QJkPM7qvMTn4tQClPjB6mpdSFDtyzuqGVrMGaHRKv7XuzlZPpWGbVzlCjIvN0nOUiBXSQsockEJwCwIaiwm/xxWSE9+P2zWdqt1J/Iuwv6Rq60qpMRTqWNJD5dDzbw4VdDQhxzgK4zN2Er+JQQqQctsj1XuM8xJtzBQsozt5ZCJso4/jsUsWrFgHPp5nu4whuT7ZSgthsGz+NXo1f6v4njJ705ZMjLW0zdnkx/14E8qGJCsDs8pCkekDn+K4gTLfzZHga/du8xtN6e/X97K2BbdVC8Obz684wnqdHLWc+bNNso+5XFtQbFbK6vBtGtZNmBeiVBo594Zr5xRxFPSfOHIKz0jB4U5He7xgh2C7AFh2SCy4fW1fwC5XxQoz1pRSiFTRbUr/dMHMn0ZaspVYUNPdZccM4xj8ip5k4fXVRTKFF1qEiFGohcfLdabCBXAkckOmGogdN0swOpoiNEohYksW0bkof89q1aRJl6tM9E2spH62XZXDmQFHIdxFFHP6zAl2t7zGB2vxDCpLgQg3l8RytryMfDR7MXXXy2kbhtFpIl45gFl/8u+aOc7fP4dLxacCbJNz3cO3iMXIPytwiaq5HJbgQ6ZgeGjZBniTCRLwRpOv3l3GRsLstdRJSk2KP+kwY9Tk=')
    def test_is_base64(self):
        # Anything freshly encoded must be recognized as base64.
        assert is_base64(portable_b64encode("foo"))
        assert is_base64(portable_b64encode(self.some_binary))
        assert is_base64(portable_b64encode(self.ghost_emoji))
        # Plain text and short digit strings must not be.
        self.assertFalse(is_base64("foo"))
        self.assertFalse(is_base64("2454"))
        self.assertFalse(is_base64("1234"))
    def test_happy_path(self):
        # encode/decode round trip should be the identity on text.
        print("AAAA %s" % portable_b64decode(portable_b64encode("foo")))
        assert polite_string(portable_b64decode(portable_b64encode("foo"))) == "foo"
|
class StringTests(unittest.TestCase):
def test_is_base64(self):
pass
def test_happy_path(self):
pass
| 3 | 0 | 5 | 0 | 5 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 74 | 15 | 2 | 13 | 5 | 10 | 0 | 13 | 5 | 10 | 1 | 2 | 0 | 2 |
8,017 |
Autodesk/cryptorito
|
Autodesk_cryptorito/cryptorito/__init__.py
|
cryptorito.CryptoritoError
|
class CryptoritoError(Exception):
    """We do not have complicated exceptions to be honest"""

    def __init__(self, message=None):
        """The only thing you can pass is a message, but
        even that is optional"""
        # Forward the message only when one was given, so args stays
        # empty for a bare CryptoritoError().
        extra = () if message is None else (message,)
        super(CryptoritoError, self).__init__(*extra)
|
class CryptoritoError(Exception):
'''We do not have complicated exceptions to be honest'''
def __init__(self, message=None):
'''The only thing you can pass is a message, but
even that is optional'''
pass
| 2 | 2 | 7 | 0 | 5 | 2 | 2 | 0.5 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 11 | 9 | 0 | 6 | 2 | 4 | 3 | 5 | 2 | 3 | 2 | 3 | 1 | 2 |
8,018 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/tests/test_engines.py
|
pyccc.tests.test_engines.no_context
|
class no_context():
"""context manager that does nothing -- useful if we need to conditionally apply a context
"""
def __enter__(self):
return None
def __exit__(self, exc_type, exc_value, traceback):
return False
|
class no_context():
'''context manager that does nothing -- useful if we need to conditionally apply a context
'''
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.4 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 2 | 8 | 1 | 5 | 3 | 2 | 2 | 5 | 3 | 2 | 1 | 0 | 0 | 2 |
8,019 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/engines/dockerengine.py
|
pyccc.engines.dockerengine.Docker
|
class Docker(EngineBase):
""" A compute engine - uses a docker server to run jobs
"""
USES_IMAGES = True
ABSPATHS = True
BULK_OUTPUT_FILE_THRESHOLD = 5
"""int: threshold for determining whether to copy files in bulk or one at a time.
Not a whole lot of justification for this number, just a rough heuristic
"""
def __init__(self, client=None, workingdir='/workingdir'):
""" Initialization:
Args:
client (docker.Client): a docker-py client. If not passed, we will try to create the
client from the job's environmental varaibles
workingdir (str): default working directory to create in the containers
"""
self.client = self.connect_to_docker(client)
self.default_wdir = workingdir
self.hostname = self.client.base_url
def connect_to_docker(self, client=None):
if isinstance(client, basestring):
client = du.get_docker_apiclient(client)
if client is None:
client = du.get_docker_apiclient(**docker.utils.kwargs_from_env())
return client
def __getstate__(self):
"""
We don't pickle the docker client, for now
"""
newdict = self.__dict__.copy()
if 'client' in newdict:
newdict['client'] = None
return newdict
def test_connection(self):
version = self.client.version()
return version
def get_job(self, jobid):
""" Return a Job object for the requested job id.
The returned object will be suitable for retrieving output, but depending on the engine,
may not populate all fields used at launch time (such as `job.inputs`, `job.commands`, etc.)
Args:
jobid (str): container id
Returns:
pyccc.job.Job: job object for this container
Raises:
pyccc.exceptions.JobNotFound: if no job could be located for this jobid
"""
import shlex
from pyccc.job import Job
job = Job(engine=self)
job.jobid = job.rundata.containerid = jobid
try:
jobdata = self.client.inspect_container(job.jobid)
except docker.errors.NotFound:
raise exceptions.JobNotFound(
'The daemon could not find containter "%s"' % job.jobid)
cmd = jobdata['Config']['Cmd']
entrypoint = jobdata['Config']['Entrypoint']
if len(cmd) == 3 and cmd[0:2] == ['sh', '-c']:
cmd = cmd[2]
elif entrypoint is not None:
cmd = entrypoint + cmd
if isinstance(cmd, list):
cmd = ' '.join(shlex.quote(x) for x in cmd)
job.command = cmd
job.env = jobdata['Config']['Env']
job.workingdir = jobdata['Config']['WorkingDir']
job.rundata.container = jobdata
return job
def submit(self, job):
""" Submit job to the engine
Args:
job (pyccc.job.Job): Job to submit
"""
self._check_job(job)
if job.workingdir is None:
job.workingdir = self.default_wdir
job.imageid = du.create_provisioned_image(self.client, job.image,
job.workingdir, job.inputs)
container_args = self._generate_container_args(job)
job.rundata.container = self.client.create_container(job.imageid, **container_args)
self.client.start(job.rundata.container)
job.rundata.containerid = job.rundata.container['Id']
job.jobid = job.rundata.containerid
def _generate_container_args(self, job):
container_args = dict(command="sh -c '%s'" % job.command,
working_dir=job.workingdir,
environment={'PYTHONIOENCODING':'utf-8'})
if job.env:
container_args['environment'].update(job.env)
volumes = []
binds = []
# mount the docker socket into the container (two ways to do this for backwards compat.)
if job.withdocker or job.engine_options.get('mount_docker_socket', False):
volumes.append('/var/run/docker.sock')
binds.append('/var/run/docker.sock:/var/run/docker.sock:rw')
# handle other mounted volumes
for volume, mount in job.engine_options.get('volumes', {}).items():
if isinstance(mount, (list, tuple)):
mountpoint, mode = mount
bind = '%s:%s:%s' % (volume, mountpoint, mode)
else:
mountpoint = mount
bind = '%s:%s' % (volume, mountpoint)
volumes.append(mountpoint)
binds.append(bind)
if volumes or binds:
container_args['volumes'] = volumes
container_args['host_config'] = self.client.create_host_config(binds=binds)
return container_args
def wait(self, job):
stat = self.client.wait(job.rundata.container)
if isinstance(stat, int): # i.e., docker<3
return stat
else: # i.e., docker>=3
return stat['StatusCode']
def kill(self, job):
self.client.kill(job.rundata.container)
def get_status(self, job):
inspect = self.client.inspect_container(job.rundata.containerid)
if inspect['State']['Running']:
return status.RUNNING
else:
return status.FINISHED
def get_directory(self, job, path):
docker_host = du.kwargs_from_client(self.client)
remotedir = files.DockerArchive(docker_host, job.rundata.containerid, path)
return remotedir
def dump_all_outputs(self, job, target, abspaths=None):
""" Specialized dumping strategy - copy the entire working directory, then discard
the input files that came along for the ride.
Not used if there are absolute paths
This is slow and wasteful if there are big input files
"""
import os
import shutil
from pathlib import Path
root = Path(native_str(target))
true_outputs = job.get_output()
if abspaths or len(true_outputs) < self.BULK_OUTPUT_FILE_THRESHOLD:
return super().dump_all_outputs(job, root, abspaths)
stagingdir = root / Path(native_str(job.workingdir)).name
workdir = job.get_directory(job.workingdir)
if not root.is_dir():
root.mkdir(parents=False)
if stagingdir.exists():
if PY2:
raise IOError('Path % exists' % stagingdir)
else:
raise FileExistsError(stagingdir)
workdir.put(str(root))
assert stagingdir.is_dir()
assert root in stagingdir.parents
for pathstr in true_outputs:
if os.path.isabs(pathstr):
continue
destpath = root / pathstr
currpath = stagingdir / pathstr
if not destpath.parent.is_dir():
destpath.parent.mkdir(parents=True)
currpath.rename(destpath)
shutil.rmtree(str(stagingdir))
def _list_output_files(self, job):
docker_diff = self.client.diff(job.rundata.container)
if docker_diff is None:
return {}
changed_files = [f['Path'] for f in docker_diff
if f['Kind'] in (CTR_MODIFIED, CTR_ADDED)]
file_paths = utils.remove_directories(changed_files)
docker_host = du.kwargs_from_client(self.client)
output_files = {}
for filename in file_paths:
# Return relative localpath unless it's not under the working directory
if filename.strip()[0] != '/':
relative_path = '%s/%s' % (job.workingdir, filename)
elif filename.startswith(job.workingdir):
relative_path = filename[len(job.workingdir):]
if len(relative_path) > 0 and relative_path[0] == '/':
relative_path = relative_path[1:]
else:
relative_path = filename
remotefile = files.LazyDockerCopy(docker_host, job.rundata.containerid, filename)
output_files[relative_path] = remotefile
return output_files
def _get_final_stds(self, job):
stdout = self.client.logs(job.rundata.container, stdout=True, stderr=False)
stderr = self.client.logs(job.rundata.container, stdout=False, stderr=True)
return stdout.decode('utf-8'), stderr.decode('utf-8')
|
class Docker(EngineBase):
''' A compute engine - uses a docker server to run jobs
'''
def __init__(self, client=None, workingdir='/workingdir'):
''' Initialization:
Args:
client (docker.Client): a docker-py client. If not passed, we will try to create the
client from the job's environmental varaibles
workingdir (str): default working directory to create in the containers
'''
pass
def connect_to_docker(self, client=None):
pass
def __getstate__(self):
'''
We don't pickle the docker client, for now
'''
pass
def test_connection(self):
pass
def get_job(self, jobid):
''' Return a Job object for the requested job id.
The returned object will be suitable for retrieving output, but depending on the engine,
may not populate all fields used at launch time (such as `job.inputs`, `job.commands`, etc.)
Args:
jobid (str): container id
Returns:
pyccc.job.Job: job object for this container
Raises:
pyccc.exceptions.JobNotFound: if no job could be located for this jobid
'''
pass
def submit(self, job):
''' Submit job to the engine
Args:
job (pyccc.job.Job): Job to submit
'''
pass
def _generate_container_args(self, job):
pass
def wait(self, job):
pass
def kill(self, job):
pass
def get_status(self, job):
pass
def get_directory(self, job, path):
pass
def dump_all_outputs(self, job, target, abspaths=None):
''' Specialized dumping strategy - copy the entire working directory, then discard
the input files that came along for the ride.
Not used if there are absolute paths
This is slow and wasteful if there are big input files
'''
pass
def _list_output_files(self, job):
pass
def _get_final_stds(self, job):
pass
| 15 | 6 | 15 | 2 | 11 | 2 | 3 | 0.25 | 1 | 10 | 2 | 0 | 14 | 3 | 14 | 31 | 236 | 48 | 152 | 60 | 132 | 38 | 140 | 60 | 120 | 8 | 2 | 3 | 41 |
8,020 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/picklers.py
|
pyccc.picklers.ReturningPickler
|
class ReturningPickler(pickle.Pickler):
""" Pickler for objects on the RETURN leg of the roundtrip
"""
def persistent_id(self, obj):
""" Replaces object reference
"""
if getattr(obj, '_persistent_ref', None) is not None:
return obj._persistent_ref
else:
return None
|
class ReturningPickler(pickle.Pickler):
''' Pickler for objects on the RETURN leg of the roundtrip
'''
def persistent_id(self, obj):
''' Replaces object reference
'''
pass
| 2 | 2 | 7 | 0 | 5 | 2 | 2 | 0.67 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 10 | 0 | 6 | 2 | 4 | 4 | 5 | 2 | 3 | 2 | 0 | 1 | 2 |
8,021 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/picklers.py
|
pyccc.picklers.ReturningUnpickler
|
class ReturningUnpickler(pickle.Unpickler):
""" Pickler for RETURNING objects that will retain references on roundtrip
"""
def persistent_load(self, pid):
return _weakmemos[pid]
|
class ReturningUnpickler(pickle.Unpickler):
''' Pickler for RETURNING objects that will retain references on roundtrip
'''
def persistent_load(self, pid):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 0.67 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 5 | 0 | 3 | 2 | 1 | 2 | 3 | 2 | 1 | 1 | 0 | 0 | 1 |
8,022 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/python.py
|
pyccc.python.PackagedFunction
|
class PackagedFunction(native.object):
"""
This object captures enough information to serialize, deserialize, and run a
python function
Specifically, this creates an object that, after pickling, can be unpickled and run,
and behave in *basically* the same way as the original function. HOWEVER, the function's
source code must be sent separately - that part can't be pickled.
Notes:
- This is *designed* to execute arbitrary code. Use with trusted sources only!
- This will work best with *pure* functions operating on well-behaved python objects.
- All relevant variables and arguments need to be pickle-able.
- Object methods that only operate on the object itself can be considered "pure".
- Function side effects are not tracked at all.
- Closure variables and module references will be sent along with the function
"""
def __init__(self, function_call, persist_references):
func = function_call.function
self.is_imethod = function_call.is_instancemethod
if self.is_imethod:
self.obj = func.__self__
self.imethod_name = func.__name__
else:
self.func_name = func.__name__
self.args = function_call.args
self.kwargs = function_call.kwargs
self.persist_references = persist_references
globalvars = src.get_global_vars(func)
self.global_closure = globalvars['vars']
self.global_modules = globalvars['modules']
self.global_functions = globalvars['functions']
def run(self, func=None):
"""
Evaluates the packaged function as func(*self.args,**self.kwargs)
If func is a method of an object, it's accessed as getattr(self.obj,__name__).
If it's a user-defined function, it needs to be passed in here because it can't
be serialized.
Returns:
object: function's return value
"""
to_run = self.prepare_namespace(func)
result = to_run(*self.args, **self.kwargs)
return result
def prepare_namespace(self, func):
"""
Prepares the function to be run after deserializing it.
Re-associates any previously bound variables and modules from the closure
Returns:
callable: ready-to-call function
"""
if self.is_imethod:
to_run = getattr(self.obj, self.imethod_name)
else:
to_run = func
for varname, modulename in self.global_modules.items():
to_run.__globals__[varname] = __import__(modulename)
if self.global_closure:
to_run.__globals__.update(self.global_closure)
if self.global_functions:
to_run.__globals__.update(self.global_functions)
return to_run
|
class PackagedFunction(native.object):
'''
This object captures enough information to serialize, deserialize, and run a
python function
Specifically, this creates an object that, after pickling, can be unpickled and run,
and behave in *basically* the same way as the original function. HOWEVER, the function's
source code must be sent separately - that part can't be pickled.
Notes:
- This is *designed* to execute arbitrary code. Use with trusted sources only!
- This will work best with *pure* functions operating on well-behaved python objects.
- All relevant variables and arguments need to be pickle-able.
- Object methods that only operate on the object itself can be considered "pure".
- Function side effects are not tracked at all.
- Closure variables and module references will be sent along with the function
'''
def __init__(self, function_call, persist_references):
pass
def run(self, func=None):
'''
Evaluates the packaged function as func(*self.args,**self.kwargs)
If func is a method of an object, it's accessed as getattr(self.obj,__name__).
If it's a user-defined function, it needs to be passed in here because it can't
be serialized.
Returns:
object: function's return value
'''
pass
def prepare_namespace(self, func):
'''
Prepares the function to be run after deserializing it.
Re-associates any previously bound variables and modules from the closure
Returns:
callable: ready-to-call function
'''
pass
| 4 | 3 | 16 | 1 | 10 | 5 | 3 | 0.88 | 1 | 0 | 0 | 0 | 3 | 10 | 3 | 3 | 68 | 8 | 32 | 20 | 28 | 28 | 30 | 20 | 26 | 5 | 1 | 1 | 8 |
8,023 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/python.py
|
pyccc.python.PythonCall
|
class PythonCall(object):
def __init__(self, function, *args, **kwargs):
self.function = function
self.args = args
self.kwargs = kwargs
try:
cls = function.__self__.__class__
except AttributeError:
self.is_instancemethod = False
else:
if function.__self__ == BUILTINS or function.__self__ is None:
self.is_instancemethod = False
else:
self.is_instancemethod = True
|
class PythonCall(object):
def __init__(self, function, *args, **kwargs):
pass
| 2 | 0 | 14 | 1 | 13 | 0 | 3 | 0 | 1 | 1 | 0 | 0 | 1 | 4 | 1 | 1 | 15 | 1 | 14 | 7 | 12 | 0 | 13 | 7 | 11 | 3 | 1 | 2 | 3 |
8,024 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/python.py
|
pyccc.python.PythonJob
|
class PythonJob(job.Job):
# @utils.doc_inherit
def __init__(self, engine, image, command,
interpreter=DEFAULT_INTERPRETER,
persist_references=False,
sendsource=True, **kwargs):
self._raised = False
self._updated_object = None
self._exception = None
self._traceback = None
self.sendsource = sendsource
self._function_result = None
self.interpreter = self._clean_interpreter_string(interpreter)
self.persist_references = persist_references
self.function_call = command
if 'inputs' not in kwargs:
kwargs['inputs'] = {}
inputs = kwargs['inputs']
# assemble the commands to run
python_files = self._get_python_files()
inputs.update(python_files)
command = '%s run_job.py' % self.interpreter
super(PythonJob, self).__init__(engine, image, command,
**kwargs)
@staticmethod
def _clean_interpreter_string(istr):
try:
float(istr)
except ValueError:
return istr
else:
return 'python' + str(istr)
def _get_python_files(self):
"""
Construct the files to send the remote host
Returns:
dict: dictionary of filenames and file objects
"""
python_files = {'run_job.py': PYTHON_JOB_FILE}
remote_function = PackagedFunction(self.function_call, self.persist_references)
if self.persist_references:
dumps = picklers.departure_dumps
else:
dumps = pickle.dumps
python_files['function.pkl'] = pyccc.BytesContainer(
dumps(remote_function, protocol=PICKLE_PROTOCOL),
name='function.pkl')
self._remote_function = remote_function
sourcefile = StringContainer(self._get_source(),
name='source.py')
python_files['source.py'] = sourcefile
return python_files
def _get_source(self, remote_function=None):
"""
Calls the appropriate source inspection to get any required source code
Will also get code for any functions referenced via closure (currently one level deep only)
Returns:
bytes: utf-8 encoded source code
"""
srclines = [u'# -*- coding: utf-8 -*-\n']
if self.sendsource:
func = self.function_call.function
if self.function_call.is_instancemethod:
obj = func.__self__.__class__
else:
obj = func
srclines.append(src.getsource(obj))
elif not self.function_call.is_instancemethod:
srclines.append("from %s import %s\n"%(self.function_call.function.__module__,
self.function_call.function.__name__))
if remote_function is not None and remote_function.global_functions:
for name, f in remote_function.global_functions.items():
srclines.append(u'\n# source code for function "%s"' % name)
srclines.append(src.getsource(f))
# This is the only source code needed from pyccc
srclines.append(PACKAGEDFUNCTIONSOURCE)
srccode = '\n'.join(srclines)
return srccode.encode('utf-8')
@property
def result(self):
"""The return value of the callback function if provided, or ``self.function_result`` if not
"""
self._ensure_finished()
if self.when_finished is None:
return self.function_result
else:
return self._callback_result
@property
def function_result(self):
""" The return value of the called python function
"""
self._ensure_finished()
if self._function_result is None:
self.reraise_remote_exception(force=True) # there's no result to return
try:
returnval = self.get_output('_function_return.pkl')
except KeyError:
raise ProgramFailure(self)
if self.persist_references:
loads = picklers.return_loads
else:
loads = pickle.loads
self._function_result = loads(returnval.read('rb'))
return self._function_result
@property
def updated_object(self):
"""
If the function was an object's method, return the new state of the object
Will re-raise any exceptions raised remotely
"""
if self._updated_object is None:
self.reraise_remote_exception()
self._updated_object = pickle.loads(self.get_output('_object_state.pkl').read('rb'))
return self._updated_object
@property
def exception(self):
"""
The exception object, if any, from the remote execution
"""
if self._exception is None:
if 'exception.pkl' in self.get_output():
self._raised = False
try:
self._exception = pickle.loads(self.get_output('exception.pkl').read('rb'))
except Exception as exc: # catches errors in unpickling the exception
self._exception = exc
self._traceback = self.get_output('traceback.txt').read()
else:
self._exception = False
return self._exception
def reraise_remote_exception(self, force=False):
"""
Raises exceptions from the remote execution
"""
# TODO: include debugging info / stack variables using tblib? - even if possible, this won't work without deps
import tblib
if (force or not self._raised) and self.exception:
self._raised = True
raise self._exception
|
class PythonJob(job.Job):
def __init__(self, engine, image, command,
interpreter=DEFAULT_INTERPRETER,
persist_references=False,
sendsource=True, **kwargs):
pass
@staticmethod
def _clean_interpreter_string(istr):
pass
def _get_python_files(self):
'''
Construct the files to send the remote host
Returns:
dict: dictionary of filenames and file objects
'''
pass
def _get_source(self, remote_function=None):
'''
Calls the appropriate source inspection to get any required source code
Will also get code for any functions referenced via closure (currently one level deep only)
Returns:
bytes: utf-8 encoded source code
'''
pass
@property
def result(self):
'''The return value of the callback function if provided, or ``self.function_result`` if not
'''
pass
@property
def function_result(self):
''' The return value of the called python function
'''
pass
@property
def updated_object(self):
'''
If the function was an object's method, return the new state of the object
Will re-raise any exceptions raised remotely
'''
pass
@property
def exception(self):
'''
The exception object, if any, from the remote execution
'''
pass
def reraise_remote_exception(self, force=False):
'''
Raises exceptions from the remote execution
'''
pass
| 15 | 7 | 17 | 2 | 12 | 4 | 3 | 0.3 | 1 | 8 | 2 | 0 | 8 | 10 | 9 | 29 | 165 | 26 | 110 | 43 | 91 | 33 | 91 | 34 | 80 | 6 | 2 | 3 | 26 |
8,025 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/tests/function_tests.py
|
pyccc.tests.function_tests.Cls
|
class Cls(object):
def __init__(self):
self.x = 0
def increment(self, by=1):
self.x += by
return self.x
|
class Cls(object):
def __init__(self):
pass
def increment(self, by=1):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 7 | 1 | 6 | 4 | 3 | 0 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
8,026 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/tests/test_engines.py
|
pyccc.tests.test_engines.MyRefObj
|
class MyRefObj(object):
_PERSIST_REFERENCES = True
def identity(self):
return self
def tagme(self):
self.tag = 'mytag'
return self
|
class MyRefObj(object):
def identity(self):
pass
def tagme(self):
pass
| 3 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 9 | 2 | 7 | 5 | 4 | 0 | 7 | 5 | 4 | 1 | 1 | 0 | 2 |
8,027 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/ui.py
|
pyccc.ui.FileBrowser
|
class FileBrowser(Tab):
def __init__(self, file_dict, ignore_ext=None, **kwargs):
if ignore_ext is None:
ignore_ext = 'pyo pyc'.split()
titles = []
file_list = [ipy.Box()]
ignores = set(ignore_ext)
for filename, fileobj in file_dict.items():
ext = filename.split('.')[-1]
if ext in ignores:
continue
file_display = FileView(filename, fileobj)
file_list.append(file_display)
titles.append(filename)
super(FileBrowser, self).__init__(file_list, **kwargs)
self.set_title(0, 'x')
for ititle, title in enumerate(titles):
self.set_title(ititle + 1, title)
|
class FileBrowser(Tab):
def __init__(self, file_dict, ignore_ext=None, **kwargs):
pass
| 2 | 0 | 18 | 1 | 17 | 0 | 5 | 0 | 1 | 4 | 1 | 0 | 1 | 0 | 1 | 1 | 19 | 1 | 18 | 9 | 16 | 0 | 18 | 9 | 16 | 5 | 1 | 2 | 5 |
8,028 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/ui.py
|
pyccc.ui.FileView
|
class FileView(Box):
CHUNK = 10000
TRUNCATE_MESSAGE = '... [click "See more" to continue]'
TEXTAREA_KWARGS = dict(font_family='monospace',
width='75%')
def __init__(self, filename, fileobj, **kwargs):
super(FileView, self).__init__(disabled=True)
self.filename = filename
self._string = None
self._current_pos = 0
self.load_more_button = None
self.textarea = None
self._fileobj = fileobj
# For files that need to be fetched, make a download button
if hasattr(fileobj, '_fetched') and not fileobj._fetched:
self.download_button = ipy.Button(description='Download')
self.children = [self.download_button]
self.download_button.on_click(self.handle_download_click)
# if it's file-like, get the _contents
elif hasattr(fileobj, 'read'):
try:
self._string = fileobj.read()
except UnicodeDecodeError:
self._string = '[NOT SHOWN - UNABLE TO DECODE FILE]'
self.render_string()
# Just display a string
else:
self._string = fileobj
self.render_string()
def render_string(self):
height = '%spx' % min(self._string.count('\n') * 16 + 36, 600)
try:
self.textarea = ipy.Textarea(self._string[:self.CHUNK],
layout=Layout(width='100%', height=height))
except traitlets.TraitError:
self.textarea = ipy.Textarea('[NOT SHOWN - UNABLE TO DECODE FILE]',
layout=Layout(height='300px',
**self.TEXTAREA_KWARGS))
return
finally:
self.children = [self.textarea]
self._current_pos = self.CHUNK
if len(self._string) > self.CHUNK:
self.textarea.value += self.TRUNCATE_MESSAGE
self.load_more_button = ipy.Button(description='See more')
self.load_more_button.on_click(self.load_more)
self.children = self.children + (self.load_more_button,)
def load_more(self, *args, **kwargs):
self._current_pos += self.CHUNK
if self._current_pos >= len(self._string):
self.textarea.value = self._string
self.children = tuple(c for c in self.children if c is not self.load_more_button)
else:
self.textarea.value = self._string[:self._current_pos] + self.TRUNCATE_MESSAGE
def handle_download_click(self, *args):
"""
Callback for download button. Downloads the file and replaces the button
with a view of the file.
:param args:
:return:
"""
self.download_button.on_click(self.handle_download_click,remove=True)
self.download_button.description = 'Downloading ...'
self._string = self._fileobj.read()
self.render_string()
|
class FileView(Box):
def __init__(self, filename, fileobj, **kwargs):
pass
def render_string(self):
pass
def load_more(self, *args, **kwargs):
pass
def handle_download_click(self, *args):
'''
Callback for download button. Downloads the file and replaces the button
with a view of the file.
:param args:
:return:
'''
pass
| 5 | 1 | 15 | 0 | 13 | 2 | 3 | 0.16 | 1 | 3 | 0 | 0 | 4 | 8 | 4 | 4 | 70 | 5 | 56 | 17 | 51 | 9 | 48 | 17 | 43 | 4 | 1 | 2 | 10 |
8,029 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/ui.py
|
pyccc.ui.JobStatusDisplay
|
class JobStatusDisplay(Box):
"""
To be mixed into pyccc job objects
"""
def __init__(self, job, **kwargs):
super(JobStatusDisplay, self).__init__(layout=ipy.Layout(flex_flow='column'))
self._job = job
self.update()
self.on_displayed(self.update)
def update(self, *args):
jobstat = self._job.status
status_display = StatusView(self._job)
if self._job.inputs:
input_browser = FileBrowser(self._job.inputs, margin=5, font_size=9)
else:
input_browser = ipy.HTML('No input files')
file_browser = ipy.Tab([input_browser])
file_browser.set_title(0, 'Input files')
if jobstat == status.FINISHED:
output_files = self._job.get_output()
if self._job.stdout:
output_files['Standard output'] = self._job.stdout
if self._job.stderr:
output_files['Standard error'] = self._job.stderr
output_browser = FileBrowser(output_files, margin=5, font_size=9)
file_browser.children = [input_browser, output_browser]
file_browser.set_title(1, 'Output files')
self.children = [status_display, file_browser]
else:
update_button = ipy.Button(description='Update')
update_button.on_click(self.update)
self.children = [status_display, update_button, file_browser]
|
class JobStatusDisplay(Box):
'''
To be mixed into pyccc job objects
'''
def __init__(self, job, **kwargs):
pass
def update(self, *args):
pass
| 3 | 1 | 15 | 1 | 14 | 0 | 3 | 0.1 | 1 | 3 | 2 | 0 | 2 | 2 | 2 | 2 | 36 | 4 | 29 | 12 | 26 | 3 | 27 | 12 | 24 | 5 | 1 | 2 | 6 |
8,030 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/picklers.py
|
pyccc.picklers.DepartingPickler
|
class DepartingPickler(pickle.Pickler):
""" Pickler for objects on the DEPARTURE leg of the roundtrip
Note: this will _probably_ only handle a single layer round trip for now, and will
fail in weird ways otherwise.
"""
def persistent_id(self, obj):
""" Tags objects with a persistent ID, but do NOT emit it
"""
if getattr(obj, '_PERSIST_REFERENCES', None):
objid = id(obj)
obj._persistent_ref = objid
_weakmemos[objid] = obj
return None
|
class DepartingPickler(pickle.Pickler):
''' Pickler for objects on the DEPARTURE leg of the roundtrip
Note: this will _probably_ only handle a single layer round trip for now, and will
fail in weird ways otherwise.
'''
def persistent_id(self, obj):
''' Tags objects with a persistent ID, but do NOT emit it
'''
pass
| 2 | 2 | 8 | 0 | 6 | 2 | 2 | 0.86 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 15 | 2 | 7 | 3 | 5 | 6 | 7 | 3 | 5 | 2 | 0 | 1 | 2 |
8,031 |
Autodesk/pyccc
|
Autodesk_pyccc/versioneer.py
|
versioneer.get_cmdclass.cmd_version
|
class cmd_version(Command):
description = "report generated version string"
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
print(" dirty: %s" % vers.get("dirty"))
if vers["error"]:
print(" error: %s" % vers["error"])
|
class cmd_version(Command):
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
pass
| 4 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 3 | 0 | 3 | 3 | 18 | 3 | 15 | 8 | 11 | 0 | 15 | 8 | 11 | 2 | 1 | 1 | 4 |
8,032 |
Autodesk/pyccc
|
Autodesk_pyccc/versioneer.py
|
versioneer.get_cmdclass.cmd_build_py
|
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib,
cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
|
class cmd_build_py(_build_py):
def run(self):
pass
| 2 | 0 | 12 | 0 | 10 | 2 | 2 | 0.18 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 13 | 0 | 11 | 6 | 9 | 2 | 10 | 6 | 8 | 2 | 1 | 1 | 2 |
8,033 |
Autodesk/pyccc
|
Autodesk_pyccc/versioneer.py
|
versioneer.get_cmdclass.cmd_build_exe
|
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(LONG %
{"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
})
|
class cmd_build_exe(_build_exe):
def run(self):
pass
| 2 | 0 | 19 | 1 | 18 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 20 | 1 | 19 | 8 | 17 | 0 | 13 | 7 | 11 | 1 | 1 | 1 | 1 |
8,034 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.DocInherit
|
class DocInherit(object):
"""
Allows methods to inherit docstrings from their superclasses
FROM http://code.activestate.com/recipes/576862/
"""
def __init__(self, mthd):
self.mthd = mthd
self.name = mthd.__name__
def __get__(self, obj, cls):
if obj:
return self.get_with_inst(obj, cls)
else:
return self.get_no_inst(cls)
def get_with_inst(self, obj, cls):
overridden = getattr(super(cls, obj), self.name, None)
@wraps(self.mthd, assigned=('__name__', '__module__'))
def f(*args, **kwargs):
return self.mthd(obj, *args, **kwargs)
return self.use_parent_doc(f, overridden)
def get_no_inst(self, cls):
for parent in cls.__mro__[1:]:
overridden = getattr(parent, self.name, None)
if overridden:
break
@wraps(self.mthd, assigned=('__name__', '__module__'))
def f(*args, **kwargs):
return self.mthd(*args, **kwargs)
return self.use_parent_doc(f, overridden)
def use_parent_doc(self, func, source):
if source is None:
raise NameError("Can't find '%s' in parents" % self.name)
func.__doc__ = source.__doc__
return func
|
class DocInherit(object):
'''
Allows methods to inherit docstrings from their superclasses
FROM http://code.activestate.com/recipes/576862/
'''
def __init__(self, mthd):
pass
def __get__(self, obj, cls):
pass
def get_with_inst(self, obj, cls):
pass
@wraps(self.mthd, assigned=('__name__', '__module__'))
def f(*args, **kwargs):
pass
def get_no_inst(self, cls):
pass
@wraps(self.mthd, assigned=('__name__', '__module__'))
def f(*args, **kwargs):
pass
def use_parent_doc(self, func, source):
pass
| 10 | 1 | 5 | 1 | 4 | 0 | 2 | 0.14 | 1 | 2 | 0 | 0 | 5 | 2 | 5 | 5 | 42 | 10 | 28 | 15 | 18 | 4 | 26 | 13 | 18 | 3 | 1 | 2 | 11 |
8,035 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.Alias
|
class Alias(object):
"""
Descriptor that calls a child object's method.
e.g.
>>> class A(object):
>>> childkeys = Alias('child.keys')
>>> child = dict()
>>>
>>> a = A()
>>> a.child['key'] = 'value'
>>> a.childkeys() #calls a.child.keys(), returns ['key']
"""
def __init__(self, objmethod):
objname, methodname = objmethod.split('.')
self.objname = objname
self.methodname = methodname
def __get__(self, instance, owner):
proxied = getattr(instance, self.objname)
return getattr(proxied,self.methodname)
|
class Alias(object):
'''
Descriptor that calls a child object's method.
e.g.
>>> class A(object):
>>> childkeys = Alias('child.keys')
>>> child = dict()
>>>
>>> a = A()
>>> a.child['key'] = 'value'
>>> a.childkeys() #calls a.child.keys(), returns ['key']
'''
def __init__(self, objmethod):
pass
def __get__(self, instance, owner):
pass
| 3 | 1 | 4 | 0 | 4 | 0 | 1 | 1.38 | 1 | 0 | 0 | 0 | 2 | 2 | 2 | 2 | 20 | 1 | 8 | 7 | 5 | 11 | 8 | 7 | 5 | 1 | 1 | 0 | 2 |
8,036 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.Categorizer
|
class Categorizer(dict):
"""
Create a dict of lists from an iterable, with dict keys given by keyfn
"""
def __init__(self,keyfn,iterable):
super(Categorizer,self).__init__()
self.keyfn = keyfn
for item in iterable:
self.add(item)
def add(self, item):
key = self.keyfn(item)
if key not in self:
self[key] = []
self[key].append(item)
|
class Categorizer(dict):
'''
Create a dict of lists from an iterable, with dict keys given by keyfn
'''
def __init__(self,keyfn,iterable):
pass
def add(self, item):
pass
| 3 | 1 | 5 | 0 | 5 | 0 | 2 | 0.27 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 29 | 15 | 1 | 11 | 6 | 8 | 3 | 11 | 6 | 8 | 2 | 2 | 1 | 4 |
8,037 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.DictLike
|
class DictLike(object):
__contains__ = Alias('children.__contains__')
__getitem__ = Alias('children.__getitem__')
__setitem__ = Alias('children.__setitem__')
update = Alias('children.update')
get = Alias('children.get')
values = Alias('children.values')
keys = Alias('children.keys')
items = Alias('children.items')
itervalues = Alias('children.itervalues')
iteritems = Alias('children.iteritems')
iterkeys = Alias('children.iterkeys')
def __init__(self,**kwargs):
self.children = {}
self.children.update(kwargs)
|
class DictLike(object):
def __init__(self,**kwargs):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 16 | 1 | 15 | 14 | 13 | 0 | 15 | 14 | 13 | 1 | 1 | 0 | 1 |
8,038 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.DotDict
|
class DotDict(dict):
"""Dict with items accessible as attributes"""
def __getattr__(self,item):
return self[item]
def __setattr__(self,item,val):
self[item] = val
|
class DotDict(dict):
'''Dict with items accessible as attributes'''
def __getattr__(self,item):
pass
def __setattr__(self,item,val):
pass
| 3 | 1 | 2 | 0 | 2 | 0 | 1 | 0.2 | 1 | 0 | 0 | 0 | 2 | 0 | 2 | 29 | 7 | 1 | 5 | 3 | 2 | 1 | 5 | 3 | 2 | 1 | 2 | 0 | 2 |
8,039 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.MarkdownTable
|
class MarkdownTable(object):
def __init__(self,*categories):
self.categories = categories
self.lines = []
def add_line(self,obj):
if hasattr(obj,'keys'):
newline = [obj.get(cat,'') for cat in self.categories]
else:
assert len(obj) == len(self.categories)
newline = obj
self.lines.append(newline)
def markdown(self,replace=None):
if replace is None: replace = {}
outlines = ['| '+' | '.join(self.categories)+' |',
'|-' +''.join('|-' for x in self.categories) + '|']
for line in self.lines:
nextline = [str(replace.get(val,val)) for val in line]
outlines.append('| '+' | '.join(nextline)+' |')
return '\n'.join(outlines)
|
class MarkdownTable(object):
def __init__(self,*categories):
pass
def add_line(self,obj):
pass
def markdown(self,replace=None):
pass
| 4 | 0 | 6 | 0 | 6 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 3 | 2 | 3 | 3 | 22 | 3 | 19 | 11 | 15 | 0 | 18 | 10 | 14 | 3 | 1 | 1 | 6 |
8,040 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/utils.py
|
pyccc.utils.PipedFile
|
class PipedFile(object):
"""
Allows us to pass data by filesystem localpath without ever writing it to disk
To prevent deadlock, we spawn a thread to write to the pipe
Call it as a context manager:
>>> with PipedFile(_conte_contentsname=_conten_contentsipepath:
>>> print open(pipepath,'r').read()
"""
def __init__(self, fileobj, filename='pipe'):
if type(fileobj) in (str, str):
self.fileobj = StringIO(fileobj)
else:
self.fileobj = fileobj
self.tempdir = None
assert '/' not in filename,"Filename must not include directory"
self.filename = filename
def __enter__(self):
self.tempdir = tempfile.mkdtemp()
self.pipe_path = os.path.join(self.tempdir, self.filename)
os.mkfifo(self.pipe_path)
self.pipe_thread = threading.Thread(target=self._write_to_pipe)
self.pipe_thread.start()
return self.pipe_path
def _write_to_pipe(self):
with open(self.pipe_path,'w') as pipe:
pipe.write(self.fileobj.read())
def __exit__(self, type, value, traceback):
if self.tempdir is not None:
shutil.rmtree(self.tempdir)
|
class PipedFile(object):
'''
Allows us to pass data by filesystem localpath without ever writing it to disk
To prevent deadlock, we spawn a thread to write to the pipe
Call it as a context manager:
>>> with PipedFile(_conte_contentsname=_conten_contentsipepath:
>>> print open(pipepath,'r').read()
'''
def __init__(self, fileobj, filename='pipe'):
pass
def __enter__(self):
pass
def _write_to_pipe(self):
pass
def __exit__(self, type, value, traceback):
pass
| 5 | 1 | 5 | 0 | 5 | 0 | 2 | 0.32 | 1 | 3 | 0 | 0 | 4 | 5 | 4 | 4 | 32 | 3 | 22 | 11 | 17 | 7 | 21 | 10 | 16 | 2 | 1 | 1 | 6 |
8,041 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/static/run_job.py
|
run_job.MappedUnpickler
|
class MappedUnpickler(pickle.Unpickler):
RENAMETABLE = {'pyccc.python': 'source',
'__main__': 'source'}
def find_class(self, module, name):
""" This override is here to help pickle find the modules that classes are defined in.
It does three things:
1) remaps the "PackagedFunction" class from pyccc to the `source.py` module.
2) Remaps any classes created in the client's '__main__' to the `source.py` module
3) Creates on-the-fly modules to store any other classes present in source.py
References:
This is a modified version of the 2-only recipe from
https://wiki.python.org/moin/UsingPickle/RenamingModules.
It's been modified for 2/3 cross-compatibility """
import pickle
modname = self.RENAMETABLE.get(module, module)
try:
# can't use ``super`` here (not 2/3 compatible)
klass = pickle.Unpickler.find_class(self, modname, name)
except (ImportError, RuntimeError):
definition = getattr(source, name)
newmod = _makemod(modname)
sys.modules[modname] = newmod
setattr(newmod, name, definition)
klass = pickle.Unpickler.find_class(self, newmod.__name__, name)
klass.__module__ = module
return klass
|
class MappedUnpickler(pickle.Unpickler):
def find_class(self, module, name):
''' This override is here to help pickle find the modules that classes are defined in.
It does three things:
1) remaps the "PackagedFunction" class from pyccc to the `source.py` module.
2) Remaps any classes created in the client's '__main__' to the `source.py` module
3) Creates on-the-fly modules to store any other classes present in source.py
References:
This is a modified version of the 2-only recipe from
https://wiki.python.org/moin/UsingPickle/RenamingModules.
It's been modified for 2/3 cross-compatibility '''
pass
| 2 | 1 | 28 | 5 | 13 | 10 | 2 | 0.63 | 0 | 2 | 0 | 0 | 1 | 0 | 1 | 1 | 32 | 6 | 16 | 8 | 13 | 10 | 15 | 8 | 12 | 2 | 0 | 1 | 2 |
8,042 |
Autodesk/pyccc
|
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Autodesk_pyccc/versioneer.py
|
versioneer.get_cmdclass.cmd_sdist
|
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile,
self._versioneer_generated_versions)
|
class cmd_sdist(_sdist):
def run(self):
pass
def make_release_tree(self, base_dir, files):
pass
| 3 | 0 | 9 | 0 | 7 | 3 | 1 | 0.36 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 20 | 1 | 14 | 8 | 11 | 5 | 13 | 8 | 10 | 1 | 1 | 0 | 2 |
8,043 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/job.py
|
pyccc.job.Job
|
class Job(object):
""" Specification for a computational job.
This class is mainly a data container that can interact with the computational engines.
Args:
engine (pycc.engines.EngineBase): compute engine
image (str): docker image to use
command (str): command to run in the docker image
name (str): name of this job (optional)
submit (bool): submit job to engine immediately (default: True if engine and image are
both supplied, False otherwise)
inputs (Mapping[str,files.FileContainer]): dict containing input file names and their
contents (which can be either a FileContainer or just a string)
on_status_update (callable): function that can be called as ``func(job)``; will be called
locally whenever the job's status field is updated
withdocker (bool): whether this job needs access to a docker daemon
when_finished (callable): function that can be called as ``func(job)``; will be called
locally once, when this job completes
numcpus (int): number of CPUs required (default:1)
runtime (int): kill job if the runtime exceeds this value (in seconds) (default: 1 hour)`
engine_options (dict): additional engine-specific options
workingdir (str): working directory in the execution environment (i.e., on the local
system for a subprocess, or inside the container for a docker engine)
env (Dict[str,str]): custom environment variables for the Job
"""
def __init__(self, engine=None,
image=None,
command=None,
name='untitled',
submit=True,
inputs=None,
withdocker=False,
numcpus=1,
runtime=3600,
on_status_update=None,
when_finished=None,
workingdir=None,
engine_options=None,
env=None):
self.name = name
self.engine = engine
self.image = image
self.command = if_not_none(command, '')
self.engine_options = if_not_none(engine_options, {})
self.workingdir = workingdir
self.rundata = DotDict()
self.env = if_not_none(env, {})
self.inputs = if_not_none(inputs, {})
if self.inputs: # translate strings into file objects
for filename, fileobj in inputs.items():
if isinstance(fileobj, basestring):
self.inputs[filename] = files.StringContainer(fileobj)
else:
self.inputs[filename] = fileobj
self.on_status_update = on_status_update
self.when_finished = when_finished
self.numcpus = numcpus
self.runtime = runtime
self.withdocker = withdocker
self._reset()
if submit and self.engine and self.image:
self.submit()
def _reset(self):
self._submitted = False
self._final_stdout = None
self._final_stderr = None
self._finished = False
self._callback_result = None
self._output_files = None
self.jobid = None
self._stopped = None
kill = EngineFunction('kill')
get_stdout_stream = EngineFunction('get_stdoutstream')
get_stderr_stream = EngineFunction('get_stdoutstream')
get_engine_description = EngineFunction('get_engine_description')
_get_final_stds = EngineFunction('_get_final_stds')
_list_output_files = EngineFunction('_list_output_files')
def __str__(self):
desc = ['Job "%s" status:%s' % (self.name, self.status)]
if self.jobid: desc.append('jobid:%s' % (self.jobid,) )
if self.engine: desc.append('engine:%s' % type(self.engine).__name__)
return ' '.join(desc)
def __repr__(self):
s = str(self)
if self.engine:
s += ' host:%s' % self.engine.hostname
if not self.jobid:
s += ' at %s' % hex(id(self))
return '<%s>' % s
def __getstate__(self):
state = self.__dict__.copy()
state.pop('subproc', None)
return state
def submit(self, wait=False, resubmit=False):
""" Submit this job to the assigned engine.
Args:
wait (bool): wait until the job completes?
resubmit (bool): clear all job info and resubmit the job?
Raises:
ValueError: If the job has been previously submitted (and resubmit=False)
"""
if self._submitted:
if resubmit:
self._reset()
else:
raise ValueError('This job has already been submitted')
self.engine.submit(self)
self._submitted = True
if wait: self.wait()
def wait(self):
"""Wait for job to finish"""
returncode = self.engine.wait(self)
self._ensure_finished()
return returncode
@property
def exitcode(self):
if not self._finished:
raise pyccc.JobStillRunning(self)
return self.wait()
@property
def status(self):
"""
Returns status of 'queued', 'running', 'finished' or 'error'
"""
if self._stopped:
return self._stopped
elif self.jobid:
stat = self.engine.get_status(self)
if stat in status.DONE_STATES:
self._stopped = stat
return stat
else:
return "Unsubmitted"
@property
def stopped(self):
_ = self.status
return bool(self._stopped)
def _ensure_finished(self):
"""
To be called after job has finished.
Retreives stdout, stderr, and list of available files
:return:
"""
if self._finished:
return
stat = self.status
if stat not in status.DONE_STATES:
raise pyccc.JobStillRunning(self)
if stat != status.FINISHED:
raise pyccc.EngineError(self, 'Internal error while running job (status:%s)' %
stat)
self._output_files = self.engine._list_output_files(self)
self._final_stdout, self._final_stderr = self.engine._get_final_stds(self)
self._finished = True
if self.when_finished is not None:
self._callback_result = self.when_finished(self)
@property
def result(self):
"""
Returns:
Result of the callback function, if present, otherwise none.
"""
self._ensure_finished()
return self._callback_result
@property
def stdout(self):
self._ensure_finished()
return self._final_stdout
@property
def stderr(self):
self._ensure_finished()
return self._final_stderr
def get_output(self, filename=None):
"""
Return abstract file references to complete files
- returns a dict if filename is None,
or just the specific file otherwise
"""
self._ensure_finished()
if filename:
return self._output_files[filename]
else:
return self._output_files
def get_directory(self, path):
"""
Get a reference to the directory at the specified path
Note:
This function will succeed even if the specified path
does not exist, as it just generates a reference. You won't
encounter an exception until you actually try to access the
non-existent file
"""
return self.engine.get_directory(self, path)
def glob_output(self, pattern):
""" Return dict of all files that match the glob pattern
"""
self._ensure_finished()
filenames = self.get_output()
matches = fnmatch.filter(filenames.keys(), pattern)
return {f: filenames[f] for f in matches}
def get_display_object(self):
"""Return a jupyter widget"""
from .ui import JobStatusDisplay, widgets_enabled
if widgets_enabled:
return JobStatusDisplay(self)
else:
return 'Job "%s" launched. id:%s' % (self.name, self.jobid)
def dump_all_outputs(self, target, abspaths=None, exist_ok=False, update_references=True):
""" Dump all job outputs to a given directory
Output files under the workign directory will be written to the same relative
path under the directory target
Depending on engine implementation, this is often faster than iterating through
all outputs and writing them one-by-one
Params:
target (str): directory to write outputs to.
abspaths (str): subdirectory under target to write
exist_ok (bool): raise exception if directory already exists
update_references (bool): update internal outputs to reference the dumped files,
rather than their original locations
"""
self._ensure_finished()
if not os.path.exists(target) or not exist_ok:
os.mkdir(target)
self.engine.dump_all_outputs(self, target, abspaths)
if update_references:
self.use_local_output_tree(target, abspaths)
def use_local_output_tree(self, target, abspaths=None):
""" Use to update references after they have been dumped somewhere locally
"""
for path in list(self._output_files):
if not os.path.isabs(path):
localpath = os.path.join(target, path)
else:
if abspaths:
localpath = os.path.join(target, abspaths, path.lstrip('/'))
else:
continue
assert os.path.exists(localpath)
if os.path.isdir(localpath):
self._output_files[path] = files.LocalDirectoryReference(localpath)
else:
self._output_files[path] = files.LocalFile(localpath)
|
class Job(object):
''' Specification for a computational job.
This class is mainly a data container that can interact with the computational engines.
Args:
engine (pycc.engines.EngineBase): compute engine
image (str): docker image to use
command (str): command to run in the docker image
name (str): name of this job (optional)
submit (bool): submit job to engine immediately (default: True if engine and image are
both supplied, False otherwise)
inputs (Mapping[str,files.FileContainer]): dict containing input file names and their
contents (which can be either a FileContainer or just a string)
on_status_update (callable): function that can be called as ``func(job)``; will be called
locally whenever the job's status field is updated
withdocker (bool): whether this job needs access to a docker daemon
when_finished (callable): function that can be called as ``func(job)``; will be called
locally once, when this job completes
numcpus (int): number of CPUs required (default:1)
runtime (int): kill job if the runtime exceeds this value (in seconds) (default: 1 hour)`
engine_options (dict): additional engine-specific options
workingdir (str): working directory in the execution environment (i.e., on the local
system for a subprocess, or inside the container for a docker engine)
env (Dict[str,str]): custom environment variables for the Job
'''
def __init__(self, engine=None,
image=None,
command=None,
name='untitled',
submit=True,
inputs=None,
withdocker=False,
numcpus=1,
runtime=3600,
on_status_update=None,
when_finished=None,
workingdir=None,
engine_options=None,
env=None):
pass
def _reset(self):
pass
def __str__(self):
pass
def __repr__(self):
pass
def __getstate__(self):
pass
def submit(self, wait=False, resubmit=False):
''' Submit this job to the assigned engine.
Args:
wait (bool): wait until the job completes?
resubmit (bool): clear all job info and resubmit the job?
Raises:
ValueError: If the job has been previously submitted (and resubmit=False)
'''
pass
def wait(self):
'''Wait for job to finish'''
pass
@property
def exitcode(self):
pass
@property
def status(self):
'''
Returns status of 'queued', 'running', 'finished' or 'error'
'''
pass
@property
def stopped(self):
pass
def _ensure_finished(self):
'''
To be called after job has finished.
Retreives stdout, stderr, and list of available files
:return:
'''
pass
@property
def result(self):
'''
Returns:
Result of the callback function, if present, otherwise none.
'''
pass
@property
def stdout(self):
pass
@property
def stderr(self):
pass
def get_output(self, filename=None):
'''
Return abstract file references to complete files
- returns a dict if filename is None,
or just the specific file otherwise
'''
pass
def get_directory(self, path):
'''
Get a reference to the directory at the specified path
Note:
This function will succeed even if the specified path
does not exist, as it just generates a reference. You won't
encounter an exception until you actually try to access the
non-existent file
'''
pass
def glob_output(self, pattern):
''' Return dict of all files that match the glob pattern
'''
pass
def get_display_object(self):
'''Return a jupyter widget'''
pass
def dump_all_outputs(self, target, abspaths=None, exist_ok=False, update_references=True):
''' Dump all job outputs to a given directory
Output files under the workign directory will be written to the same relative
path under the directory target
Depending on engine implementation, this is often faster than iterating through
all outputs and writing them one-by-one
Params:
target (str): directory to write outputs to.
abspaths (str): subdirectory under target to write
exist_ok (bool): raise exception if directory already exists
update_references (bool): update internal outputs to reference the dumped files,
rather than their original locations
'''
pass
def use_local_output_tree(self, target, abspaths=None):
''' Use to update references after they have been dumped somewhere locally
'''
pass
| 27 | 12 | 11 | 1 | 8 | 3 | 2 | 0.44 | 1 | 7 | 2 | 1 | 20 | 22 | 20 | 20 | 277 | 36 | 168 | 81 | 127 | 74 | 142 | 62 | 120 | 5 | 1 | 3 | 47 |
8,044 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/ui.py
|
pyccc.ui.StatusView
|
class StatusView(Box):
STATUS_STRING = ('<h5>Job: %s</h5>'
'<b>Provider:</b> %s<br>'
'<b>JobId:</b> %s<br>'
'<b>Image: </b>%s<br>'
'<b>Command: </b>%s<br>'
'<b>Status:</b> %s</font>')
def __init__(self, job, **kwargs):
kwargs.setdefault('orientation', 'vertical')
super(StatusView,self).__init__(**kwargs)
self._job = job
stat = job.status
statstring = self.STATUS_STRING % (job.name,
str(job.engine),
job.jobid,
job.image,
job.command,
stat)
try:
statstring += '<br><b>Exit code: </b> %s</br>' % job.exitcode
except JobStillRunning:
pass
text = ipy.HTML(statstring)
if stat == status.QUEUED:
bar_spec = dict(value=1, bar_style='danger')
elif stat == status.RUNNING:
bar_spec = dict(value=50, bar_style='info')
elif stat == status.FINISHED:
bar_spec = dict(value=100, bar_style='success')
else:
bar_spec = dict(value=100, bar_style='danger')
bar = ipy.FloatProgress(**bar_spec)
self.children = [text, bar]
|
class StatusView(Box):
def __init__(self, job, **kwargs):
pass
| 2 | 0 | 29 | 3 | 26 | 0 | 5 | 0 | 1 | 4 | 1 | 0 | 1 | 2 | 1 | 1 | 37 | 4 | 33 | 10 | 31 | 0 | 20 | 10 | 18 | 5 | 1 | 1 | 5 |
8,045 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/stringcontainer.py
|
pyccc.files.stringcontainer.StringContainer
|
class StringContainer(FileReferenceBase):
""" In-memory file stored as a text string
Args:
contents (str OR bytes): contents of the file
name (str): descriptive name for the container (highly optional)
encoding (str): default encoding (for both encoding strings and decoding bytes). If
not specified, default system encoding is used (usually utf-8)
Note:
This handles both unicode (known as `unicode` in py2 and `str` in py3) and raw bytestrings
(`str` in py2 and `bytes` in py3).
"""
def __init__(self, contents, name=None, encoding=ENCODING):
self.source = name
self.sourcetype = 'runtime'
self.localpath = None
self._contents = contents
self.encoding = encoding
def open(self, mode='r', encoding=None):
"""Return file-like object
Args:
mode (str): access mode (only reading modes are supported)
encoding (str): encoding type (only for binary access)
Returns:
io.BytesIO OR io.TextIOWrapper: buffer accessing the file as bytes or characters
"""
access_type = self._get_access_type(mode)
if encoding is None:
encoding = self.encoding
# here, we face the task of returning the correct data type
if access_type == 'b':
if not self._isbytes:
content = self._contents.encode(encoding) # unicode in, bytes out
else:
content = self._contents # bytes in, bytes out
return io.BytesIO(content)
else:
assert access_type == 't'
if PYVERSION == 2 and self._isbytes:
return io.BytesIO(self._contents) # bytes in, bytes out (python 2 only)
elif self._isbytes:
content = self._contents.decode(encoding) # bytes in, unicode out
else:
content = self._contents # unicode in, unicode out
return io.StringIO(content)
def size_bytes(self, encoding=ENCODING):
if not self._isbytes:
return len(self._contents.encode(encoding))
else:
return len(self._contents)
@property
def _isbytes(self):
if PYVERSION == 2:
return not isinstance(self._contents, unicode)
else:
assert PYVERSION >= 3
return isinstance(self._contents, bytes)
def put(self, filename, encoding=None):
"""Write the file to the given path
Args:
filename (str): path to write this file to
encoding (str): file encoding (default: system default)
Returns:
LocalFile: reference to the copy of the file stored at ``filename``
"""
from . import LocalFile
if os.path.isdir(filename) and self.source is None:
raise ValueError("Cannot write this object to "
"directory %s without an explicit filename." % filename)
target = get_target_path(filename, self.source)
if encoding is None:
encoding = self.encoding
if self._isbytes:
kwargs = {'mode': 'wb'}
else:
kwargs = {'mode': 'w', 'encoding': encoding}
with open(target, **kwargs) as outfile:
outfile.write(self._contents)
return LocalFile(target, encoded_with=encoding)
|
class StringContainer(FileReferenceBase):
''' In-memory file stored as a text string
Args:
contents (str OR bytes): contents of the file
name (str): descriptive name for the container (highly optional)
encoding (str): default encoding (for both encoding strings and decoding bytes). If
not specified, default system encoding is used (usually utf-8)
Note:
This handles both unicode (known as `unicode` in py2 and `str` in py3) and raw bytestrings
(`str` in py2 and `bytes` in py3).
'''
def __init__(self, contents, name=None, encoding=ENCODING):
pass
def open(self, mode='r', encoding=None):
'''Return file-like object
Args:
mode (str): access mode (only reading modes are supported)
encoding (str): encoding type (only for binary access)
Returns:
io.BytesIO OR io.TextIOWrapper: buffer accessing the file as bytes or characters
'''
pass
def size_bytes(self, encoding=ENCODING):
pass
@property
def _isbytes(self):
pass
def put(self, filename, encoding=None):
'''Write the file to the given path
Args:
filename (str): path to write this file to
encoding (str): file encoding (default: system default)
Returns:
LocalFile: reference to the copy of the file stored at ``filename``
'''
pass
| 7 | 3 | 16 | 2 | 10 | 4 | 3 | 0.57 | 1 | 3 | 1 | 0 | 5 | 5 | 5 | 12 | 96 | 18 | 53 | 18 | 45 | 30 | 44 | 16 | 37 | 6 | 2 | 2 | 15 |
8,046 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.DockerMachineError
|
class DockerMachineError(Exception):
""" Failures related to connecting to docker machines
"""
|
class DockerMachineError(Exception):
''' Failures related to connecting to docker machines
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
8,047 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.EngineError
|
class EngineError(JobExceptionBase):
""" The engine encountered an error while trying to execute this job
"""
|
class EngineError(JobExceptionBase):
''' The engine encountered an error while trying to execute this job
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 11 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
8,048 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.EngineTestError
|
class EngineTestError(Exception):
""" Raised when an engine fails basic connection tests
Args:
engine (pyccc.engines.base.EngineBase): the little engine that couldn't
"""
def __init__(self, engine):
super(EngineTestError, self).__init__('Failed to run jobs on Engine: %s' % engine)
|
class EngineTestError(Exception):
''' Raised when an engine fails basic connection tests
Args:
engine (pyccc.engines.base.EngineBase): the little engine that couldn't
'''
def __init__(self, engine):
pass
| 2 | 1 | 2 | 0 | 2 | 0 | 1 | 1.33 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 11 | 8 | 1 | 3 | 2 | 1 | 4 | 3 | 2 | 1 | 1 | 3 | 0 | 1 |
8,049 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.JobErrorState
|
class JobErrorState(EngineError):
""" DEPRECATED in favor of the more accurately named EngineError"
"""
|
class JobErrorState(EngineError):
''' DEPRECATED in favor of the more accurately named EngineError"
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 5 | 0 | 0 |
8,050 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.JobExceptionBase
|
class JobExceptionBase(Exception):
def __init__(self, job, msg=None):
self.job = job
self.msg = msg
|
class JobExceptionBase(Exception):
def __init__(self, job, msg=None):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 3 | 1 | 2 | 1 | 11 | 4 | 0 | 4 | 4 | 2 | 0 | 4 | 4 | 2 | 1 | 3 | 0 | 1 |
8,051 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.JobNotFound
|
class JobNotFound(Exception):
""" The requested job was not found
"""
|
class JobNotFound(Exception):
''' The requested job was not found
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
8,052 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.JobStillRunning
|
class JobStillRunning(JobExceptionBase):
""" Raised when a job's status is not "Finshed" or "Error"
"""
|
class JobStillRunning(JobExceptionBase):
''' Raised when a job's status is not "Finshed" or "Error"
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
8,053 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.NotARegularFileError
|
class NotARegularFileError(Exception):
""" The requested path exists but does not correspond to a regular file
"""
|
class NotARegularFileError(Exception):
''' The requested path exists but does not correspond to a regular file
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
8,054 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/job.py
|
pyccc.job.EngineFunction
|
class EngineFunction(object):
"""
Allows you to call
job.engine.function_name(job) by just calling
job.function_name()
"""
def __init__(self, function_name):
self.name = function_name
def __get__(self, obj, owner):
func = getattr(obj.engine, self.name)
return lambda: func(obj)
|
class EngineFunction(object):
'''
Allows you to call
job.engine.function_name(job) by just calling
job.function_name()
'''
def __init__(self, function_name):
pass
def __get__(self, obj, owner):
pass
| 3 | 1 | 3 | 0 | 3 | 0 | 1 | 0.83 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 12 | 1 | 6 | 5 | 3 | 5 | 6 | 5 | 3 | 1 | 1 | 0 | 2 |
8,055 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.ProgramFailure
|
class ProgramFailure(Exception):
def __init__(self, job, msg=None, **kwargs):
try:
self.stdout = job.stdout
except Exception as exc:
self.stdout = exc
stdout_line = 'No stdout returned.'
else:
stdout_line = 'STDOUT: ' + self.stdout
try:
self.stderr = job.stderr
except Exception as exc:
self.stderr = exc
stderr_line = 'No stderr returned.'
else:
stderr_line = 'STDERR: ' + self.stderr
if msg is None:
lines = ['The desired job could not be started in the current execution environment.',
stdout_line,
stderr_line]
msg = '\n'.join(lines)
super().__init__(msg, **kwargs)
|
class ProgramFailure(Exception):
def __init__(self, job, msg=None, **kwargs):
pass
| 2 | 0 | 24 | 3 | 21 | 0 | 4 | 0 | 1 | 1 | 0 | 0 | 1 | 2 | 1 | 11 | 25 | 3 | 22 | 8 | 20 | 0 | 20 | 7 | 18 | 4 | 3 | 1 | 4 |
8,056 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.TimeoutError
|
class TimeoutError(JobExceptionBase):
""" This job's status is not "Finshed" or "Error"
"""
|
class TimeoutError(JobExceptionBase):
''' This job's status is not "Finshed" or "Error"
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 4 | 0 | 0 |
8,057 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/exceptions.py
|
pyccc.exceptions.PathError
|
class PathError(Exception):
""" The engine can't fulfill the requested input or output filesystem path
"""
|
class PathError(Exception):
''' The engine can't fulfill the requested input or output filesystem path
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 | 3 | 0 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 0 | 3 | 0 | 0 |
8,058 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/bytecontainer.py
|
pyccc.files.bytecontainer.BytesContainer
|
class BytesContainer(FileReferenceBase):
""" Holds a file as bytes in memory.
Note:
This class holds a file in memory and is therefore not recommended for large files.
``BytesContainer`` is the reference implementation for a :class:`FileReferenceBase`
subclasses
Args:
contents (bytes): contents of the file
encoded_with (str): encoding of the file (default: system default)
"""
def __init__(self, contents, encoded_with=None, name=None):
self._contents = contents
self.encoded_with = encoded_with
self.source = name
self.localpath = None
self.sourcetype = 'runtime'
def size_bytes(self):
return len(self._contents)
def put(self, filename, encoding=None):
"""Write the file to the given path
Args:
filename(str): path to write this file to
Returns:
LocalFile: reference to the copy of the file stored at ``filename``
"""
from . import LocalFile
if os.path.isdir(filename) and self.source is None:
raise ValueError("Cannot write this object to "
"directory %s without an explicit filename." % filename)
target = get_target_path(filename, self.source)
if (encoding is not None) and (encoding != self.encoded_with):
raise ValueError('%s is already encoded as "%s"' % self, self.encoded_with)
with self.open('rb') as infile, open(target, 'wb') as outfile:
for line in infile:
outfile.write(line)
return LocalFile(target)
def open(self, mode='r', encoding=None):
"""Return file-like object
Args:
mode (str): access mode (only reading modes are supported)
encoding (str): text decoding method for text access (default: system default)
Returns:
io.BytesIO OR io.TextIOWrapper: buffer accessing the file as bytes or characters
"""
access_type = self._get_access_type(mode)
if access_type == 't' and encoding is not None and encoding != self.encoded_with:
warnings.warn('Attempting to decode %s as "%s", but encoding is declared as "%s"'
% (self, encoding, self.encoded_with))
if encoding is None:
encoding = self.encoded_with
buffer = io.BytesIO(self._contents)
if access_type == 'b':
return buffer
else:
return io.TextIOWrapper(buffer, encoding=encoding)
def __str__(self):
return 'In-memory file from %s (%d chars)' % (self.source, len(self._contents))
def __repr__(self):
return '<%s @ %s>' % (self.__str__(), id(self))
|
class BytesContainer(FileReferenceBase):
''' Holds a file as bytes in memory.
Note:
This class holds a file in memory and is therefore not recommended for large files.
``BytesContainer`` is the reference implementation for a :class:`FileReferenceBase`
subclasses
Args:
contents (bytes): contents of the file
encoded_with (str): encoding of the file (default: system default)
'''
def __init__(self, contents, encoded_with=None, name=None):
pass
def size_bytes(self):
pass
def put(self, filename, encoding=None):
'''Write the file to the given path
Args:
filename(str): path to write this file to
Returns:
LocalFile: reference to the copy of the file stored at ``filename``
'''
pass
def open(self, mode='r', encoding=None):
'''Return file-like object
Args:
mode (str): access mode (only reading modes are supported)
encoding (str): text decoding method for text access (default: system default)
Returns:
io.BytesIO OR io.TextIOWrapper: buffer accessing the file as bytes or characters
'''
pass
def __str__(self):
pass
def __repr__(self):
pass
| 7 | 3 | 10 | 2 | 6 | 2 | 2 | 0.59 | 1 | 2 | 1 | 2 | 6 | 5 | 6 | 13 | 78 | 19 | 37 | 18 | 29 | 22 | 34 | 17 | 26 | 4 | 2 | 2 | 12 |
8,059 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/remotefiles.py
|
pyccc.files.remotefiles._FetchFunction
|
class _FetchFunction(object):
"""Convenience descriptor for methods that need to download a file before doing anything"""
def __init__(self, funcname):
self.funcname = funcname
def __get__(self, instance, cls):
instance.download()
return getattr(super(LazyFetcherBase, instance), self.funcname)
|
class _FetchFunction(object):
'''Convenience descriptor for methods that need to download a file before doing anything'''
def __init__(self, funcname):
pass
def __get__(self, instance, cls):
pass
| 3 | 1 | 3 | 0 | 3 | 0 | 1 | 0.17 | 1 | 2 | 1 | 0 | 2 | 1 | 2 | 2 | 8 | 1 | 6 | 4 | 3 | 1 | 6 | 4 | 3 | 1 | 1 | 0 | 2 |
8,060 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/remotefiles.py
|
pyccc.files.remotefiles.LazyDockerCopy
|
class LazyDockerCopy(LazyFetcherBase):
"""
Lazily copies the file from the worker.
This is, of course, problematic if the worker is not accessible from the client.
"""
def __init__(self, dockerhost, containerid, containerpath):
self.source = "%s (%s)://%s" % (dockerhost, containerid, containerpath)
self.sourcetype = 'Docker container'
self.dockerhost = dockerhost
self.containerpath = containerpath
self.containerid = containerid
self.basename = os.path.basename(containerpath)
super(LazyDockerCopy, self).__init__()
def _fetch(self):
import shutil
# extracts the stream into a disk-spooled file-like object
tarfile_path = os.path.basename(self.containerpath)
stream = self._get_tarstream()
try:
with tempfile.SpooledTemporaryFile() as buffer:
for d in stream:
buffer.write(d)
buffer.seek(0)
tar = tarfile.open(fileobj=buffer)
filestream = tar.extractfile(tarfile_path)
if filestream is None:
fileinfo = tar.getmember(tarfile_path)
if fileinfo.type == tarfile.DIRTYPE:
from future.utils import PY2
if PY2:
import errno
raise OSError(self, errno=errno.EISDIR)
else:
raise IsADirectoryError(self)
else:
raise exceptions.NotARegularFileError(self)
self._open_tmpfile()
try:
shutil.copyfileobj(filestream, self.tmpfile)
finally:
self.tmpfile.close()
finally:
stream.close()
self.localpath = self.tmpfile.name
self._fetched = True
def _get_tarstream(self):
from .. import docker_utils as du
client = du.get_docker_apiclient(**self.dockerhost)
args = (self.containerid, self.containerpath)
if hasattr(client, 'get_archive'): # handle different docker-py versions
request, meta = client.get_archive(*args)
else:
request = client.copy(*args)
return request
|
class LazyDockerCopy(LazyFetcherBase):
'''
Lazily copies the file from the worker.
This is, of course, problematic if the worker is not accessible from the client.
'''
def __init__(self, dockerhost, containerid, containerpath):
pass
def _fetch(self):
pass
def _get_tarstream(self):
pass
| 4 | 1 | 18 | 2 | 16 | 1 | 3 | 0.12 | 1 | 5 | 1 | 1 | 3 | 8 | 3 | 36 | 62 | 8 | 49 | 26 | 41 | 6 | 44 | 25 | 36 | 5 | 7 | 5 | 8 |
8,061 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/remotefiles.py
|
pyccc.files.remotefiles.HttpContainer
|
class HttpContainer(LazyFetcherBase):
"""
Lazily fetched file on the web. Will cache it in CACHEDIR ('/tmp/cyborgcache')
by default, then treat it as a CachedFile class
Unlike other remote files, this one doesn't preserve the file contents when pickled;
instead, we assume that it should still be accessible (not always a good assumption!)
"""
def __reduce__(self):
return self.__class__, (self.source,)
def __init__(self, url):
self.source = url
self.sourcetype = 'HTTP request'
super(HttpContainer, self).__init__()
def _fetch(self):
self._open_tmpfile()
request = requests.get(self.source)
self.tmpfile.write(request.content)
self.tmpfile.close()
self.localpath = self.tmpfile.name
self._fetched = True
|
class HttpContainer(LazyFetcherBase):
'''
Lazily fetched file on the web. Will cache it in CACHEDIR ('/tmp/cyborgcache')
by default, then treat it as a CachedFile class
Unlike other remote files, this one doesn't preserve the file contents when pickled;
instead, we assume that it should still be accessible (not always a good assumption!)
'''
def __reduce__(self):
pass
def __init__(self, url):
pass
def _fetch(self):
pass
| 4 | 1 | 4 | 0 | 4 | 0 | 1 | 0.43 | 1 | 1 | 0 | 0 | 3 | 4 | 3 | 36 | 23 | 3 | 14 | 9 | 10 | 6 | 14 | 9 | 10 | 1 | 7 | 0 | 3 |
8,062 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/bytecontainer.py
|
pyccc.files.bytecontainer.BZ2String
|
class BZ2String(BytesContainer):
""" BZ2-compressed file
"""
def __init__(self, contents, *args, **kwargs):
self._size = len(contents)
super(BZ2String, self).__init__(contents, *args, **kwargs)
def size_bytes(self):
return self._size
@property
def _contents(self):
import bz2
return bz2.decompress(self._contents)
@_contents.setter
def _contents(self, value):
import bz2
self._contents = bz2.compress(value, 1)
def __str__(self):
return 'In-memory BZ2-compressed file from %s' % self.source
|
class BZ2String(BytesContainer):
''' BZ2-compressed file
'''
def __init__(self, contents, *args, **kwargs):
pass
def size_bytes(self):
pass
@property
def _contents(self):
pass
@_contents.setter
def _contents(self):
pass
def __str__(self):
pass
| 8 | 1 | 3 | 0 | 3 | 0 | 1 | 0.13 | 1 | 1 | 0 | 0 | 5 | 1 | 5 | 18 | 22 | 4 | 16 | 11 | 6 | 2 | 14 | 9 | 6 | 1 | 3 | 0 | 5 |
8,063 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/localfiles.py
|
pyccc.files.localfiles.FileContainer
|
class FileContainer(BytesContainer):
""" In-memory file reference.
Args:
filename (str): name of the file to read
encoded_with (str): encoding of the file (default: system default)
"""
def __init__(self, filename, encoded_with=None):
with open(filename, 'rb') as infile:
contents = infile.read()
super().__init__(contents, encoded_with=encoded_with, name=filename)
self.source = filename
self.sourcetype = '%s' % socket.gethostname()
self.localpath = None
def __str__(self):
return 'In-memory file from %s: %s (%d chars)' % (self.sourcetype, self.source, len(self._contents))
def __repr__(self):
return '<%s>' % self.__str__()
|
class FileContainer(BytesContainer):
''' In-memory file reference.
Args:
filename (str): name of the file to read
encoded_with (str): encoding of the file (default: system default)
'''
def __init__(self, filename, encoded_with=None):
pass
def __str__(self):
pass
def __repr__(self):
pass
| 4 | 1 | 4 | 0 | 4 | 0 | 1 | 0.42 | 1 | 1 | 0 | 1 | 3 | 3 | 3 | 16 | 21 | 4 | 12 | 9 | 8 | 5 | 12 | 8 | 8 | 1 | 3 | 1 | 3 |
8,064 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/localfiles.py
|
pyccc.files.localfiles.CachedFile
|
class CachedFile(LocalFile):
"""
Store a copy of the file in a caching directory; delete it if this goes out of scope.
If pickled, the file gets slurped into memory.
"""
def __init__(self, filecontainer):
self.source = filecontainer.source
self.sourcetype = filecontainer.sourcetype
self.localpath = self._open_tmpfile()
filecontainer.put(self.localpath)
def _open_tmpfile(self, **kwargs):
"""
Open a temporary, unique file in CACHEDIR (/tmp/cyborgcache) by default.
Leave it open, assign file handle to self.tmpfile
**kwargs are passed to tempfile.NamedTemporaryFile
"""
self.tmpfile = get_tempfile(**kwargs)
path = self.tmpfile.name
return path
def __str__(self):
return 'Cached file from %s @ %s' % (self.source, self.localpath)
|
class CachedFile(LocalFile):
'''
Store a copy of the file in a caching directory; delete it if this goes out of scope.
If pickled, the file gets slurped into memory.
'''
def __init__(self, filecontainer):
pass
def _open_tmpfile(self, **kwargs):
'''
Open a temporary, unique file in CACHEDIR (/tmp/cyborgcache) by default.
Leave it open, assign file handle to self.tmpfile
**kwargs are passed to tempfile.NamedTemporaryFile
'''
pass
def __str__(self):
pass
| 4 | 2 | 6 | 0 | 4 | 2 | 1 | 0.75 | 1 | 0 | 0 | 1 | 3 | 4 | 3 | 28 | 24 | 3 | 12 | 9 | 8 | 9 | 12 | 9 | 8 | 1 | 5 | 0 | 3 |
8,065 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/localfiles.py
|
pyccc.files.localfiles.LocalFile
|
class LocalFile(FileContainer):
""" A reference to a local file.
Note:
This class is not designed to work with files that will be moved or edited.
Args:
path (str): path to file
check_exists (bool): make sure the file exists when this object is created
"""
def __reduce__(self):
return StringContainer, (self.read(),)
def __init__(self, path, encoded_with=None, check_exists=True):
if check_exists and not os.path.exists(path):
raise IOError('File not found: %s' % path)
self.source = path
self.localpath = self.source
self.sourcetype = 'Local file on %s' % socket.gethostname()
self.encoded_with = encoded_with
def size_bytes(self):
import os
return os.path.getsize(self.localpath)
def put(self, filename, encoding=None):
target = get_target_path(filename, self.source)
if encoding is not None:
raise ValueError('Cannot encode as %s - this file is already encoded')
shutil.copy(self.localpath, target)
return LocalFile(target)
def open(self, mode='r', encoding=None):
"""Return file-like object (actually opens the file for this class)"""
access_type = self._get_access_type(mode)
return open(self.localpath, 'r'+access_type, encoding=encoding)
def read(self, mode='r', encoding=None):
return self.open(mode=mode, encoding=encoding).read()
def __iter__(self):
return iter(self.open())
def readlines(self):
return self.open().readlines()
def __str__(self):
return 'Local file reference %s' % self.source
|
class LocalFile(FileContainer):
''' A reference to a local file.
Note:
This class is not designed to work with files that will be moved or edited.
Args:
path (str): path to file
check_exists (bool): make sure the file exists when this object is created
'''
def __reduce__(self):
pass
def __init__(self, path, encoded_with=None, check_exists=True):
pass
def size_bytes(self):
pass
def put(self, filename, encoding=None):
pass
def open(self, mode='r', encoding=None):
'''Return file-like object (actually opens the file for this class)'''
pass
def read(self, mode='r', encoding=None):
pass
def __iter__(self):
pass
def readlines(self):
pass
def __str__(self):
pass
| 10 | 2 | 3 | 0 | 3 | 0 | 1 | 0.27 | 1 | 2 | 1 | 1 | 9 | 4 | 9 | 25 | 50 | 12 | 30 | 17 | 19 | 8 | 30 | 17 | 19 | 2 | 4 | 1 | 11 |
8,066 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/directory.py
|
pyccc.files.directory.DockerArchive
|
class DockerArchive(DirectoryArchive, LazyDockerCopy):
"""
Reference to an archived directory from a docker container.
Notes:
- This is currently a bit of a frankenclass
- Because it requires access to a docker daemon, this object is not particularly portable.
"""
def __init__(self, *args, **kwargs):
LazyDockerCopy.__init__(self, *args, **kwargs)
self.archive_path = None
self.dirname = self.basename
def put(self, destination):
""" Copy the referenced directory to this path
Args:
destination (str): path to put this directory (which must NOT already exist)
"""
if not self._fetched:
self._fetch()
DirectoryArchive.put(self, destination)
put.__doc__ = DirectoryArchive.put.__doc__
def _fetch(self):
self.archive_path = self._open_tmpfile()
stream = self._get_tarstream()
for d in stream:
self.tmpfile.write(d)
stream.close()
self.tmpfile.close()
|
class DockerArchive(DirectoryArchive, LazyDockerCopy):
'''
Reference to an archived directory from a docker container.
Notes:
- This is currently a bit of a frankenclass
- Because it requires access to a docker daemon, this object is not particularly portable.
'''
def __init__(self, *args, **kwargs):
pass
def put(self, destination):
''' Copy the referenced directory to this path
Args:
destination (str): path to put this directory (which must NOT already exist)
'''
pass
def _fetch(self):
pass
| 4 | 2 | 7 | 0 | 5 | 1 | 2 | 0.59 | 2 | 0 | 0 | 0 | 3 | 2 | 3 | 41 | 32 | 5 | 17 | 8 | 13 | 10 | 17 | 8 | 13 | 2 | 8 | 1 | 5 |
8,067 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/directory.py
|
pyccc.files.directory.DirectoryReference
|
class DirectoryReference(object):
pass
|
class DirectoryReference(object):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 2 | 0 | 0 | 0 | 0 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 1 | 0 | 0 |
8,068 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/directory.py
|
pyccc.files.directory.DirectoryArchive
|
class DirectoryArchive(DirectoryReference):
"""A tar (or tar.gz) archive of a directory
All files in this directory must be under a directory named "dirname". No other files
will be expanded
Args:
archive_path (str): path to the existing archive
dirname (str): name that this directory reference expands to (this is not checked!)
"""
def __init__(self, archive_path, dirname):
self.archive_path = archive_path
self.dirname = dirname
def put(self, destination):
""" Copy the referenced directory to this path
Note:
This ignores anything not in the desired directory, given by ``self.dirname``.
Args:
destination (str): path to put this directory (which must NOT already exist)
References:
https://stackoverflow.com/a/8261083/1958900
"""
target = get_target_path(destination, self.dirname)
valid_paths = (self.dirname, './%s' % self.dirname)
with tarfile.open(self.archive_path, 'r:*') as tf:
members = []
for tarinfo in tf:
# Get only files under the directory `self.dirname`
pathsplit = os.path.normpath(tarinfo.path).split(os.sep)
if pathsplit[0] not in valid_paths:
print('WARNING: skipped file "%s" in archive; not in directory "%s"' %
(tarinfo.path, self.dirname))
continue
if len(pathsplit) == 1:
continue
tarinfo.name = os.path.join(*pathsplit[1:])
members.append(tarinfo)
if not members:
raise ValueError("No files under path directory '%s' in this tarfile")
tf.extractall(target, members)
|
class DirectoryArchive(DirectoryReference):
'''A tar (or tar.gz) archive of a directory
All files in this directory must be under a directory named "dirname". No other files
will be expanded
Args:
archive_path (str): path to the existing archive
dirname (str): name that this directory reference expands to (this is not checked!)
'''
def __init__(self, archive_path, dirname):
pass
def put(self, destination):
''' Copy the referenced directory to this path
Note:
This ignores anything not in the desired directory, given by ``self.dirname``.
Args:
destination (str): path to put this directory (which must NOT already exist)
References:
https://stackoverflow.com/a/8261083/1958900
'''
pass
| 3 | 2 | 18 | 3 | 11 | 5 | 3 | 0.73 | 1 | 1 | 0 | 1 | 2 | 2 | 2 | 2 | 47 | 9 | 22 | 11 | 19 | 16 | 21 | 10 | 18 | 5 | 2 | 3 | 6 |
8,069 |
Autodesk/pyccc
|
Autodesk_pyccc/pyccc/files/directory.py
|
pyccc.files.directory.LocalDirectoryReference
|
class LocalDirectoryReference(DirectoryReference):
""" This is a reference to a specific directory on the local filesystem.
This allows entire directories to be staged into the dockerfile as input
"""
def __init__(self, localpath):
self.localpath = localpath
def put(self, destination):
""" Copy the referenced directory to this path
The semantics of this command are similar to unix ``cp``: if ``destination`` already
exists, the copied directory will be put at ``[destination] // [basename(localpath)]``. If
it does not already exist, the directory will be renamed to this path (the parent directory
must exist).
Args:
destination (str): path to put this directory
"""
target = get_target_path(destination, self.localpath)
shutil.copytree(self.localpath, target)
|
class LocalDirectoryReference(DirectoryReference):
''' This is a reference to a specific directory on the local filesystem.
This allows entire directories to be staged into the dockerfile as input
'''
def __init__(self, localpath):
pass
def put(self, destination):
''' Copy the referenced directory to this path
The semantics of this command are similar to unix ``cp``: if ``destination`` already
exists, the copied directory will be put at ``[destination] // [basename(localpath)]``. If
it does not already exist, the directory will be renamed to this path (the parent directory
must exist).
Args:
destination (str): path to put this directory
'''
pass
| 3 | 2 | 8 | 1 | 3 | 4 | 1 | 1.83 | 1 | 0 | 0 | 0 | 2 | 1 | 2 | 2 | 21 | 4 | 6 | 5 | 3 | 11 | 6 | 5 | 3 | 1 | 2 | 0 | 2 |
8,070 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/errors.py
|
bugsy.errors.BugsyException
|
class BugsyException(Exception):
"""
If while interacting with Bugzilla and we try do something that is not
supported this error will be raised.
"""
def __init__(self, msg, error_code=None):
self.msg = msg
self.code = error_code
def __str__(self):
return "Message: {message} Code: {code}".format(message=self.msg,
code=self.code)
|
class BugsyException(Exception):
'''
If while interacting with Bugzilla and we try do something that is not
supported this error will be raised.
'''
def __init__(self, msg, error_code=None):
pass
def __str__(self):
pass
| 3 | 1 | 3 | 0 | 3 | 0 | 1 | 0.57 | 1 | 0 | 0 | 4 | 2 | 2 | 2 | 12 | 12 | 1 | 7 | 5 | 4 | 4 | 6 | 5 | 3 | 1 | 3 | 0 | 2 |
8,071 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/errors.py
|
bugsy.errors.LoginException
|
class LoginException(BugsyException):
"""
If a username and password are passed in but we don't receive a token
then this error will be raised.
"""
pass
|
class LoginException(BugsyException):
'''
If a username and password are passed in but we don't receive a token
then this error will be raised.
'''
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
8,072 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/errors.py
|
bugsy.errors.BugException
|
class BugException(BugsyException):
"""
If we try do something that is not allowed to a bug then
this error is raised
"""
pass
|
class BugException(BugsyException):
'''
If we try do something that is not allowed to a bug then
this error is raised
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
8,073 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/errors.py
|
bugsy.errors.AttachmentException
|
class AttachmentException(BugsyException):
"""
If we try do something that is not allowed to an attachment then
this error is raised
"""
pass
|
class AttachmentException(BugsyException):
'''
If we try do something that is not allowed to an attachment then
this error is raised
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
8,074 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/bugsy.py
|
bugsy.bugsy.Bugsy
|
class Bugsy(object):
"""
Bugsy allows easy getting and putting of Bugzilla bugs
"""
DEFAULT_SEARCH = ['version', 'id', 'summary', 'status', 'op_sys',
'resolution', 'product', 'component', 'platform',
'whiteboard']
def __init__(
self,
username=None,
password=None,
userid=None,
cookie=None,
api_key=None,
bugzilla_url='https://bugzilla.mozilla.org/rest'
):
"""
Initialises a new instance of Bugsy
:param username: Username to login with. Defaults to None
:param password: Password to login with. Defaults to None
:param userid: User ID to login with. Defaults to None
:param cookie: Cookie to login with. Defaults to None
:param apikey: API key to use. Defaults to None.
:param bugzilla_url: URL endpoint to interact with. Defaults to
https://bugzilla.mozilla.org/rest
If a api_key is passed in, Bugsy will use this for authenticating
requests. While not required to perform requests, if a username is
passed in along with api_key, we will validate that the api key is
valid for this username. Otherwise the api key is blindly used
later.
If a username AND password are passed in Bugsy will try get a login
token from Bugzilla. If we can't login then a LoginException will
be raised.
If a userid AND cookie are passed in Bugsy will create a login
token from them.
If no username was passed in it will then try to get the username
from Bugzilla.
"""
self.api_key = api_key
self.username = username
self.password = password
self.userid = userid
self.cookie = cookie
self.bugzilla_url = bugzilla_url
self.token = None
self.session = requests.Session()
self._have_auth = False
# Prefer API keys over all other auth methods.
if self.api_key:
if self.username:
result = self.request(
'valid_login',
headers={'X-Bugzilla-API-Key': self.api_key},
params={'login': self.username}
)
if type(result) == bool and not result:
raise LoginException("login name doesn't match api key")
elif type(result) == dict:
raise LoginException(result['message'])
# Bugzilla 5.1+
self.session.headers['X-Bugzilla-API-Key'] = self.api_key
# Bugzilla 5.0
self.session.params["Bugzilla_api_key"] = self.api_key
self._have_auth = True
elif self.username and self.password:
result = self.request(
'login',
headers={
'X-Bugzilla-Login': username,
'X-Bugzilla-Password': password
}
)
if 'token' in result:
self.session.headers['X-Bugzilla-Token'] = result['token']
self.token = result['token']
else:
raise LoginException(result['message'])
self._have_auth = True
elif self.userid and self.cookie:
# The token is crafted from the userid and cookie.
self.token = '%s-%s' % (self.userid, self.cookie)
self.session.headers['X-Bugzilla-Token'] = self.token
if not self.username:
result = self.request('user/%s' % self.userid)
if result.get('users', []):
self.username = result['users'][0]['name']
else:
raise LoginException(result['message'])
self._have_auth = True
@property
def authenticated(self):
"""
True if this instance is authenticated against the server.
>>> bugzilla = Bugsy()
>>> assert not bugzilla.authenticated
"""
return self._have_auth
def get(self, bug_number, include_fields=None):
"""
Get a bug from Bugzilla. If there is a login token created during
object initialisation it will be part of the query string passed to
Bugzilla
:param bug_number: Bug Number that will be searched. If found will
return a Bug object.
:param include_fields: A string or list of fields or field filters
to include in the response output
>>> bugzilla = Bugsy()
>>> bug = bugzilla.get(123456)
"""
fields = include_fields if include_fields else self.DEFAULT_SEARCH
bug = self.request(
'bug/%s' % bug_number,
params={"include_fields": fields}
)
return Bug(self, **bug['bugs'][0])
def put(self, bug):
"""
This method allows you to create or update a bug on Bugzilla. You
will have had to pass in a valid username and password to the
object initialisation and recieved back a token.
:param bug: A Bug object either created by hand or by using get()
If there is no valid token then a BugsyException will be raised.
If the object passed in is not a Bug then a BugsyException will
be raised.
>>> bugzilla = Bugsy()
>>> bug = bugzilla.get(123456)
>>> bug.summary = "I like cheese and sausages"
>>> bugzilla.put(bug)
"""
if not self._have_auth:
raise BugsyException("Unfortunately you can't put bugs in Bugzilla"
" without credentials")
if not isinstance(bug, Bug):
raise BugsyException("Please pass in a Bug object when posting"
" to Bugzilla")
if not bug.id:
result = self.request('bug', 'POST', json=bug.to_dict())
if 'error' not in result:
bug.id = result['id']
bug._bugsy = self
try:
bug._bug.pop('comment')
except Exception:
# If we don't have a `comment` we will error so let's just
# swallow it.
pass
else:
raise BugsyException(result['message'])
else:
result = self.request('bug/%s' % bug.id, 'PUT',
json=bug.diff())
updated_bug = self.get(bug.id)
return updated_bug
@property
def search_for(self):
return Search(self)
def request(self, path, method='GET', headers=None, **kwargs):
"""Perform a HTTP request.
Given a relative Bugzilla URL path, an optional request method,
and arguments suitable for requests.Request(), perform a
HTTP request.
"""
headers = {} if headers is None else headers.copy()
headers["User-Agent"] = "Bugsy"
kwargs['headers'] = headers
url = '%s/%s' % (self.bugzilla_url, path)
return self._handle_errors(self.session.request(method, url, **kwargs))
def _handle_errors(self, response):
if response.status_code >= 500:
raise BugsyException("We received a {0} error with the following: {1}"
.format(response.status_code, response.text))
result = response.json()
if (response.status_code > 399 and response.status_code < 500) \
or (isinstance(result, dict) and 'error' in result and
result.get('error', False) is True):
if "API key" in result['message'] or "username or password" in result['message']:
raise LoginException(result['message'], result.get("code"))
else:
raise BugsyException(result["message"], result.get("code"))
return result
|
class Bugsy(object):
'''
Bugsy allows easy getting and putting of Bugzilla bugs
'''
def __init__(
self,
username=None,
password=None,
userid=None,
cookie=None,
api_key=None,
bugzilla_url='https://bugzilla.mozilla.org/rest'
):
'''
Initialises a new instance of Bugsy
:param username: Username to login with. Defaults to None
:param password: Password to login with. Defaults to None
:param userid: User ID to login with. Defaults to None
:param cookie: Cookie to login with. Defaults to None
:param apikey: API key to use. Defaults to None.
:param bugzilla_url: URL endpoint to interact with. Defaults to
https://bugzilla.mozilla.org/rest
If a api_key is passed in, Bugsy will use this for authenticating
requests. While not required to perform requests, if a username is
passed in along with api_key, we will validate that the api key is
valid for this username. Otherwise the api key is blindly used
later.
If a username AND password are passed in Bugsy will try get a login
token from Bugzilla. If we can't login then a LoginException will
be raised.
If a userid AND cookie are passed in Bugsy will create a login
token from them.
If no username was passed in it will then try to get the username
from Bugzilla.
'''
pass
@property
def authenticated(self):
'''
True if this instance is authenticated against the server.
>>> bugzilla = Bugsy()
>>> assert not bugzilla.authenticated
'''
pass
def get(self, bug_number, include_fields=None):
'''
Get a bug from Bugzilla. If there is a login token created during
object initialisation it will be part of the query string passed to
Bugzilla
:param bug_number: Bug Number that will be searched. If found will
return a Bug object.
:param include_fields: A string or list of fields or field filters
to include in the response output
>>> bugzilla = Bugsy()
>>> bug = bugzilla.get(123456)
'''
pass
def put(self, bug):
'''
This method allows you to create or update a bug on Bugzilla. You
will have had to pass in a valid username and password to the
object initialisation and recieved back a token.
:param bug: A Bug object either created by hand or by using get()
If there is no valid token then a BugsyException will be raised.
If the object passed in is not a Bug then a BugsyException will
be raised.
>>> bugzilla = Bugsy()
>>> bug = bugzilla.get(123456)
>>> bug.summary = "I like cheese and sausages"
>>> bugzilla.put(bug)
'''
pass
@property
def search_for(self):
pass
def request(self, path, method='GET', headers=None, **kwargs):
'''Perform a HTTP request.
Given a relative Bugzilla URL path, an optional request method,
and arguments suitable for requests.Request(), perform a
HTTP request.
'''
pass
def _handle_errors(self, response):
pass
| 10 | 6 | 27 | 3 | 16 | 9 | 4 | 0.57 | 1 | 9 | 4 | 0 | 7 | 9 | 7 | 7 | 209 | 29 | 115 | 35 | 97 | 65 | 76 | 25 | 68 | 10 | 1 | 3 | 26 |
8,075 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/bug.py
|
bugsy.bug.Comment
|
class Comment(object):
    """
    Represents a single Bugzilla comment.

    To get comments you need to do the following

    >>> bugs = bugzilla.search_for.keywords("checkin-needed").search()
    >>> comments = bugs[0].get_comments()
    >>> # Returns the comment 0 of the first checkin-needed bug
    >>> comments[0].text
    """

    def __init__(self, bugsy=None, **kwargs):
        """
        Wrap a raw comment payload returned by the Bugzilla REST API.

        :param bugsy: Bugsy instance used for follow-up requests such as
            tag updates. May be ``None`` for read-only access.
        :param kwargs: the raw comment fields. ``time`` and
            ``creation_time`` are converted from strings to ``datetime``
            objects, and ``tags`` is normalised to a ``set``.
        """
        self._bugsy = bugsy
        # Bugzilla returns timestamps as ISO strings; convert them once here.
        kwargs['time'] = str2datetime(kwargs['time'])
        kwargs['creation_time'] = str2datetime(kwargs['creation_time'])
        if 'tags' in kwargs:
            kwargs['tags'] = set(kwargs['tags'])
        else:
            kwargs['tags'] = set()
        self._comment = kwargs

    @property
    def text(self):
        r"""
        Return the text that is in this comment

        >>> comment.text # David really likes cheese apparently
        """
        return self._comment['text']

    @property
    def id(self):
        r"""
        Return the comment id that is associated with Bugzilla.
        """
        return self._comment['id']

    @property
    def attachment_id(self):
        """
        If the comment was made on an attachment, return the ID of that
        attachment. Otherwise it will return None.
        """
        return self._comment['attachment_id']

    @property
    def author(self):
        """
        Return the login name of the comment's author.
        """
        return self._comment['author']

    @property
    def creator(self):
        """
        Return the login name of the comment's author.
        """
        return self._comment['creator']

    @property
    def bug_id(self):
        """
        Return the ID of the bug that this comment is on.
        """
        return self._comment['bug_id']

    @property
    def time(self):
        """
        This is exactly same as :attr:`creation_time`.

        For compatibility, time is still usable. However, please note
        that time may be deprecated and removed in a future release.
        Prefer :attr:`creation_time` instead.
        """
        return self._comment['time']

    @property
    def creation_time(self):
        """
        Return the time (in Bugzilla's timezone) that the comment was
        added.
        """
        return self._comment['creation_time']

    @property
    def is_private(self):
        """
        Return True if this comment is private (only visible to a certain
        group called the "insidergroup").
        """
        return self._comment['is_private']

    @property
    def tags(self):
        """
        Return a set of comment tags currently set for the comment.
        """
        return self._comment['tags']

    def add_tags(self, tags):
        """
        Add tags to the comment.

        :param tags: a single tag string or a list of tag strings.
        """
        if not isinstance(tags, list):
            tags = [tags]
        self._bugsy.request('bug/comment/%s/tags' % self._comment['id'],
                            method='PUT', json={"add": tags})

    def remove_tags(self, tags):
        """
        Remove tags from the comment.

        (Previous docstring incorrectly said "Add tags".)

        :param tags: a single tag string or a list of tag strings.
        """
        if not isinstance(tags, list):
            tags = [tags]
        self._bugsy.request('bug/comment/%s/tags' % self._comment['id'],
                            method='PUT', json={"remove": tags})
|
class Comment(object):
'''
Represents a single Bugzilla comment.
To get comments you need to do the following
>>> bugs = bugzilla.search_for.keywords("checkin-needed").search()
>>> comments = bugs[0].get_comments()
>>> # Returns the comment 0 of the first checkin-needed bug
>>> comments[0].text
'''
def __init__(self, bugsy=None, **kwargs):
pass
@property
def text(self):
'''
Return the text that is in this comment
>>> comment.text # David really likes cheese apparently
'''
pass
@property
def id(self):
'''
Return the comment id that is associated with Bugzilla.
'''
pass
@property
def attachment_id(self):
'''
If the comment was made on an attachment, return the ID of that
attachment. Otherwise it will return None.
'''
pass
@property
def author(self):
'''
Return the login name of the comment's author.
'''
pass
@property
def creator(self):
'''
Return the login name of the comment's author.
'''
pass
@property
def bug_id(self):
'''
Return the ID of the bug that this comment is on.
'''
pass
@property
def time(self):
'''
This is exactly same as :attr:`creation_time`.
For compatibility, time is still usable. However, please note
that time may be deprecated and removed in a future release.
Prefer :attr:`creation_time` instead.
'''
pass
@property
def creation_time(self):
'''
Return the time (in Bugzilla's timezone) that the comment was
added.
'''
pass
@property
def is_private(self):
'''
Return True if this comment is private (only visible to a certain
group called the "insidergroup").
'''
pass
@property
def tags(self):
'''
Return a set of comment tags currently set for the comment.
'''
pass
def add_tags(self, tags):
'''
Add tags to the comments
'''
pass
def remove_tags(self, tags):
'''
Add tags to the comments
'''
pass
| 24 | 13 | 7 | 0 | 3 | 3 | 1 | 1.02 | 1 | 2 | 0 | 0 | 13 | 2 | 13 | 13 | 120 | 19 | 50 | 26 | 26 | 51 | 37 | 16 | 23 | 2 | 1 | 1 | 16 |
8,076 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/bug.py
|
bugsy.bug.Bug
|
class Bug(object):
    """This represents a Bugzilla Bug"""
    def __init__(self, bugsy=None, **kwargs):
        """
        Defaults are set if there are no kwargs passed in. To pass in
        a dict create the Bug object like the following
        :param bugsy: Bugsy instance to use to connect to Bugzilla.
        >>> bug = Bug(**myDict)
        """
        self._bugsy = bugsy
        # _bug holds the live field values; _copy keeps the values as last
        # seen on the server so diff() can compute what changed locally.
        self._bug = dict(**kwargs)
        self._copy = dict(**kwargs)
        self._bug['op_sys'] = kwargs.get('op_sys', 'All')
        self._bug['product'] = kwargs.get('product', 'core')
        self._bug['component'] = kwargs.get('component', 'general')
        self._bug['platform'] = kwargs.get('platform', 'All')
        self._bug['version'] = kwargs.get('version', 'unspecified')
    def __getattr__(self, attr):
        # Unknown attributes resolve against the underlying bug dict;
        # missing fields read as None rather than raising AttributeError.
        if attr not in self._bug:
            return None
        return self._bug[attr]
    def __setattr__(self, attr, value):
        # Internal attributes (_bug, _copy, _bugsy) go straight onto the
        # instance; everything else is treated as a bug field, validated
        # for a handful of special cases, and stored in the bug dict.
        if attr == '_bug' or attr == '_copy':
            # NOTE(review): unpack() appears to normalise the raw dict
            # before storage -- confirm its semantics in the bugsy helpers.
            object.__setattr__(self, attr, unpack(value))
        elif attr == '_bugsy':
            object.__setattr__(self, attr, value)
        elif attr == 'status':
            # Status may only be changed on a bug that exists server-side.
            if self.id:
                if value in VALID_STATUS:
                    self._bug['status'] = value
                else:
                    raise BugException("Invalid status type was used")
            else:
                raise BugException("Can not set status unless there is a bug id."
                                   " Please call Update() before setting")
        elif attr == 'resolution':
            if value in VALID_RESOLUTION:
                self._bug['resolution'] = value
            else:
                raise BugException("Invalid resolution type was used")
        elif attr in ARRAY_TYPES and not isinstance(value, list):
            raise BugException("Cannot set value to non-list type")
        else:
            # copy.copy so later caller-side mutation of the value doesn't
            # silently change the stored field.
            self._bug[attr] = copy.copy(value)
    def to_dict(self):
        """
        Return the raw dict that is used inside this object
        """
        return self._bug
    def update(self):
        """
        Update this object with the latest changes from Bugzilla
        >>> bug.status
        'NEW'
        #Changes happen on Bugzilla
        >>> bug.update()
        >>> bug.status
        'FIXED'
        """
        if 'id' in self._bug:
            result = self._bugsy.request('bug/%s' % self._bug['id'])
            self._bug = dict(**result['bugs'][0])
            self._copy = dict(**result['bugs'][0])
        else:
            raise BugException("Unable to update bug that isn't in Bugzilla")
    def get_comments(self):
        """
        Obtain comments for this bug.
        Returns a list of Comment instances.
        """
        bug = str(self._bug['id'])
        res = self._bugsy.request('bug/%s/comment' % bug)
        return [Comment(bugsy=self._bugsy, **comments) for comments
                in res['bugs'][bug]['comments']]
    def add_comment(self, comment):
        """
        Adds a comment to a bug. If the bug object does not have a bug ID
        (ie you are creating a bug) then you will need to also call `put`
        on the :class:`Bugsy` class.
        >>> bug.add_comment("I like sausages")
        >>> bugzilla.put(bug)
        If it does have a bug id then this will immediately post to the server
        >>> bug.add_comment("I like eggs too")
        More examples can be found at:
        https://github.com/AutomatedTester/Bugsy/blob/master/example/add_comments.py
        """
        # If we have a key post immediately otherwise hold onto it until
        # put(bug) is called
        if 'id' in self._bug:
            self._bugsy.request('bug/{}/comment'.format(self._bug['id']),
                                method='POST', json={"comment": comment}
                                )
        else:
            self._bug['comment'] = comment
    def get_attachments(self):
        """
        Obtain attachments for this bug.
        Returns a list of Attachment instances.
        """
        bug = str(self._bug['id'])
        res = self._bugsy.request(
            'bug/%s/attachment' % bug,
        )
        return [Attachment(bugsy=self._bugsy, **attachments) for attachments
                in res['bugs'][bug]]
    def add_attachment(self, attachment):
        """
        Create a new attachment on this bug via the REST API.

        :param attachment: an Attachment carrying all required create fields.
        :raises BugException: if the bug has no id, or required attachment
            fields are missing.
        """
        if not self.id:
            raise BugException("Cannot add an attachment without a bug id")
        attach_dict = attachment.to_dict()
        missing = list(set(Attachment.CREATE_REQUIRED) - set(attach_dict.keys()))
        if missing:
            raise BugException("Cannot add attachment without all required fields")
        output = {'ids': [self.id]}
        # Forward only the fields Bugzilla accepts on attachment creation.
        for field in list(set(Attachment.CREATE_FIELDS) & set(attach_dict.keys())):
            output[field] = attach_dict[field]
        self._bugsy.request('bug/%s/attachment' % self._bug['id'],
                            method='POST', json=output)
    def diff(self):
        """
        Generates a dictionary containing only the changed values
        Special handling of ARRAY_TYPES fields is required to only PUT changed objects
        >>> bug.cc
        ['foo@bar.com']
        >>> bug.cc.append('abc@xyz.com')
        >>> bug.cc
        ['foo@bar.com', 'abc@xyz.com']
        >>>bug.diff()
        {'cc': {'added': ['abc@xyz.com']}}
        """
        changed = {}
        for key in self._bug:
            if key not in ARRAY_TYPES:
                # Scalar field: report the new value when it differs from
                # the server snapshot (or is entirely new).
                if key not in self._copy or self._bug[key] != self._copy[key]:
                    changed[key] = self._bug[key]
            elif key == 'flags':
                # Flags are sent wholesale rather than as add/remove deltas.
                if self._bug.get(key, []) != self._copy.get(key, []):
                    changed[key] = self._bug.get(key, [])
            else:
                # Other array fields are expressed as add/remove deltas,
                # which is the form Bugzilla's PUT endpoint expects.
                values_now = set(self._bug.get(key, []))
                values_orig = set(self._copy.get(key, []))
                additions = list(values_now - values_orig)
                subtractions = list(values_orig - values_now)
                if additions or subtractions:
                    changed[key] = {}
                    if len(additions):
                        changed[key]['add'] = additions
                    if len(subtractions):
                        changed[key]['remove'] = subtractions
        return changed
|
class Bug(object):
'''This represents a Bugzilla Bug'''
def __init__(self, bugsy=None, **kwargs):
'''
Defaults are set if there are no kwargs passed in. To pass in
a dict create the Bug object like the following
:param bugsy: Bugsy instance to use to connect to Bugzilla.
>>> bug = Bug(**myDict)
'''
pass
def __getattr__(self, attr):
pass
def __setattr__(self, attr, value):
pass
def to_dict(self):
'''
Return the raw dict that is used inside this object
'''
pass
def update(self):
'''
Update this object with the latest changes from Bugzilla
>>> bug.status
'NEW'
#Changes happen on Bugzilla
>>> bug.update()
>>> bug.status
'FIXED'
'''
pass
def get_comments(self):
'''
Obtain comments for this bug.
Returns a list of Comment instances.
'''
pass
def add_comment(self, comment):
'''
Adds a comment to a bug. If the bug object does not have a bug ID
(ie you are creating a bug) then you will need to also call `put`
on the :class:`Bugsy` class.
>>> bug.add_comment("I like sausages")
>>> bugzilla.put(bug)
If it does have a bug id then this will immediately post to the server
>>> bug.add_comment("I like eggs too")
More examples can be found at:
https://github.com/AutomatedTester/Bugsy/blob/master/example/add_comments.py
'''
pass
def get_attachments(self):
'''
Obtain comments for this bug.
Returns a list of Comment instances.
'''
pass
def add_attachment(self, attachment):
pass
def diff(self):
'''
Generates a dictionary containing only the changed values
Special handling of ARRAY_TYPES fields is required to only PUT changed objects
>>> bug.cc
['foo@bar.com']
>>> bug.cc.append('abc@xyz.com')
>>> bug.cc
['foo@bar.com', 'abc@xyz.com']
>>>bug.diff()
{'cc': {'added': ['abc@xyz.com']}}
'''
pass
| 11 | 8 | 16 | 2 | 10 | 5 | 3 | 0.52 | 1 | 7 | 3 | 0 | 10 | 3 | 10 | 10 | 176 | 27 | 98 | 29 | 87 | 51 | 78 | 29 | 67 | 9 | 1 | 4 | 32 |
8,077 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/attachment.py
|
bugsy.attachment.Attachment
|
class Attachment(object):
    """
    Represents a single Bugzilla attachment.
    To get comments you need to do the following
    >>> bugs = bugzilla.search_for.keywords("checkin-needed").search()
    >>> attachments = bugs[0].get_attachments()
    >>> # Returns the attachment's comment
    >>> attachments[0].comment
    """
    # Fields that must be present when creating a new attachment.
    CREATE_REQUIRED = ['data', 'file_name', 'summary', 'content_type']
    CREATE_FIELDS = CREATE_REQUIRED + ['comment', 'flags', 'is_markdown', 'is_patch', 'is_private']
    # Fields Bugzilla accepts when updating an existing attachment.
    UPDATE_FIELDS = ['bug_flags', 'comment', 'content_type', 'file_name', 'flags', 'is_obsolete',
                     'is_patch', 'is_private', 'summary']
    def __init__(self, bugsy, **kwargs):
        """
        Wrap a raw attachment payload returned by the Bugzilla REST API.

        :param bugsy: Bugsy instance used for follow-up requests.
        :param kwargs: the raw attachment fields.
        """
        self._bugsy = bugsy
        self._attachment = dict(**kwargs)
        # Pristine snapshot of the attachment as last seen on the server.
        self._copy = copy.deepcopy(self._attachment)
    def __getattr__(self, attr):
        # Unknown attributes resolve against the raw attachment dict;
        # missing fields read as None rather than raising AttributeError.
        if attr not in self._attachment:
            return None
        return self._attachment[attr]
    def __setattr__(self, attr, value):
        # Underscore attributes are internal bookkeeping and go straight
        # onto the instance; everything else is validated and stored in
        # the underlying attachment dict.
        if attr.startswith('_'):
            if attr == '_bugsy':
                object.__setattr__(self, attr, value)
            elif attr == '_attachment':
                clone = copy.deepcopy(value)
                # Bugzilla returns times as ISO strings; convert to datetime.
                time_fields = {'creation_time', 'last_change_time'}
                for time_field in list(time_fields & set(clone.keys())):
                    clone[time_field] = dt.strptime(clone[time_field], '%Y-%m-%dT%H:%M:%SZ')
                object.__setattr__(self, attr, clone)
            elif attr == '_copy':
                object.__setattr__(self, attr, value)
        else:
            if attr == 'data':
                # Attempt to decode data to ensure it's valid
                try:
                    if hasattr(base64, 'decodebytes'):
                        base64.decodebytes(value.encode('utf-8'))
                    else:
                        # Python 2 fallback (decodestring was removed in 3.9).
                        base64.decodestring(value)
                except binascii.Error:
                    raise AttachmentException('The data field value must be in base64 format')
            elif attr in ['comment', 'content_type', 'file_name', 'summary']:
                if not isinstance(value, six.string_types):
                    raise AttachmentException('The %s field value must be of type string' % attr)
            elif attr in ['is_patch', 'is_private']:
                if not isinstance(value, bool):
                    raise AttachmentException('The %s field value must be of type bool' % attr)
            elif attr == 'flags' or attr == 'bug_flags':
                if not isinstance(value, list):
                    # ToDo: Once flags are implemented, this should check isInstance(list[i], Flag)
                    raise AttachmentException('The %s field value must be of type list' % attr)
            self._attachment[attr] = copy.copy(value)
    def to_dict(self):
        """
        Return the raw dict that is used inside this object
        """
        return self._attachment
    def update(self):
        """
        Push locally modified, updatable fields to Bugzilla and refresh
        this object from the server's response.

        :raises AttachmentException: if the attachment has no id yet.
        """
        if not self.id:
            raise AttachmentException('Cannot update bug without an attachment id')
        updates = {}
        # Only send fields Bugzilla allows in an update request.
        for k in self._attachment:
            if k in self.UPDATE_FIELDS:
                updates[k] = self._attachment[k]
        res = self._bugsy.request('bug/attachment/%s' % self.id,
                                  method='PUT', json=updates)
        self._attachment = res['attachments'][0]
        self._copy = copy.deepcopy(self._attachment)
|
class Attachment(object):
'''
Represents a single Bugzilla attachment.
To get comments you need to do the following
>>> bugs = bugzilla.search_for.keywords("checkin-needed").search()
>>> attachments = bugs[0].get_attachments()
>>> # Returns the attachment's comment
>>> attachments[0].comment
'''
def __init__(self, bugsy, **kwargs):
pass
def __getattr__(self, attr):
pass
def __setattr__(self, attr, value):
pass
def to_dict(self):
'''
Return the raw dict that is used inside this object
'''
pass
def update(self):
pass
| 6 | 2 | 12 | 1 | 10 | 1 | 5 | 0.23 | 1 | 7 | 1 | 0 | 5 | 4 | 5 | 5 | 82 | 12 | 57 | 18 | 51 | 13 | 48 | 18 | 42 | 15 | 1 | 4 | 23 |
8,078 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/errors.py
|
bugsy.errors.SearchException
|
class SearchException(BugsyException):
    """
    Raised when a Bugzilla search request fails or an unsupported
    search operation is attempted.
    """
|
class SearchException(BugsyException):
'''
If while interacting with Bugzilla and we try do something that is not
supported this error will be raised.
'''
pass
| 1 | 1 | 0 | 0 | 0 | 0 | 0 | 2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 6 | 0 | 2 | 1 | 1 | 4 | 2 | 1 | 1 | 0 | 4 | 0 | 0 |
8,079 |
AutomatedTester/Bugsy
|
AutomatedTester_Bugsy/bugsy/search.py
|
bugsy.search.Search
|
class Search(object):
    """
    Fluent builder for Bugzilla bug searches.

    Each filter method records its criteria and returns this Search
    instance so calls can be chained before finally calling search().
    """
    def __init__(self, bugsy):
        """
        Initialises the search object

        :param bugsy: Bugsy instance to use to connect to Bugzilla.
        """
        self._bugsy = bugsy
        self._includefields = copy.copy(bugsy.DEFAULT_SEARCH)
        self._keywords = []
        self._component = []
        self._product = []
        self._assigned = []
        self._summaries = []
        self._whiteboard = []
        self._bug_numbers = []
        self._time_frame = {}
        self._change_history = {"fields": []}
    def include_fields(self, *args):
        r"""
        Add extra fields to be returned by the search, on top of the
        always-included defaults: 'version', 'id', 'summary', 'status',
        'op_sys', 'resolution', 'product', 'component', 'platform'.

        :param args: field names to include
        :returns: :class:`Search`

        >>> bugzilla.search_for.include_fields("flags")
        """
        self._includefields.extend(args)
        return self
    def component(self, *components):
        r"""
        Restrict search() results to the given component(s).

        :param components: component names
        :returns: :class:`Search`
        """
        self._component.extend(components)
        return self
    def product(self, *products):
        r"""
        Restrict search() results to the given product(s).

        :param products: product names
        :returns: :class:`Search`
        """
        self._product.extend(products)
        return self
    def keywords(self, *args):
        r"""
        Search for bugs carrying the given keywords. Replaces any
        keywords set previously.

        :param args: keyword strings
        :returns: :class:`Search`

        >>> bugzilla.search_for.keywords("checkin-needed")
        """
        self._keywords = list(args)
        return self
    def assigned_to(self, *args):
        r"""
        Search for bugs assigned to the given users. Replaces any
        assignees set previously.

        :param args: user login names
        :returns: :class:`Search`

        >>> bugzilla.search_for.assigned_to("dburns@mozilla.com")
        """
        self._assigned = list(args)
        return self
    def summary(self, *args):
        r"""
        Search for bugs whose summary contains the given words.
        Replaces any summary terms set previously.

        :param args: words to match in the summary
        :returns: :class:`Search`

        >>> bugzilla.search_for.summary("663399")
        """
        self._summaries = list(args)
        return self
    def whiteboard(self, *args):
        r"""
        Search for bugs whose whiteboard contains the given words.
        Replaces any whiteboard terms set previously.

        :param args: words to match in the whiteboard
        :returns: :class:`Search`

        >>> bugzilla.search_for.whiteboard("affects")
        """
        self._whiteboard = list(args)
        return self
    def bug_number(self, bug_numbers):
        r"""
        Fetch specific bugs by number, with configurable returned fields.

        :param bug_numbers: a string for the bug number or a list of
                            strings
        :returns: :class:`Search`

        >>> bugzilla.search_for.bug_number(['123123', '123456'])
        """
        self._bug_numbers = list(bug_numbers)
        return self
    def timeframe(self, start, end):
        r"""
        Limit the search to bugs changed within a time frame. Either
        bound may be falsy to leave that side open.

        :param start: lower bound (chfieldfrom)
        :param end: upper bound (chfieldto)
        :returns: :class:`Search`
        """
        if start:
            self._time_frame['chfieldfrom'] = start
        if end:
            self._time_frame['chfieldto'] = end
        return self
    def change_history_fields(self, fields, value=None):
        r"""
        Limit the search to bugs whose given fields changed, optionally
        to a specific value.

        :param fields: list of field names (chfield)
        :param value: optional value the field changed to (chfieldvalue)
        :returns: :class:`Search`
        """
        if not isinstance(fields, list):
            raise Exception('fields should be a list')
        self._change_history['fields'] = fields
        if value:
            self._change_history['value'] = value
        return self
    def search(self):
        r"""
        Execute the search against Bugzilla using everything recorded by
        the builder methods. Returns a (possibly empty) list of Bug
        objects.

        >>> bugs = bugzilla.search_for\
        ...        .keywords("checkin-needed")\
        ...        .include_fields("flags")\
        ...        .search()
        """
        params = dict(self._time_frame)
        if self._includefields:
            params['include_fields'] = list(self._includefields)
        # Fetching explicit bug numbers bypasses the query-style search.
        if self._bug_numbers:
            found = []
            for number in self._bug_numbers:
                result = self._bugsy.request('bug/%s' % number,
                                             params=params)
                found.append(Bug(self._bugsy, **result['bugs'][0]))
            return found
        for key, values in (('component', self._component),
                            ('product', self._product),
                            ('keywords', self._keywords),
                            ('assigned_to', self._assigned)):
            if values:
                params[key] = list(values)
        if self._summaries:
            params['short_desc_type'] = 'allwordssubstr'
            params['short_desc'] = list(self._summaries)
        if self._whiteboard:
            params['short_desc_type'] = 'allwordssubstr'
            params['whiteboard'] = list(self._whiteboard)
        if self._change_history['fields']:
            params['chfield'] = self._change_history['fields']
        if self._change_history.get('value', None):
            params['chfieldvalue'] = self._change_history['value']
        try:
            results = self._bugsy.request('bug', params=params)
        except Exception as e:
            raise SearchException(e.msg, e.code)
        return [Bug(self._bugsy, **bug) for bug in results['bugs']]
|
class Search(object):
'''
This allows searching for bugs in Bugzilla
'''
def __init__(self, bugsy):
'''
Initialises the search object
:param bugsy: Bugsy instance to use to connect to Bugzilla.
'''
pass
def include_fields(self, *args):
'''
Include fields is the fields that you want to be returned when
searching. These are in addition to the fields that are always
included below.
:param args: items passed in will be turned into a list
:returns: :class:`Search`
>>> bugzilla.search_for.include_fields("flags")
The following fields are always included in search:
'version', 'id', 'summary', 'status', 'op_sys',
'resolution', 'product', 'component', 'platform'
'''
pass
def component(self, *components):
'''
When search() is called it will limit results to items in a component.
:param component: items passed in will be turned into a list
:returns: :class:`Search`
'''
pass
def product(self, *products):
'''
When search is called, it will limit the results to items in a Product.
:param product: items passed in will be turned into a list
:returns: :class:`Search`
'''
pass
def keywords(self, *args):
'''
When search() is called it will search for the keywords passed
in here
:param args: items passed in will be turned into a list
:returns: :class:`Search`
>>> bugzilla.search_for.keywords("checkin-needed")
'''
pass
def assigned_to(self, *args):
'''
When search() is called it will search for bugs assigned to these
users
:param args: items passed in will be turned into a list
:returns: :class:`Search`
>>> bugzilla.search_for.assigned_to("dburns@mozilla.com")
'''
pass
def summary(self, *args):
'''
When search is called it will search for bugs with the words
passed into the methods
:param args: items passed in will be turned into a list
:returns: :class:`Search`
>>> bugzilla.search_for.summary("663399")
'''
pass
def whiteboard(self, *args):
'''
When search is called it will search for bugs with the words
passed into the methods
:param args: items passed in will be turned into a list
:returns: :class:`Search`
>>> bugzilla.search_for.whiteboard("affects")
'''
pass
def bug_number(self, bug_numbers):
'''
When you want to search for a bugs and be able to change the fields
returned.
:param bug_numbers: A string for the bug number or a list of
strings
:returns: :class:`Search`
>>> bugzilla.search_for.bug_number(['123123', '123456'])
'''
pass
def timeframe(self, start, end):
'''
When you want to search bugs for a certain time frame.
:param start:
:param end:
:returns: :class:`Search`
'''
pass
def change_history_fields(self, fields, value=None):
'''
'''
pass
def search(self):
'''
Call the Bugzilla endpoint that will do the search. It will take
the information used in other methods on the Search object and
build up the query string. If no bugs are found then an empty list
is returned.
>>> bugs = bugzilla.search_for\
... .keywords("checkin-needed")\
... .include_fields("flags")\
... .search()
'''
pass
| 13 | 13 | 16 | 2 | 7 | 7 | 3 | 0.92 | 1 | 4 | 2 | 0 | 12 | 11 | 12 | 12 | 207 | 36 | 89 | 33 | 76 | 82 | 87 | 32 | 74 | 13 | 1 | 2 | 31 |
8,080 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/railroads.py
|
routes1846.railroads.Train
|
class Train(object):
    """A train type in 1846: how many cities it collects revenue from
    (``collect``) and how many it may pass through (``visit``)."""
    @staticmethod
    def create(train_str):
        """Parse a train string such as "4" or "6/8" into a Train.

        :param train_str: "N" for an N-train, or "C/V" for a train that
            collects C cities while visiting up to V.
        :raises ValueError: if (collect, visit) is not a known train type.
        """
        parts = train_str.split("/")
        collect = int(parts[0].strip())
        visit = int((parts[0] if len(parts) == 1 else parts[1]).strip())
        if (collect, visit) not in TRAIN_TO_PHASE:
            # BUG FIX: the keys of TRAIN_TO_PHASE are (collect, visit)
            # tuples; joining them directly raised TypeError instead of
            # the intended ValueError. Format each pair as a string first.
            valid = ", ".join("{} / {}".format(*pair) for pair in sorted(TRAIN_TO_PHASE.keys()))
            raise ValueError("Invalid train string found. Got ({}, {}), but expected one of {}".format(collect, visit, valid))
        return Train(collect, visit, TRAIN_TO_PHASE[(collect, visit)])
    def __init__(self, collect, visit, phase):
        self.collect = collect
        self.visit = visit
        self.phase = phase
    def __str__(self):
        if self.collect == self.visit:
            return str(self.collect)
        else:
            return "{} / {}".format(self.collect, self.visit)
    def __hash__(self):
        return hash((self.phase, self.collect, self.visit))
    def __eq__(self, other):
        return isinstance(other, Train) and \
            self.phase == other.phase and \
            self.collect == other.collect and \
            self.visit == other.visit
|
class Train(object):
@staticmethod
def create(train_str):
pass
def __init__(self, collect, visit, phase):
pass
def __str__(self):
pass
def __hash__(self):
pass
def __eq__(self, other):
pass
| 7 | 0 | 5 | 0 | 5 | 0 | 2 | 0 | 1 | 3 | 0 | 0 | 4 | 3 | 5 | 5 | 32 | 6 | 26 | 13 | 19 | 0 | 21 | 12 | 15 | 3 | 1 | 1 | 8 |
8,081 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/route.py
|
routes1846.route._RunRoute
|
class _RunRoute(object):
def __init__(self, route, visited_city_values, train):
self._route = route
self.city_values = dict.fromkeys(route.cities, 0)
self.city_values.update(visited_city_values)
self.value = sum(self.city_values.values())
self.train = train
self._mail_contract = False
def overlap(self, other):
return self._route.overlap(other._route)
def add_mail_contract(self):
if not self._mail_contract:
self.value += len(self._route.cities) * 10
self._mail_contract = True
@property
def cities(self):
return self._route.cities
@property
def visited_cities(self):
return [city for city in self.cities if self.city_values[city] > 0]
def __str__(self):
return str(self._route)
def __iter__(self):
return iter(self._route)
|
class _RunRoute(object):
def __init__(self, route, visited_city_values, train):
pass
def overlap(self, other):
pass
def add_mail_contract(self):
pass
@property
def cities(self):
pass
@property
def visited_cities(self):
pass
def __str__(self):
pass
def __iter__(self):
pass
| 10 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 2 | 0 | 0 | 7 | 5 | 7 | 7 | 32 | 8 | 24 | 15 | 14 | 0 | 22 | 13 | 14 | 2 | 1 | 1 | 8 |
8,082 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/route.py
|
routes1846.route.Route
|
class Route(object):
    """An ordered sequence of adjacent board tiles a train can run over."""
    @staticmethod
    def create(path):
        """Build a Route from an iterable of tiles."""
        return Route(tuple(path))
    @staticmethod
    def empty():
        """Return a Route containing no tiles."""
        return Route(tuple())
    @staticmethod
    def single(tile):
        """Return a Route consisting of one tile."""
        return Route.create((tile, ))
    def __init__(self, path):
        self._path = tuple(path)
        # Each edge is the unordered pair of adjacent tiles on the path;
        # used by overlap() to detect shared track between routes.
        self._edges = [{path[k-1], path[k]} for k in range(1, len(path))]
    def merge(self, route):
        """Return a new Route: this path followed by the other's path."""
        return Route.create(self._path + route._path)
    def _best_cities(self, train, route_city_values, station_cities, include=None):
        """Choose the highest-value cities the train may collect.

        Always includes the best station city plus any cities listed in
        ``include``; returns (chosen-cities dict, their total value).
        """
        always_include = [(city, route_city_values[city]) for city in (include or [])]
        # Find the station city to always include
        always_include.append(max(station_cities.items(), key=lambda tile_and_value: tile_and_value[1]))
        # Remove from consideration the station city and any cities that should always be included.
        city_values = route_city_values.copy()
        for to_include in always_include:
            del city_values[to_include[0]]
        # The route can collect cities only after accounting for anything marked always collect
        collect = train.collect - len(always_include)
        best_cities = dict(heapq.nlargest(collect, city_values.items(), key=lambda city_item: city_item[1]))
        # Add back in the cities marked always collect
        best_cities.update(dict(always_include))
        return best_cities, sum(best_cities.values())
    def value(self, board, train, railroad, phase):
        """Return the {tile: value} dict of cities the train collects on
        its best run of this route, accounting for the east-west bonus."""
        route_city_values = {tile: tile.value(railroad, phase) for tile in self if tile.is_city}
        station_cells = {station.cell for station in board.stations(railroad.name)}
        station_cities = {tile: value for tile, value in route_city_values.items() if tile.cell in station_cells}
        best_cities, route_value = self._best_cities(train, route_city_values, station_cities)
        # Check if the route runs from east to west.
        terminals = [self._path[0], self._path[-1]]
        east_to_west = all(isinstance(tile, (EastTerminalCity, WestTerminalCity)) for tile in terminals) and type(terminals[0]) != type(terminals[1])
        if east_to_west:
            # There is an east-west route. Confirm that a route including those
            # terminal cities is the highest value route (including bonuses).
            route_city_values_e2w = route_city_values.copy()
            route_city_values_e2w.update({terminal: terminal.value(railroad, phase, east_to_west) for terminal in terminals})
            best_cities_e2w, route_value_e2w = self._best_cities(train, route_city_values_e2w, station_cities, terminals)
            return best_cities_e2w if route_value_e2w >= route_value else best_cities
        else:
            return best_cities
    def overlap(self, other):
        """True when this route shares any track edge with ``other``."""
        for edge in self._edges:
            if edge in other._edges:
                return True
        return False
    def subroutes(self, start):
        """Return all subroutes (with at least 2 cities) that include the
        cell ``start``; an empty Route if ``start`` is not on the path."""
        if not self.contains_cell(start):
            return Route.empty()
        start_index = [index for index, tile in enumerate(self._path) if tile.cell == start][0]
        # Extend from the start tile in both directions along the path.
        backwards_subroutes = {Route.create(self._path[index:start_index + 1]) for index in range(start_index, -1, -1)}
        forwards_subroutes = {Route.create(self._path[start_index:index]) for index in range(start_index + 1, len(self._path) + 1)}
        subroutes = backwards_subroutes.union(forwards_subroutes)
        return [subroute for subroute in subroutes if len(subroute.cities) >= 2]
    def contains_cell(self, cell):
        """True when any tile on this route sits on ``cell``."""
        return cell in [tile.cell for tile in self]
    @property
    def cities(self):
        """The city tiles on this route, in path order."""
        return [tile for tile in self._path if tile.is_city]
    def __iter__(self):
        return iter(self._path)
    def __bool__(self):
        return bool(self._path)
    def __len__(self):
        return len(self._path)
    def __hash__(self):
        # Hash on the sorted cells so equal routes (order-insensitive,
        # per __eq__) hash equally.
        return hash(tuple(sorted([tile.cell for tile in self._path])))
    def __eq__(self, other):
        return isinstance(other, Route) and set(other._path) == set(self._path)
    def __str__(self):
        return ", ".join([str(tile.cell) for tile in self])
    def run(self, board, train, railroad, phase):
        """Run ``train`` over this route for ``railroad`` and return the
        resulting _RunRoute; raises ValueError for removed railroads."""
        if railroad.is_removed:
            raise ValueError("Cannot run routes for a removed railroad: {}".format(railroad.name))
        visited_cities = self.value(board, train, railroad, phase)
        return _RunRoute(self, visited_cities, train)
|
class Route(object):
@staticmethod
def create(path):
pass
@staticmethod
def empty():
pass
@staticmethod
def single(tile):
pass
def __init__(self, path):
pass
def merge(self, route):
pass
def _best_cities(self, train, route_city_values, station_cities, include=None):
pass
def value(self, board, train, railroad, phase):
pass
def overlap(self, other):
pass
def subroutes(self, start):
pass
def contains_cell(self, cell):
pass
@property
def cities(self):
pass
def __iter__(self):
pass
def __bool__(self):
pass
def __len__(self):
pass
def __hash__(self):
pass
def __eq__(self, other):
pass
def __str__(self):
pass
def run(self, board, train, railroad, phase):
pass
| 23 | 0 | 5 | 1 | 4 | 0 | 1 | 0.09 | 1 | 12 | 3 | 0 | 15 | 2 | 18 | 18 | 109 | 28 | 74 | 44 | 51 | 7 | 69 | 40 | 50 | 3 | 1 | 2 | 25 |
8,083 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/tiles.py
|
routes1846.tiles.Tile
|
class Tile(object):
    """A placeable track tile: its connectivity, revenue value, supply
    count, phase, and station capacity."""
    @staticmethod
    def create(id, edges, value, quantity, phase, is_city=False, is_z=False, is_chicago=False):
        """Build a Tile from raw edge data.

        For plain (non-Chicago) city tiles ``edges`` lists the connected
        sides, and every side is joined to every other. Otherwise
        ``edges`` lists (side, side) pairs, each joined in both
        directions.
        """
        paths = collections.defaultdict(list)
        if is_city and not is_chicago:
            # A plain city connects each listed side to all the others.
            exits = set(edges)
            for side in exits:
                paths[side].extend(list(exits - {side}))
        else:
            for start, end in edges:
                paths[start].append(end)
                paths[end].append(start)
        return Tile(id, paths, int(value), int(quantity), int(phase), is_city, is_z, is_chicago)
    def __init__(self, id, paths, value, quantity, phase, is_city=False, is_z=False, is_chicago=False):
        self.id = id
        self.paths = {entrance: tuple(exits) for entrance, exits in paths.items()}
        self.value = value
        self.quantity = quantity
        self.phase = phase
        self.is_city = is_city
        self.is_z = is_z
        self.is_chicago = is_chicago
        # Station capacity depends on the tile kind; Chicago is special,
        # Z cities and plain cities grow with phase (capped at 3 and 2).
        if self.is_chicago:
            capacity = 4
        elif self.is_z:
            capacity = min(self.phase, 3)
        elif self.is_city:
            capacity = min(self.phase, 2)
        else:
            capacity = 0
        self.capacity = capacity
|
class Tile(object):
@staticmethod
def create(id, edges, value, quantity, phase, is_city=False, is_z=False, is_chicago=False):
pass
def __init__(self, id, paths, value, quantity, phase, is_city=False, is_z=False, is_chicago=False):
pass
| 4 | 0 | 16 | 2 | 14 | 0 | 4 | 0 | 1 | 4 | 0 | 0 | 1 | 9 | 2 | 2 | 34 | 4 | 30 | 17 | 26 | 0 | 25 | 16 | 22 | 4 | 1 | 2 | 8 |
8,084 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/tokens.py
|
routes1846.tokens.MeatPackingToken
|
class MeatPackingToken(PrivateCompanyToken):
    """Marker token placed by the Meat Packing Company private company;
    inherits placement validation from PrivateCompanyToken."""
    pass
|
class MeatPackingToken(PrivateCompanyToken):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 2 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 3 | 0 | 0 |
8,085 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/tokens.py
|
routes1846.tokens.PrivateCompanyToken
|
class PrivateCompanyToken(Token):
    """A token placed on behalf of a private company's owning railroad."""
    def __init__(self, cell, railroad):
        """
        :param cell: the board cell the token occupies.
        :param railroad: the owning railroad; must still be in the game.
        :raises ValueError: if the railroad has been removed.
        """
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place a private company's token: {}".format(railroad.name))
        super().__init__(cell, railroad)
|
class PrivateCompanyToken(Token):
def __init__(self, cell, railroad):
pass
| 2 | 0 | 5 | 1 | 4 | 0 | 2 | 0 | 1 | 2 | 0 | 2 | 1 | 0 | 1 | 2 | 6 | 1 | 5 | 2 | 3 | 0 | 5 | 2 | 3 | 2 | 2 | 1 | 2 |
8,086 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/tokens.py
|
routes1846.tokens.Station
|
class Station(Token):
    """A railroad's station marker; behaviour comes entirely from Token."""
    pass
|
class Station(Token):
pass
| 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 2 | 0 | 2 | 1 | 1 | 0 | 2 | 1 | 1 | 0 | 2 | 0 | 0 |
8,087 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/board.py
|
routes1846.board.Board
|
class Board(object):
    """The game map: pre-printed spaces overlaid by tiles placed during play."""

    @staticmethod
    def load():
        """Create a Board populated with the pre-printed board tiles."""
        board_tiles = {board_tile.cell: board_tile for board_tile in boardtile.load()}
        return Board(board_tiles)

    def __init__(self, board_tiles):
        # board_tiles: {cell: pre-printed space}; placed tiles shadow these.
        self._board_tiles = board_tiles
        self._placed_tiles = {}

    def place_tile(self, coord, tile, orientation):
        """Place or upgrade `tile` at `coord` rotated to `orientation` (0-5).

        Raises:
            ValueError: for the Chicago cell (use place_chicago), an
                out-of-range orientation, or an illegal placement/upgrade.
        """
        cell = Cell.from_coord(coord)
        if cell == CHICAGO_CELL or tile.is_chicago:
            raise ValueError("Since Chicago ({}) is a special tile, please use Board.place_chicago().".format(CHICAGO_CELL))
        if int(orientation) not in range(0, 6):
            raise ValueError("Orientation out of range. Expected between 0 and 5, inclusive. Got {}.".format(orientation))

        old_tile = self.get_space(cell)
        # BUG FIX: `cell` is now passed through; the validator formerly raised
        # NameError (undefined `cell`) on every one of its error paths.
        self._validate_place_tile_space_type(cell, tile, old_tile)
        self._validate_place_tile_neighbors(cell, tile, orientation)

        if old_tile:
            self._validate_place_tile_upgrade(old_tile, cell, tile, orientation)
            # Upgrades keep the old name, stations, and bonus-token values.
            self._placed_tiles[cell] = PlacedTile.place(old_tile.name, cell, tile, orientation, stations=old_tile.stations, port_value=old_tile.port_value, meat_value=old_tile.meat_value)
        else:
            self._placed_tiles[cell] = PlacedTile.place(None, cell, tile, orientation)

    def place_station(self, coord, railroad):
        """Place a station for `railroad` on the city at `coord`."""
        cell = Cell.from_coord(coord)
        if cell == CHICAGO_CELL:
            raise ValueError("Since Chicago ({}) is a special tile, please use Board.place_chicago_station().".format(CHICAGO_CELL))

        tile = self.get_space(cell)
        if not tile.is_city:
            raise ValueError("{} is not a city, so it cannot have a station.".format(cell))
        tile.add_station(railroad)

    def place_chicago(self, tile):
        """Upgrade the Chicago space to `tile`, keeping stations and bonus values."""
        cell = CHICAGO_CELL
        old_tile = self._placed_tiles.get(cell) or self._board_tiles.get(cell)
        if not old_tile.phase or old_tile.phase >= tile.phase:
            raise ValueError("{}: Going from phase {} to phase {} is not an upgrade.".format(cell, old_tile.phase, tile.phase))

        new_tile = Chicago.place(tile, old_tile.exit_cell_to_station, port_value=old_tile.port_value, meat_value=old_tile.meat_value)
        self._placed_tiles[cell] = new_tile

    def place_chicago_station(self, railroad, exit_side):
        """Place a Chicago station on the exit identified by side number."""
        chicago = self.get_space(CHICAGO_CELL)
        exit_cell = CHICAGO_CELL.neighbors[exit_side]
        chicago.add_station(railroad, exit_cell)

    def place_seaport_token(self, coord, railroad):
        """Place the unique seaport bonus token for `railroad` at `coord`."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Steamboat Company's token: {}".format(railroad.name))

        current_cell = Cell.from_coord(coord)
        # The token is unique board-wide: reject if it already sits elsewhere.
        for cell in board_cells():
            space = self.get_space(cell)
            if space and space.port_token and cell != current_cell:
                raise ValueError("Cannot place the seaport token on {}. It's already been placed on {}.".format(current_cell, cell))
        self.get_space(current_cell).place_seaport_token(railroad)

    def place_meat_packing_token(self, coord, railroad):
        """Place the unique meat packing bonus token for `railroad` at `coord`."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Meat Packing Company's token: {}".format(railroad.name))

        current_cell = Cell.from_coord(coord)
        for cell in board_cells():
            space = self.get_space(cell)
            if space and space.meat_token and cell != current_cell:
                raise ValueError("Cannot place the meat packing token on {}. It's already been placed on {}.".format(current_cell, cell))
        self.get_space(current_cell).place_meat_packing_token(railroad)

    def stations(self, railroad_name=None):
        """All stations on the board, optionally filtered to one railroad."""
        all_tiles = list(self._placed_tiles.values()) + list(self._board_tiles.values())
        all_stations = itertools.chain.from_iterable([tile.stations for tile in all_tiles if isinstance(tile, (boardtile.City, PlacedTile))])
        if railroad_name:
            return tuple([station for station in all_stations if station.railroad.name == railroad_name])
        else:
            return tuple(all_stations)

    def get_space(self, cell):
        """The current space at `cell`: a placed tile shadows the printed one."""
        return self._placed_tiles.get(cell) or self._board_tiles.get(cell)

    def validate(self):
        """Raise ValueError if any placed tile has no neighbors and no stations."""
        invalid = []
        for cell, placed_tile in self._placed_tiles.items():
            if not placed_tile.stations:
                for neighbor_cell in placed_tile.paths():
                    neighbor = self.get_space(neighbor_cell)
                    if neighbor and cell in neighbor.paths():
                        break
                else:
                    invalid.append(cell)

        if invalid:
            invalid_str = ", ".join([str(cell) for cell in invalid])
            raise ValueError("Tiles at the following spots have no neighbors and no stations: {}".format(invalid_str))

    def _validate_place_tile_space_type(self, cell, tile, old_tile):
        """Check that `tile`'s type (track / city / Z city) matches the space at `cell`."""
        if old_tile and old_tile.is_terminal_city:
            raise ValueError("Cannot upgrade the terminal cities.")

        if not old_tile or not old_tile.is_city:
            if tile.is_city or tile.is_z:
                tile_type = "Z city" if tile.is_z else "city"
                raise ValueError("{} is a track space, but you placed a {} ({}).".format(cell, tile_type, tile.id))
        elif old_tile.is_z:
            if not tile.is_z:
                tile_type = "city" if tile.is_city else "track"
                raise ValueError("{} is a Z city space, but you placed a {} ({}).".format(cell, tile_type, tile.id))
        elif old_tile.is_city:
            if not tile.is_city or tile.is_z:
                tile_type = "Z city" if tile.is_z else "track"
                raise ValueError("{} is a regular city space, but you placed a {} ({}).".format(cell, tile_type, tile.id))

    def _validate_place_tile_neighbors(self, cell, tile, orientation):
        """Check that no path runs into the side of an un-enterable neighbor."""
        for neighbor in PlacedTile.get_paths(cell, tile, orientation):
            neighbor_space = self.get_space(neighbor)
            if neighbor_space and neighbor_space.phase is None and cell not in neighbor_space.paths():
                tile_type = "terminal city" if neighbor_space.is_terminal_city else "pre-printed phase 4 tile"
                raise ValueError("Placing tile {} on {} in orientation {} runs into the side of the {} at {}.".format(
                    tile.id, cell, orientation, tile_type, neighbor_space.cell))

    def _validate_place_tile_upgrade(self, old_tile, cell, new_tile, orientation):
        """Check the upgrade raises the phase and preserves every old path."""
        if old_tile:
            if old_tile.phase is None:
                raise ValueError("{} cannot be upgraded.".format(cell))
            elif old_tile.phase >= new_tile.phase:
                raise ValueError("{}: Going from phase {} to phase {} is not an upgrade.".format(cell, old_tile.phase, new_tile.phase))

            for old_start, old_ends in old_tile._paths.items():
                old_paths = tuple([(old_start, end) for end in old_ends])
                new_paths = tuple([(start, end) for start, ends in PlacedTile.get_paths(cell, new_tile, orientation).items() for end in ends])
                if not all(old_path in new_paths for old_path in old_paths):
                    raise ValueError("The new tile placed on {} does not preserve all the old paths.".format(cell))
|
class Board(object):
@staticmethod
def load():
pass
def __init__(self, board_tiles):
pass
def place_tile(self, coord, tile, orientation):
pass
def place_station(self, coord, railroad):
pass
def place_chicago(self, tile):
pass
def place_chicago_station(self, railroad, exit_side):
pass
def place_seaport_token(self, coord, railroad):
pass
def place_meat_packing_token(self, coord, railroad):
pass
def stations(self, railroad_name=None):
pass
def get_space(self, cell):
pass
def validate(self):
pass
def _validate_place_tile_space_type(self, tile, old_tile):
pass
def _validate_place_tile_neighbors(self, cell, tile, orientation):
pass
def _validate_place_tile_upgrade(self, old_tile, cell, new_tile, orientation):
pass
| 16 | 0 | 9 | 1 | 8 | 0 | 4 | 0 | 1 | 11 | 4 | 0 | 13 | 2 | 14 | 14 | 140 | 26 | 114 | 48 | 98 | 0 | 107 | 47 | 92 | 11 | 1 | 4 | 49 |
8,088 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.BoardSpace
|
class BoardSpace(object):
    """Base class for any hex on the board: paths, type flags, and bonus tokens."""

    def __init__(self, name, cell, phase, paths, is_city=False, is_z=False, is_chicago=False, is_terminal_city=False,
            port_value=0, meat_value=0):
        self.cell = cell
        self.name = name or str(cell)
        # A built-in phase 4 tile is similar to a terminal city: not upgradeable.
        self.phase = None if phase == 4 else phase
        self._paths = paths
        self.is_city = is_city
        self.is_z = is_z
        self.is_chicago = is_chicago
        self.is_terminal_city = is_terminal_city
        self.port_value = port_value
        self.meat_value = meat_value
        self.port_token = None
        self.meat_token = None

    def paths(self, enter_from=None, railroad=None):
        """Exits reachable from `enter_from`, or every entry point when omitted."""
        if railroad and railroad.is_removed:
            raise ValueError("A removed railroad cannot run routes: {}".format(railroad.name))
        if not enter_from:
            return tuple(self._paths.keys())
        return self._paths[enter_from]

    def place_seaport_token(self, railroad):
        """Attach the seaport bonus token for `railroad` to this space."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Steamboat Company's token: {}".format(railroad.name))
        if self.port_value == 0:
            raise ValueError("It is not legal to place the seaport token on this space ({}).".format(self.cell))
        self.port_token = SeaportToken(self.cell, railroad)

    def place_meat_packing_token(self, railroad):
        """Attach the meat packing bonus token for `railroad` to this space."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Meat Packing Company's token: {}".format(railroad.name))
        if self.meat_value == 0:
            raise ValueError("It is not legal to place the meat packing token on this space ({}).".format(self.cell))
        self.meat_token = MeatPackingToken(self.cell, railroad)

    def port_bonus(self, railroad, phase):
        """Seaport bonus owed to `railroad` in `phase` (0 unless it owns the token)."""
        if phase == 4 or not self.port_token:
            return 0
        return self.port_value if self.port_token.railroad == railroad else 0

    def meat_bonus(self, railroad, phase):
        """Meat packing bonus owed to `railroad` in `phase` (0 unless it owns the token)."""
        if phase == 4 or not self.meat_token:
            return 0
        return self.meat_value if self.meat_token.railroad == railroad else 0
|
class BoardSpace(object):
def __init__(self, name, cell, phase, paths, is_city=False, is_z=False, is_chicago=False, is_terminal_city=False,
port_value=0, meat_value=0):
pass
def paths(self, enter_from=None, railroad=None):
pass
def place_seaport_token(self, railroad):
pass
def place_meat_packing_token(self, railroad):
pass
def port_bonus(self, railroad, phase):
pass
def meat_bonus(self, railroad, phase):
pass
| 7 | 0 | 7 | 1 | 6 | 0 | 3 | 0.03 | 1 | 5 | 2 | 3 | 6 | 12 | 6 | 6 | 49 | 11 | 38 | 20 | 30 | 1 | 36 | 19 | 29 | 3 | 1 | 1 | 15 |
8,089 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/tokens.py
|
routes1846.tokens.Token
|
class Token(object):
    """Base marker tying a railroad to a specific board cell."""

    def __init__(self, cell, railroad):
        self.railroad = railroad
        self.cell = cell
|
class Token(object):
def __init__(self, cell, railroad):
pass
| 2 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 0 | 0 | 2 | 1 | 2 | 1 | 1 | 4 | 0 | 4 | 4 | 2 | 0 | 4 | 4 | 2 | 1 | 1 | 0 | 1 |
8,090 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.City
|
class City(BoardSpace):
    """A city space that can hold railroad stations and earn revenue."""

    @staticmethod
    def create(coord, name, phase=0, edges=[], value=0, capacity=0, is_z=False, port_value=0, meat_value=0):
        """Build a City (or Chicago, for the special Chicago cell) from board data.

        NOTE(review): `edges=[]` is a mutable default; it is only read here,
        but confirm no caller relies on mutating it.
        """
        cell = Cell.from_coord(coord)
        neighbors = {cell.neighbors[side] for side in edges}
        if cell == CHICAGO_CELL:
            # Chicago's exits start with no through-paths; stations claim exits later.
            paths = {cell.neighbors[side]: [] for side in edges}
            return Chicago(phase, paths, neighbors, value, capacity, port_value=port_value, meat_value=meat_value)
        else:
            # A city is a hub: every entrance connects to every other neighbor.
            paths = {neighbor: list(neighbors - {neighbor}) for neighbor in neighbors}
            return City(name, cell, phase, paths, neighbors, value, capacity, is_z, False, port_value=port_value, meat_value=meat_value)

    def __init__(self, name, cell, phase, paths, neighbors, value, capacity, is_z=False, is_chicago=False, port_value=0, meat_value=0):
        super(City, self).__init__(name, cell, phase, paths, True, is_z, is_chicago, port_value=port_value, meat_value=meat_value)
        self.neighbors = neighbors
        self._value = value
        self.capacity = capacity
        self._stations = []

    @property
    def stations(self):
        # Exposed as a tuple so callers cannot mutate the internal station list.
        return tuple(self._stations)

    def value(self, railroad, phase):
        """Revenue for `railroad` in `phase`, including any bonus-token values."""
        return self._value + self.port_bonus(railroad, phase) + self.meat_bonus(railroad, phase)

    def add_station(self, railroad):
        """Place a station for `railroad`; raises ValueError if duplicate or full."""
        if self.has_station(railroad.name):
            raise ValueError("{} already has a station in {} ({}).".format(railroad.name, self.name, self.cell))
        if self.capacity <= len(self.stations):
            raise ValueError("{} ({}) cannot hold any more stations.".format(self.name, self.cell))
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return the named railroad's station here, or None if absent."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        return bool(self.get_station(railroad_name))

    def passable(self, enter_cell, railroad):
        # A route may continue through if a station slot is free or the
        # running railroad has its own station here.
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)
|
class City(BoardSpace):
@staticmethod
def create(coord, name, phase=0, edges=[], value=0, capacity=0, is_z=False, port_value=0, meat_value=0):
pass
def __init__(self, name, cell, phase, paths, neighbors, value, capacity, is_z=False, is_chicago=False, port_value=0, meat_value=0):
pass
@property
def stations(self):
pass
def value(self, railroad, phase):
pass
def add_station(self, railroad):
pass
def get_station(self, railroad_name):
pass
def has_station(self, railroad_name):
pass
def passable(self, enter_cell, railroad):
pass
| 11 | 0 | 5 | 1 | 5 | 0 | 2 | 0 | 1 | 8 | 3 | 1 | 7 | 4 | 8 | 14 | 51 | 12 | 39 | 20 | 28 | 0 | 36 | 18 | 27 | 3 | 2 | 2 | 13 |
8,091 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.Chicago
|
class Chicago(City):
    """The special pre-printed Chicago city: each station claims one exit side."""

    def __init__(self, phase, paths, neighbors, value, capacity, port_value, meat_value):
        super(Chicago, self).__init__("Chicago", CHICAGO_CELL, phase, paths, neighbors, value, capacity, False, True,
                port_value=port_value, meat_value=meat_value)
        # Maps the exit cell a station controls -> that Station.
        self.exit_cell_to_station = {}

    def add_station(self, railroad, exit_cell):
        """Place a station that owns the path through `exit_cell`."""
        station = super(Chicago, self).add_station(railroad)
        self.exit_cell_to_station[exit_cell] = station
        return station

    def passable(self, enter_cell, railroad):
        # No route may ever continue through Chicago.
        return False

    def get_station_exit_cell(self, user_station):
        """Find which exit cell `user_station` controls; raises if not present."""
        for exit_cell, station in self.exit_cell_to_station.items():
            if station == user_station:
                return exit_cell
        raise ValueError("The requested station was not found: {}".format(user_station))
|
class Chicago(City):
def __init__(self, phase, paths, neighbors, value, capacity, port_value, meat_value):
pass
def add_station(self, railroad, exit_cell):
pass
def passable(self, enter_cell, railroad):
pass
def get_station_exit_cell(self, user_station):
pass
| 5 | 0 | 4 | 0 | 4 | 0 | 2 | 0 | 1 | 2 | 0 | 0 | 4 | 1 | 4 | 18 | 20 | 4 | 16 | 8 | 11 | 0 | 15 | 8 | 10 | 3 | 3 | 2 | 6 |
8,092 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.TerminalCity
|
class TerminalCity(BoardSpace):
    """An off-board terminal city; its revenue depends on the game phase."""

    @staticmethod
    def create(coord, name, edges, values, is_east=False, is_west=False, port_value=0, meat_value=0):
        """Build the right terminal subtype (east/west bonus cities differ)."""
        cell = Cell.from_coord(coord)
        # Terminal cities are route endpoints, so exits carry no through-paths.
        paths = {cell.neighbors[side]: [] for side in edges}
        neighbors = set(paths.keys())
        if is_east:
            return EastTerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)
        elif is_west:
            return WestTerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)
        else:
            return TerminalCity(name, cell, paths, neighbors, values, port_value=port_value, meat_value=meat_value)

    def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
        super(TerminalCity, self).__init__(name, cell, None, paths, True, is_terminal_city=True, port_value=port_value, meat_value=meat_value)
        self.neighbors = neighbors
        # value_dict supplies per-phase revenue under "phase1" and "phase3" keys.
        self.phase1_value = value_dict["phase1"]
        self.phase3_value = value_dict["phase3"]

    def value(self, railroad, phase):
        """Phase-dependent revenue plus any bonus-token values."""
        value = self.phase1_value if phase in (1, 2) else self.phase3_value
        return value + self.port_bonus(railroad, phase) + self.meat_bonus(railroad, phase)

    def passable(self, enter_cell, railroad):
        # Routes must terminate here; they can never continue through.
        return False
|
class TerminalCity(BoardSpace):
@staticmethod
def create(coord, name, edges, values, is_east=False, is_west=False, port_value=0, meat_value=0):
pass
def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
pass
def value(self, railroad, phase):
pass
def passable(self, enter_cell, railroad):
pass
| 6 | 0 | 6 | 1 | 5 | 0 | 2 | 0 | 1 | 5 | 3 | 2 | 3 | 3 | 4 | 10 | 28 | 6 | 22 | 13 | 16 | 0 | 19 | 12 | 14 | 3 | 2 | 1 | 7 |
8,093 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.Track
|
class Track(BoardSpace):
    """Pre-printed plain track (no city) on the board."""

    @staticmethod
    def create(coord, edges, phase=None):
        """Build a Track at `coord` from (side, side) edge pairs."""
        cell = Cell.from_coord(coord)
        paths = collections.defaultdict(list)
        for side_a, side_b in edges:
            cell_a = cell.neighbors[side_a]
            cell_b = cell.neighbors[side_b]
            # Record the connection in both directions.
            paths[cell_a].append(cell_b)
            paths[cell_b].append(cell_a)
        return Track(cell, phase, paths)

    def __init__(self, cell, phase, paths):
        super(Track, self).__init__(None, cell, phase, paths)

    def value(self, railroad, phase):
        # Plain track never earns revenue.
        return 0
|
class Track(BoardSpace):
@staticmethod
def create(coord, edges, phase=None):
pass
def __init__(self, cell, phase, paths):
pass
def value(self, railroad, phase):
pass
| 5 | 0 | 5 | 1 | 4 | 0 | 1 | 0 | 1 | 3 | 1 | 0 | 2 | 0 | 3 | 9 | 20 | 5 | 15 | 10 | 10 | 0 | 14 | 9 | 10 | 2 | 2 | 1 | 4 |
8,094 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/boardtile.py
|
routes1846.boardtile.WestTerminalCity
|
class WestTerminalCity(TerminalCity):
    """A western off-board city that pays a bonus on an east-to-west run."""

    def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
        super(WestTerminalCity, self).__init__(name, cell, paths, neighbors, value_dict, port_value, meat_value)
        self.bonus = value_dict["bonus"]

    def value(self, railroad, phase, east_to_west=False):
        """Base terminal value, plus the bonus when the run spans east to west."""
        base = super(WestTerminalCity, self).value(railroad, phase)
        if east_to_west:
            return base + self.bonus
        return base
|
class WestTerminalCity(TerminalCity):
def __init__(self, name, cell, paths, neighbors, value_dict, port_value, meat_value):
pass
def value(self, railroad, phase, east_to_west=False):
pass
| 3 | 0 | 3 | 1 | 3 | 0 | 2 | 0 | 1 | 1 | 0 | 0 | 2 | 1 | 2 | 12 | 8 | 2 | 6 | 4 | 3 | 0 | 6 | 4 | 3 | 2 | 3 | 0 | 3 |
8,095 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/cell.py
|
routes1846.cell.Cell
|
class Cell(object):
    """An immutable hex coordinate (row letter + column number) on the map."""

    @staticmethod
    def from_coord(coord):
        """Look up the canonical Cell for a coordinate string such as "D6"."""
        if not 2 <= len(coord) <= 3:
            raise ValueError("Provided invalid coord: {}".format(coord))

        row = coord[0]
        col = int(coord[1:])
        if row not in _CELL_DB or col not in _CELL_DB[row]:
            raise ValueError("The coordinate provided is not legal: {}".format(coord))
        return _CELL_DB[row][col]

    def __init__(self, row, col):
        self.__row = row
        self.__col = col

    @property
    def neighbors(self):
        """Map of side index (0-5) to the adjacent Cell, or None when off-map."""
        row_above = chr(ord(self.__row) + 1)
        row_below = chr(ord(self.__row) - 1)
        offsets = [
            (row_above, self.__col - 1),
            (self.__row, self.__col - 2),
            (row_below, self.__col - 1),
            (row_below, self.__col + 1),
            (self.__row, self.__col + 2),
            (row_above, self.__col + 1),
        ]
        return {side: _CELL_DB.get(row, {}).get(col) for side, (row, col) in enumerate(offsets)}

    def __hash__(self):
        return hash(str(self))

    def __eq__(self, other):
        return isinstance(other, Cell) and (self.__row, self.__col) == (other.__row, other.__col)

    def __gt__(self, other):
        # Row letters order first; columns break ties within a row.
        return (self.__row, self.__col) > (other.__row, other.__col)

    def __lt__(self, other):
        return (self.__row, self.__col) < (other.__row, other.__col)

    def __ge__(self, other):
        return self > other or self == other

    def __le__(self, other):
        return self < other or self == other

    def __str__(self):
        return "{}{}".format(self.__row, self.__col)

    def __repr__(self):
        return str(self)
|
class Cell(object):
@staticmethod
def from_coord(coord):
pass
def __init__(self, row, col):
pass
@property
def neighbors(self):
pass
def __hash__(self):
pass
def __eq__(self, other):
pass
def __gt__(self, other):
pass
def __lt__(self, other):
pass
def __ge__(self, other):
pass
def __le__(self, other):
pass
def __str__(self):
pass
def __repr__(self):
pass
| 14 | 0 | 4 | 0 | 4 | 0 | 1 | 0 | 1 | 3 | 0 | 0 | 10 | 2 | 11 | 11 | 57 | 11 | 46 | 17 | 32 | 0 | 35 | 15 | 23 | 3 | 1 | 1 | 16 |
8,096 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/placedtile.py
|
routes1846.placedtile.Chicago
|
class Chicago(PlacedTile):
    """The placed Chicago tile; each station owns exactly one exit path."""

    @staticmethod
    def place(tile, exit_cell_to_station=None, port_value=None, meat_value=None):
        """Place `tile` on the Chicago cell (its orientation is fixed at 0)."""
        # BUG FIX: was a shared mutable default dict ({}), which add_station
        # mutates — every default-constructed Chicago shared the same stations.
        exit_cell_to_station = {} if exit_cell_to_station is None else exit_cell_to_station
        paths = PlacedTile.get_paths(CHICAGO_CELL, tile, 0)
        return Chicago(tile, exit_cell_to_station, paths, port_value, meat_value)

    def __init__(self, tile, exit_cell_to_station=None, paths=None, port_value=None, meat_value=None):
        # Same mutable-default fix as place().
        exit_cell_to_station = {} if exit_cell_to_station is None else exit_cell_to_station
        paths = {} if paths is None else paths
        super(Chicago, self).__init__("Chicago", CHICAGO_CELL, tile, list(exit_cell_to_station.values()), paths, port_value, meat_value)
        # Maps the exit cell a station controls -> that Station.
        self.exit_cell_to_station = exit_cell_to_station

    def paths(self, enter_from=None, railroad=None):
        """Exits reachable from `enter_from`, restricted by station ownership."""
        paths = list(super(Chicago, self).paths(enter_from))
        if railroad:
            enter_from_station = self.exit_cell_to_station.get(enter_from)
            if enter_from_station:
                # Entering on a claimed exit: only that station's owner may use it.
                if enter_from_station.railroad != railroad:
                    paths = []
            else:
                if not enter_from:
                    # Starting a route in Chicago: only via the railroad's own exit.
                    station = self.get_station(railroad.name)
                    paths = [self.get_station_exit_cell(station), Cell.from_coord("C5")] if station else []
                else:
                    # BUG FIX: the old code called paths.remove(exit) while
                    # iterating paths, silently skipping elements; filter instead.
                    paths = [exit for exit in paths
                             if not (self.exit_cell_to_station.get(exit)
                                     and self.exit_cell_to_station.get(exit).railroad != railroad)]
        return tuple(paths)

    def add_station(self, railroad, exit_cell):
        """Place a station for `railroad` that claims the path through `exit_cell`."""
        if exit_cell not in self.paths():
            raise ValueError("Illegal exit cell for Chicago")
        station = super(Chicago, self).add_station(railroad)
        self.exit_cell_to_station[exit_cell] = station
        return station

    def get_station_exit_cell(self, user_station):
        """Find which exit cell `user_station` owns; raises if not found."""
        for exit_cell, station in self.exit_cell_to_station.items():
            if station == user_station:
                return exit_cell
        raise ValueError("The requested station was not found: {}".format(user_station))

    def passable(self, enter_cell, railroad):
        """A route may pass through only via its own railroad's claimed exit."""
        chicago_station = self.exit_cell_to_station.get(enter_cell)
        if chicago_station:
            return chicago_station.railroad == railroad
        else:
            return True
|
class Chicago(PlacedTile):
@staticmethod
def place(tile, exit_cell_to_station={}, port_value=None, meat_value=None):
pass
def __init__(self, tile, exit_cell_to_station={}, paths={}, port_value=None, meat_value=None):
pass
def paths(self, enter_from=None, railroad=None):
pass
def add_station(self, railroad, exit_cell):
pass
def get_station_exit_cell(self, user_station):
pass
def passable(self, enter_cell, railroad):
pass
| 8 | 0 | 7 | 0 | 7 | 0 | 3 | 0 | 1 | 5 | 1 | 0 | 5 | 1 | 6 | 21 | 49 | 7 | 42 | 17 | 34 | 0 | 38 | 16 | 31 | 8 | 2 | 5 | 17 |
8,097 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/placedtile.py
|
routes1846.placedtile.PlacedTile
|
class PlacedTile(object):
    """A tile that has been placed (and rotated) on a specific board cell."""

    @staticmethod
    def _rotate(side, orientation):
        """Rotate a hex side index by `orientation` steps."""
        # ((side num) + (number of times rotated)) mod (number of sides)
        return (side + int(orientation)) % 6

    @staticmethod
    def get_paths(cell, tile, orientation):
        """Translate `tile`'s edge paths into cell-to-cell paths at `cell`.

        Raises:
            ValueError: if any rotated path would leave the map.
        """
        paths = {}
        for start, ends in tile.paths.items():
            start_cell = cell.neighbors[PlacedTile._rotate(start, orientation)]
            paths[start_cell] = tuple([cell.neighbors[PlacedTile._rotate(end, orientation)] for end in ends])

        if None in paths:
            raise ValueError("Placing tile {} in orientation {} at {} goes off-map.".format(tile.id, orientation, cell))
        return paths

    @staticmethod
    def place(name, cell, tile, orientation, stations=None, port_value=None, meat_value=None):
        """Create a PlacedTile for `tile` at `cell` in `orientation`."""
        # BUG FIX: stations previously defaulted to a shared mutable list.
        stations = [] if stations is None else stations
        paths = PlacedTile.get_paths(cell, tile, orientation)
        return PlacedTile(name, cell, tile, stations, paths, port_value, meat_value)

    def __init__(self, name, cell, tile, stations=None, paths=None, port_value=None, meat_value=None):
        # BUG FIX: stations/paths previously defaulted to shared mutable
        # objects; copy defensively so no instance aliases a caller's data.
        self.name = name or str(cell)
        self.cell = cell
        self.tile = tile
        self.capacity = tile.capacity
        self._stations = list(stations) if stations is not None else []
        self._paths = dict(paths) if paths is not None else {}
        self.port_value = port_value
        self.port_token = None
        self.meat_value = meat_value
        self.meat_token = None
        # Mirror the underlying tile's classification for uniform board queries.
        self.phase = self.tile.phase
        self.is_city = self.tile.is_city
        self.is_z = self.tile.is_z
        self.is_terminal_city = False

    def value(self, railroad, phase):
        """Revenue for `railroad` in `phase`, including any bonus-token values."""
        return self.tile.value + self.port_bonus(railroad, phase) + self.meat_bonus(railroad, phase)

    def passable(self, enter_cell, railroad):
        # A route may continue through if a station slot is free or the
        # running railroad has its own station here.
        return self.capacity - len(self.stations) > 0 or self.has_station(railroad.name)

    @property
    def stations(self):
        # Exposed as a tuple so callers cannot mutate the internal station list.
        return tuple(self._stations)

    def add_station(self, railroad):
        """Place a station for `railroad`; raises ValueError if duplicate or full."""
        if self.has_station(railroad.name):
            raise ValueError("{} already has a station in {} ({}).".format(railroad.name, self.name, self.cell))
        if self.capacity <= len(self.stations):
            raise ValueError("{} ({}) cannot hold any more stations.".format(self.name, self.cell))
        station = Station(self.cell, railroad)
        self._stations.append(station)
        return station

    def get_station(self, railroad_name):
        """Return the named railroad's station here, or None if absent."""
        for station in self._stations:
            if station.railroad.name == railroad_name:
                return station
        return None

    def has_station(self, railroad_name):
        return bool(self.get_station(railroad_name))

    def place_seaport_token(self, railroad):
        """Attach the seaport bonus token for `railroad` to this tile."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Steamboat Company's token: {}".format(railroad.name))
        if self.port_value == 0:
            raise ValueError("It is not legal to place the seaport token on this space ({}).".format(self.cell))
        self.port_token = SeaportToken(self.cell, railroad)

    def place_meat_packing_token(self, railroad):
        """Attach the meat packing bonus token for `railroad` to this tile."""
        if railroad.is_removed:
            raise ValueError("A removed railroad cannot place Meat Packing Company's token: {}".format(railroad.name))
        if self.meat_value == 0:
            raise ValueError("It is not legal to place the meat packing token on this space ({}).".format(self.cell))
        self.meat_token = MeatPackingToken(self.cell, railroad)

    def port_bonus(self, railroad, phase):
        """Seaport bonus owed to `railroad` in `phase` (0 unless it owns the token)."""
        return self.port_value if phase != 4 and self.port_token and self.port_token.railroad == railroad else 0

    def meat_bonus(self, railroad, phase):
        """Meat packing bonus owed to `railroad` in `phase` (0 unless it owns the token)."""
        return self.meat_value if phase != 4 and self.meat_token and self.meat_token.railroad == railroad else 0

    def paths(self, enter_from=None, railroad=None):
        """Exits reachable from `enter_from`, or every entry point when omitted."""
        if railroad and railroad.is_removed:
            raise ValueError("A removed railroad cannot run routes: {}".format(railroad.name))
        if enter_from:
            return self._paths[enter_from]
        else:
            return tuple(self._paths.keys())
|
class PlacedTile(object):
@staticmethod
def _rotate(side, orientation):
pass
@staticmethod
def get_paths(cell, tile, orientation):
pass
@staticmethod
def place(name, cell, tile, orientation, stations=[], port_value=None, meat_value=None):
pass
def __init__(self, name, cell, tile, stations=[], paths={}, port_value=None, meat_value=None):
pass
def value(self, railroad, phase):
pass
def passable(self, enter_cell, railroad):
pass
@property
def stations(self):
pass
def add_station(self, railroad):
pass
def get_station(self, railroad_name):
pass
def has_station(self, railroad_name):
pass
def place_seaport_token(self, railroad):
pass
def place_meat_packing_token(self, railroad):
pass
def port_bonus(self, railroad, phase):
pass
def meat_bonus(self, railroad, phase):
pass
def paths(self, enter_from=None, railroad=None):
pass
| 20 | 0 | 6 | 1 | 5 | 0 | 2 | 0.01 | 1 | 9 | 3 | 1 | 12 | 14 | 15 | 15 | 102 | 24 | 77 | 40 | 57 | 1 | 72 | 36 | 56 | 3 | 1 | 2 | 29 |
8,098 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/railroads.py
|
routes1846.railroads.Railroad
|
class Railroad(object):
    """A railroad company with its fleet of trains."""

    @staticmethod
    def create(name, trains_str):
        """Parse a comma-separated train spec string into a Railroad."""
        trains = []
        if trains_str:
            trains = [Train.create(part) for part in trains_str.split(",") if part]
        return Railroad(name, trains)

    def __init__(self, name, trains):
        self.name = name
        self.trains = trains
        self.has_mail_contract = False

    def assign_mail_contract(self):
        """Grant this railroad the Mail Contract bonus."""
        self.has_mail_contract = True

    @property
    def is_removed(self):
        # An active railroad is never removed; RemovedRailroad overrides this.
        return False
|
class Railroad(object):
@staticmethod
def create(name, trains_str):
pass
def __init__(self, name, trains):
pass
def assign_mail_contract(self):
pass
@property
def is_removed(self):
pass
| 7 | 0 | 3 | 0 | 3 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 3 | 3 | 4 | 4 | 18 | 4 | 14 | 11 | 7 | 0 | 12 | 9 | 7 | 2 | 1 | 0 | 5 |
8,099 |
Auzzy/1846-routes
|
Auzzy_1846-routes/routes1846/railroads.py
|
routes1846.railroads.RemovedRailroad
|
class RemovedRailroad(Railroad):
    """A railroad removed from the game; it owns nothing and cannot act."""

    @staticmethod
    def create(name):
        return RemovedRailroad(name)

    def __init__(self, name):
        # A removed railroad keeps its name but has no trains.
        super().__init__(name, [])
        self.has_mail_contract = False

    def assign_mail_contract(self):
        msg = "Cannot assign Mail Contract to a removed railroad: {}".format(self.name)
        raise ValueError(msg)

    @property
    def is_removed(self):
        return True
|
class RemovedRailroad(Railroad):
@staticmethod
def create(name):
pass
def __init__(self, name):
pass
def assign_mail_contract(self):
pass
@property
def is_removed(self):
pass
| 7 | 0 | 3 | 0 | 2 | 0 | 1 | 0 | 1 | 2 | 0 | 0 | 3 | 1 | 4 | 8 | 16 | 4 | 12 | 8 | 5 | 0 | 10 | 6 | 5 | 1 | 2 | 0 | 4 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.