Dataset schema. Each column is listed with its dtype and the observed minimum and maximum; for `stringlengths` columns the min/max are string lengths, not values.

| Column | Dtype | Min | Max |
|---|---|---|---|
| id | int64 | 0 | 843k |
| repository_name | stringlengths | 7 | 55 |
| file_path | stringlengths | 9 | 332 |
| class_name | stringlengths | 3 | 290 |
| human_written_code | stringlengths | 12 | 4.36M |
| class_skeleton | stringlengths | 19 | 2.2M |
| total_program_units | int64 | 1 | 9.57k |
| total_doc_str | int64 | 0 | 4.2k |
| AvgCountLine | float64 | 0 | 7.89k |
| AvgCountLineBlank | float64 | 0 | 300 |
| AvgCountLineCode | float64 | 0 | 7.89k |
| AvgCountLineComment | float64 | 0 | 7.89k |
| AvgCyclomatic | float64 | 0 | 130 |
| CommentToCodeRatio | float64 | 0 | 176 |
| CountClassBase | float64 | 0 | 48 |
| CountClassCoupled | float64 | 0 | 589 |
| CountClassCoupledModified | float64 | 0 | 581 |
| CountClassDerived | float64 | 0 | 5.37k |
| CountDeclInstanceMethod | float64 | 0 | 4.2k |
| CountDeclInstanceVariable | float64 | 0 | 299 |
| CountDeclMethod | float64 | 0 | 4.2k |
| CountDeclMethodAll | float64 | 0 | 4.2k |
| CountLine | float64 | 1 | 115k |
| CountLineBlank | float64 | 0 | 9.01k |
| CountLineCode | float64 | 0 | 94.4k |
| CountLineCodeDecl | float64 | 0 | 46.1k |
| CountLineCodeExe | float64 | 0 | 91.3k |
| CountLineComment | float64 | 0 | 27k |
| CountStmt | float64 | 1 | 93.2k |
| CountStmtDecl | float64 | 0 | 46.1k |
| CountStmtExe | float64 | 0 | 90.2k |
| MaxCyclomatic | float64 | 0 | 759 |
| MaxInheritanceTree | float64 | 0 | 16 |
| MaxNesting | float64 | 0 | 34 |
| SumCyclomatic | float64 | 0 | 6k |

Each record below is shown as `id · repository_name · file_path · class_name`, followed by the `human_written_code` and `class_skeleton` fields, then a `metrics:` line listing the 29 numeric columns (`total_program_units` through `SumCyclomatic`) in the schema order above.
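The numeric columns resemble SciTools Understand-style static code metrics. As a quick orientation, here is a minimal, hypothetical sketch of loading such records and sanity-checking one derived column; the JSONL file name is an assumption (substitute however the dataset is actually distributed), and only the column names come from the schema above. In the rows below, `CommentToCodeRatio` appears to match `CountLineComment / CountLineCode` rounded to two decimals (e.g. record 7,800: 10 / 9 ≈ 1.11).

```python
import json

# Hypothetical local export of the dataset, one JSON object per line,
# keyed by the column names from the schema above.
with open("class_metrics.jsonl", encoding="utf-8") as f:
    records = [json.loads(line) for line in f]

for rec in records:
    # CommentToCodeRatio should agree with CountLineComment / CountLineCode
    # up to two-decimal rounding; guard against zero-code rows.
    code = rec["CountLineCode"]
    expected = rec["CountLineComment"] / code if code else 0.0
    assert abs(rec["CommentToCodeRatio"] - expected) < 0.01, rec["id"]
```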
7,800 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/guard.py · zsl.resource.guard.GuardedMixin
class GuardedMixin: """Add guarded CRUD methods to resource. The ``guard`` replaces the CRUD guarded methods with a wrapper with security checks around these methods. It adds this mixin into the resource automatically, but it can be declared on the resource manually for IDEs to accept calls to the guarded methods. """ def guarded_create(self, params, args, data): # type: (str, Dict[str, str], Dict[str, Any]) -> Dict[str, Any] pass def guarded_read(self, params, args, data): # type: (str, Dict[str, str], Dict[str, Any]) -> Dict[str, Any] pass def guarded_update(self, params, args, data): # type: (str, Dict[str, str], Dict[str, Any]) -> Dict[str, Any] pass def guarded_delete(self, params, args, data): # type: (str, Dict[str, str], Dict[str, Any]) -> Dict[str, Any] pass
class GuardedMixin: '''Add guarded CRUD methods to resource. The ``guard`` replaces the CRUD guarded methods with a wrapper with security checks around these methods. It adds this mixin into the resource automatically, but it can be declared on the resource manually for IDEs to accept calls to the guarded methods. ''' def guarded_create(self, params, args, data): pass def guarded_read(self, params, args, data): pass def guarded_update(self, params, args, data): pass def guarded_delete(self, params, args, data): pass
metrics: 5, 1, 3, 0, 2, 1, 1, 1.11, 0, 0, 0, 8, 4, 0, 4, 4, 24, 5, 9, 5, 4, 10, 9, 5, 4, 1, 0, 0, 4
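The `class_skeleton` field is evidently derived from `human_written_code`: method bodies are replaced with `pass` while signatures, decorators, and docstrings are kept (with docstring quotes normalized) and `# type:` comments are dropped. The sketch below shows one way to compute a similar skeleton with the stdlib `ast` module (Python 3.9+ for `ast.unparse`); it is an illustration under those assumptions, not the dataset authors' actual tool, and details such as quote style and `pass` placement differ slightly from the real rows.

```python
import ast

class Skeletonizer(ast.NodeTransformer):
    """Keep docstrings and nested def/class headers; drop everything else."""

    def visit(self, node):
        self.generic_visit(node)  # skeletonize children first
        if isinstance(node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
            doc = node.body[0] if ast.get_docstring(node) else None
            nested = [c for c in node.body
                      if isinstance(c, (ast.ClassDef, ast.FunctionDef,
                                        ast.AsyncFunctionDef))]
            # Docstring (if any), then surviving nested definitions,
            # or a bare `pass` when nothing else remains.
            node.body = ([doc] if doc else []) + (nested or [ast.Pass()])
        return node

def skeletonize(source: str) -> str:
    return ast.unparse(Skeletonizer().visit(ast.parse(source)))

print(skeletonize("class A:\n    'doc'\n    def f(self):\n        return 1\n"))
```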
7,801 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/guard.py · zsl.resource.guard.Access
class Access(Enum): ALLOW = 1 DENY = 2 CONTINUE = 3
class Access(Enum): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 49, 4, 0, 4, 4, 3, 0, 4, 4, 3, 0, 4, 0, 0
7,802 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/task_queue.py · zsl.interface.task_queue.KillWorkerException
class KillWorkerException(Exception): """If any task raises this exception a standalone worker will be killed.""" pass
class KillWorkerException(Exception): '''If any task raises this exception a standalone worker will be killed.''' pass
metrics: 1, 1, 0, 0, 0, 0, 0, 0.5, 1, 0, 0, 0, 0, 0, 0, 10, 4, 1, 2, 1, 1, 1, 2, 1, 1, 0, 3, 0, 0
7,803 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/task_queue.py · zsl.interface.task_queue.JobResult
class JobResult(TypedDict): task_name: Any data: Any
class JobResult(TypedDict): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 1, 2, 0, 3, 1, 2, 0, 1, 0, 0
7,804 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/context_module.py · zsl.application.modules.context_module.TestContextModule
class TestContextModule(DefaultContextModule): """Test application context to current configuration""" def _create_context(self): return InitializationContext(unit_test=True, initializers=unittest_initializers)
class TestContextModule(DefaultContextModule): '''Test application context to current configuration''' def _create_context(self): pass
metrics: 2, 1, 2, 0, 2, 0, 1, 0.33, 1, 1, 1, 0, 1, 0, 1, 3, 5, 1, 3, 2, 1, 1, 3, 2, 1, 1, 2, 0, 1
7,805 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/task.py · zsl.interface.task.ModelConversionError
class ModelConversionError(Exception): def __init__(self, obj, attribute): msg = "Can not fit dictionary into model '{0}' since the model " \ "does not have attribute '{1}'" super().__init__(msg.format(obj, attribute)) self._obj = obj self._attribute = attribute @property def obj(self): return self._obj @property def attribute(self): return self._attribute
class ModelConversionError(Exception): def __init__(self, obj, attribute): pass @property def obj(self): pass @property def attribute(self): pass
metrics: 6, 0, 3, 0, 3, 0, 1, 0, 1, 1, 0, 0, 3, 2, 3, 13, 15, 2, 13, 9, 7, 0, 10, 7, 6, 1, 3, 0, 3
7,806 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/gearman/worker.py · zsl.interface.gearman.worker.ReloadingWorker
class ReloadingWorker(gearman.GearmanWorker): def __init__(self, host_list=None): super().__init__(host_list) self._should_stop = False def on_job_complete(self, current_job, job_result): super().on_job_complete(current_job, job_result) if self._should_stop: quit() return True
class ReloadingWorker(gearman.GearmanWorker): def __init__(self, host_list=None): pass def on_job_complete(self, current_job, job_result): pass
metrics: 3, 0, 4, 0, 4, 0, 2, 0, 1, 1, 0, 0, 2, 1, 2, 2, 10, 1, 9, 4, 6, 0, 9, 4, 6, 2, 1, 1, 3
7,807 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/gearman/worker.py · zsl.interface.gearman.worker.GearmanTaskQueueWorker
class GearmanTaskQueueWorker(TaskQueueWorker): def __init__(self): super().__init__() self.gearman_worker = ReloadingWorker( ["{0}:{1}".format(self._config['GEARMAN']['host'], self._config['GEARMAN']['port'])]) self.gearman_worker.set_client_id(self._get_client_id()) self.gearman_worker.data_encoder = JSONDataEncoder self.gearman_worker.register_task(self._config['GEARMAN_TASK_NAME'], self.execute_gearman_job) self.gearman_worker.logical_worker = self self._current_worker = None def stop_worker(self): self._app.logger.info("Stopping Gearman worker on demand - quitting.") self._current_worker._should_stop = True def execute_gearman_job(self, worker, job): # type: (ReloadingWorker, gearman.job.GearmanJob) -> dict job = job_from_gearman_job(job) self._current_worker = worker return self.execute_job(job) def run(self): self._app.logger.info("Running the worker.") self.gearman_worker.work()
class GearmanTaskQueueWorker(TaskQueueWorker): def __init__(self): pass def stop_worker(self): pass def execute_gearman_job(self, worker, job): pass def run(self): pass
metrics: 5, 0, 6, 1, 5, 0, 1, 0.05, 1, 3, 2, 0, 4, 2, 4, 30, 27, 5, 21, 7, 16, 1, 19, 7, 14, 1, 4, 0, 4
7,808 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/gearman/json_data_encoder.py · zsl.interface.gearman.json_data_encoder.JSONDataEncoder
class JSONDataEncoder(gearman.DataEncoder): @classmethod def encode(cls, encodable_object): return json.dumps(encodable_object, cls=AppModelJSONEncoder) @classmethod def decode(cls, decodable_string): return json.loads(decodable_string)
class JSONDataEncoder(gearman.DataEncoder): @classmethod def encode(cls, encodable_object): pass @classmethod def decode(cls, decodable_string): pass
metrics: 5, 0, 2, 0, 2, 0, 1, 0, 1, 1, 1, 0, 0, 0, 2, 2, 8, 1, 7, 5, 2, 0, 5, 3, 2, 1, 1, 0, 2
7,809 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/model_resource.py · zsl.resource.model_resource.ModelResourceBase
class ModelResourceBase(TransactionalSupportMixin): """ModelResource works only for tables with a single-column identifier (key). .. automethod:: __init__ """ def __init__(self, model_cls=None): """ Create Model CRUD resource for ``model_cls`` """ super().__init__() if not model_cls: self.model_cls = self.__model__ else: self.model_cls = model_cls mapper = class_mapper(self.model_cls) self._model_pk = mapper.primary_key[0] self._model_columns = [column.key for column in mapper.column_attrs] self._related_columns = [column.key for column in mapper.relationships] def to_filter(self, query, arg): return filter_from_url_arg(self.model_cls, query, arg) def add_related(self, query, related): return apply_related(self.model_cls, query, related) def set_ordering(self, query, arg): return order_from_url_arg(self.model_cls, query, arg)
class ModelResourceBase(TransactionalSupportMixin): '''ModelResource works only for tables with a single-column identifier (key). .. automethod:: __init__ ''' def __init__(self, model_cls=None): ''' Create Model CRUD resource for ``model_cls`` ''' pass def to_filter(self, query, arg): pass def add_related(self, query, related): pass def set_ordering(self, query, arg): pass
metrics: 5, 2, 6, 1, 4, 1, 1, 0.41, 1, 1, 0, 1, 4, 4, 4, 5, 32, 8, 17, 10, 12, 7, 16, 10, 11, 2, 1, 1, 5
7,810 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/model_resource.py · zsl.resource.model_resource.ReadOnlyResourceMixin
class ReadOnlyResourceMixin: """ The mixin to be used to forbid the update/delete and create operations. Remember the Python's MRO and place this mixin at the right place in the inheritance declaration. .. automethod:: create .. automethod:: update .. automethod:: delete """ OPERATION_CREATE = 'create' OPERATION_UPDATE = 'update' OPERATION_DELETE = 'delete' @staticmethod def create(params, args, data): """Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed """ raise ReadOnlyResourceUpdateOperationException(ReadOnlyResourceMixin.OPERATION_CREATE) @staticmethod def update(params, args, data): """Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed """ raise ReadOnlyResourceUpdateOperationException(ReadOnlyResourceMixin.OPERATION_UPDATE) @staticmethod def delete(params, args, data): """Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed """ raise ReadOnlyResourceUpdateOperationException(ReadOnlyResourceMixin.OPERATION_DELETE)
class ReadOnlyResourceMixin: ''' The mixin to be used to forbid the update/delete and create operations. Remember the Python's MRO and place this mixin at the right place in the inheritance declaration. .. automethod:: create .. automethod:: update .. automethod:: delete ''' @staticmethod def create(params, args, data): '''Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed ''' pass @staticmethod def update(params, args, data): '''Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed ''' pass @staticmethod def delete(params, args, data): '''Raises exception. Just raises ReadOnlyResourceUpdateOperationException to indicate that this method is not available. :raises ReadOnlyResourceUpdateOperationException: when accessed ''' pass
metrics: 7, 4, 9, 2, 2, 5, 1, 1.69, 0, 1, 1, 1, 0, 0, 3, 3, 46, 11, 13, 10, 6, 22, 10, 7, 6, 1, 0, 0, 3
7,811 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/model_resource.py · zsl.resource.model_resource.ReadOnlyResourceUpdateOperationException
class ReadOnlyResourceUpdateOperationException(Exception): def __init__(self, operation): self._operation = operation super().__init__( "Can not perform operation '{0}' on ReadOnlyResource.".format(operation)) def get_operation(self): return self._operation operation = property(get_operation)
class ReadOnlyResourceUpdateOperationException(Exception): def __init__(self, operation): pass def get_operation(self): pass
metrics: 3, 0, 3, 0, 3, 0, 1, 0, 1, 1, 0, 0, 2, 1, 2, 12, 10, 2, 8, 5, 5, 0, 7, 5, 4, 1, 3, 0, 2
7,812 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/resource/model_resource.py · zsl.resource.model_resource.ResourceQueryContext
class ResourceQueryContext: """ The context of the resource query. - holds the parameters and arguments of the query, - holds the related models which should be fetched (parsed from the arguments), - holds the given filter and splits it to the given field array (parsed from the arguments) .. automethod:: __init__ """ def __init__(self, params, args, data): # type: (dict, list, dict) -> () self._args = args.copy() self._args_original = args self._data = data self._params = params if _is_list(params) else [params] # Prepare fields and related. if 'related' in self._args: self._args['related'] = self._args['related'].split(',') if 'fields' in self._args: self._args['fields'] = self._args['fields'].split(',') # we can pass related fields with this, ensure its in 'related' union of two lists self._args['related'] = list( set(self._args.get('related', [])) | set(related_from_fields(self._args['fields']))) @property def params(self): """Params are given as the part of the path in URL. For example GET /entities/1 will have. 1 in the params. """ return self._params @property def args(self): """Args are in the query part of the url ?related=&filter_by etc.""" return self._args @property def data(self): """Body of the request.""" return self._data def get_row_id(self): """First parameter, if given, else None. Handy for GET requests.""" return None if len(self.params) == 0 else self.params[0] def get_related(self): """Related argument - parsed as array, original argument containing the list of comma delimited models which should be fetched along with the resource. """ return self._args.get('related', None) def get_filter_by(self): """Filter argument - list of filters.""" return self._args.get('filter_by', None)
class ResourceQueryContext: ''' The context of the resource query. - holds the parameters and arguments of the query, - holds the related models which should be fetched (parsed from the arguments), - holds the given filter and splits it to the given field array (parsed from the arguments) .. automethod:: __init__ ''' def __init__(self, params, args, data): pass @property def params(self): '''Params are given as the part of the path in URL. For example GET /entities/1 will have. 1 in the params. ''' pass @property def args(self): '''Args are in the query part of the url ?related=&filter_by etc.''' pass @property def data(self): '''Body of the request.''' pass def get_row_id(self): '''First parameter, if given, else None. Handy for GET requests.''' pass def get_related(self): '''Related argument - parsed as array, original argument containing the list of comma delimited models which should be fetched along with the resource. ''' pass def get_filter_by(self): '''Filter argument - list of filters.''' pass
metrics: 11, 7, 6, 0, 3, 2, 2, 0.74, 0, 2, 0, 0, 7, 4, 7, 7, 58, 11, 27, 15, 16, 20, 23, 12, 15, 4, 0, 1, 11
7,813 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.StatusCodeResponder
class StatusCodeResponder(Responder): def __init__(self, status_code: int): self._status_code = status_code def respond(self, r: Response) -> None: r.status_code = self._status_code
class StatusCodeResponder(Responder): def __init__(self, status_code: int): pass def respond(self, r: Response) -> None: pass
metrics: 3, 0, 2, 0, 2, 0, 1, 0, 1, 1, 0, 0, 2, 1, 2, 3, 6, 1, 5, 4, 2, 0, 5, 4, 2, 1, 1, 0, 2
7,814 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.ResponseDict
class ResponseDict(TypedDict): headers: HeadersDict status_code: int
class ResponseDict(TypedDict): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 1, 2, 0, 3, 1, 2, 0, 1, 0, 0
7,815 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.JobData
class JobData(TypedDict): """ A dictionary that represents the data associated with a job. :ivar path: The path to the job data. :vartype path: str :ivar data: The data associated with the job. :vartype data: dict[str, Any] Example: -------- >>> job_data = JobData(path='/path/to/job', data={'name': 'John', 'age': 30}) """ path: str data: dict[str, Any]
class JobData(TypedDict): ''' A dictionary that represents the data associated with a job. :ivar path: The path to the job data. :vartype path: str :ivar data: The data associated with the job. :vartype data: dict[str, Any] Example: -------- >>> job_data = JobData(path='/path/to/job', data={'name': 'John', 'age': 30}) ''' pass
metrics: 1, 1, 0, 0, 0, 0, 0, 3.33, 1, 0, 0, 0, 0, 0, 0, 0, 16, 3, 3, 1, 2, 10, 3, 1, 2, 0, 1, 0, 0
7,816 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.Job
class Job: def __init__(self, data: JobData): self.data = data @property def path(self) -> str: """Job's path. :getter: Returns job's path :type: str """ return self.data["path"] @property def payload(self) -> dict[str, Any]: """Data part of job. :getter: Returns job's payload :type: dict """ return self.data["data"] @property def is_valid(self) -> bool: """Validity of job's data. :getter: Returns if job's data are valid :type: bool """ return self.data and "path" in self.data and "data" in self.data
class Job: def __init__(self, data: JobData): pass @property def path(self) -> str: '''Job's path. :getter: Returns job's path :type: str ''' pass @property def payload(self) -> dict[str, Any]: '''Data part of job. :getter: Returns job's payload :type: dict ''' pass @property def is_valid(self) -> bool: '''Validity of job's data. :getter: Returns if job's data are valid :type: bool ''' pass
metrics: 8, 3, 6, 1, 2, 3, 1, 1, 0, 5, 1, 0, 4, 1, 4, 4, 30, 6, 12, 9, 4, 12, 9, 6, 4, 1, 0, 0, 4
7,817 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.HeadersDict
class HeadersDict(TypedDict): Location: str
class HeadersDict(TypedDict): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 2, 1, 1, 0, 2, 1, 1, 0, 1, 0, 0
7,818 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/celery/worker.py · zsl.interface.celery.worker.CeleryTaskQueueWorkerBase
class CeleryTaskQueueWorkerBase(TaskQueueWorker): """Base class for celery task queue worker. It contains only the task execution part of worker. """ def execute_celery_task(self, job_data: dict) -> JobResult: """Creates job from task and executes the job. :param job_data: job's data :return: job results :rtype: dict """ job = Job(job_data) return self.execute_job(job)
class CeleryTaskQueueWorkerBase(TaskQueueWorker): '''Base class for celery task queue worker. It contains only the task execution part of worker. ''' def execute_celery_task(self, job_data: dict) -> JobResult: '''Creates job from task and executes the job. :param job_data: job's data :return: job results :rtype: dict ''' pass
metrics: 2, 2, 9, 1, 3, 5, 1, 2, 1, 3, 2, 2, 1, 0, 1, 27, 15, 3, 4, 3, 2, 8, 4, 3, 2, 1, 4, 0, 1
7,819 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/task/job_context.py · zsl.task.job_context.DelegatingJobContext
class DelegatingJobContext(JobContext): def __init__(self, job: Job, task: object, task_callable: Callable): wrapped_job_context = JobContext.get_current_context() super().__init__(job, task, task_callable) self._wrapped_job_context = wrapped_job_context proxy_object_to_delegate(self, wrapped_job_context) def stop_delegating(self): JobContext._set_current_context(self._wrapped_job_context)
class DelegatingJobContext(JobContext): def __init__(self, job: Job, task: object, task_callable: Callable): pass def stop_delegating(self): pass
metrics: 3, 0, 4, 0, 4, 0, 1, 0, 1, 3, 1, 0, 2, 1, 2, 10, 9, 1, 8, 5, 5, 0, 8, 5, 5, 1, 1, 0, 2
7,820 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.TaskNamespace
class TaskNamespace: def __init__(self, namespace, task_configuration): # type: (str, TaskConfiguration)->None self._task_packages = [] self._routes = {} self._task_configuration = task_configuration self._namespace = namespace def add_packages(self, packages): # type: (List[str])->TaskNamespace """ Adds an automatic resolution of urls into tasks. :param packages: The url will determine package/module and the class. :return: self """ assert isinstance(packages, list), "Packages must be list of strings." self._task_packages += packages return self def get_packages(self): # type:()->List[str] return list(self._task_packages) def add_routes(self, routes): # type: (Dict[str, Callable])->TaskNamespace """Adds the detailed mapping of urls to tasks. :param routes: Mapping which defines how urls of the namespace are mapped to tasks. Each url (string) is mapped to a callable which creates the task instance. :return: self """ self._routes.update(routes) return self def get_routes(self): # type: ()->Dict[str, Callable] return self._routes.copy() def get_configuration(self): # type: ()->TaskConfiguration return self._task_configuration @property def namespace(self): # type:()->str return self._namespace
class TaskNamespace: def __init__(self, namespace, task_configuration): pass def add_packages(self, packages): ''' Adds an automatic resolution of urls into tasks. :param packages: The url will determine package/module and the class. :return: self ''' pass def get_packages(self): pass def add_routes(self, routes): '''Adds the detailed mapping of urls to tasks. :param routes: Mapping which defines how urls of the namespace are mapped to tasks. Each url (string) is mapped to a callable which creates the task instance. :return: self ''' pass def get_routes(self): pass def get_configuration(self): pass @property def namespace(self): pass
metrics: 9, 2, 6, 0, 3, 3, 1, 0.82, 0, 1, 0, 0, 7, 4, 7, 7, 48, 8, 22, 13, 13, 18, 21, 12, 13, 1, 0, 0, 7
7,821 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.TaskConfiguration
class TaskConfiguration: def __init__(self): self._namespaces = [] # type: List[TaskNamespace] def create_namespace(self, namespace): # type:(str)->TaskNamespace namespace = TaskNamespace(namespace, self) self._namespaces.append(namespace) return namespace @property def namespaces(self): return list(self._namespaces)
class TaskConfiguration: def __init__(self): pass def create_namespace(self, namespace): pass @property def namespaces(self): pass
metrics: 5, 0, 3, 0, 3, 1, 1, 0.2, 0, 2, 1, 0, 3, 1, 3, 3, 13, 2, 10, 6, 5, 2, 9, 5, 5, 1, 0, 0, 3
7,822 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.RoutingError
class RoutingError(ZslError): def __init__(self, path): msg = "Can not find task at path '{0}'.".format(path) super().__init__(msg) self._path = path @property def path(self): return self._path
class RoutingError(ZslError): def __init__(self, path): pass @property def path(self): pass
metrics: 4, 0, 3, 0, 3, 0, 1, 0, 1, 1, 0, 0, 2, 1, 2, 12, 9, 1, 8, 6, 4, 0, 7, 5, 4, 1, 4, 0, 2
7,823 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.RouterStrategy
class RouterStrategy: __metaclass__ = ABCMeta def can_route(self, path): # type:(str)->bool pass def route(self, path): # type:(str)->Callable pass
class RouterStrategy: def can_route(self, path): pass def route(self, path): pass
metrics: 3, 0, 3, 0, 2, 1, 1, 0.33, 0, 0, 0, 2, 2, 0, 2, 2, 10, 2, 6, 4, 3, 2, 6, 4, 3, 1, 0, 0, 2
7,824 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.PathTaskRouterStrategy
class PathTaskRouterStrategy(RouterStrategy): def __init__(self, task_configuration): # type: (TaskConfiguration)->None self._routes = {} for namespace_configuration in task_configuration.namespaces: namespace_routes = {} for k in namespace_configuration.get_routes(): namespace_routes[namespace_configuration.namespace + '/' + k] = namespace_configuration.get_routes()[k] self._routes.update(namespace_routes) def can_route(self, path): return path in self._routes def route(self, path): return self._routes[path]
class PathTaskRouterStrategy(RouterStrategy): def __init__(self, task_configuration): pass def can_route(self, path): pass def route(self, path): pass
metrics: 4, 0, 4, 0, 4, 0, 2, 0.08, 1, 0, 0, 0, 3, 1, 3, 5, 16, 3, 12, 8, 8, 1, 12, 8, 8, 3, 1, 2, 5
7,825 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/task.py · zsl.router.task.PackageTaskRouterStrategy
class PackageTaskRouterStrategy(RouterStrategy): def __init__(self, task_configuration, debug): # type: (TaskConfiguration)->None self._namespaces = task_configuration.namespaces self._debug = debug def can_route(self, path): return True def _load_module(self, module_name): # Debug loading provides better output. if self._debug: full = [] for p in module_name.split('.'): full.append(p) importlib.import_module('.'.join(full)) return importlib.import_module(module_name) def is_task_reloading(self): return self._debug def route(self, path): # type:(str)->Callable # Finding the path in task packages. logger = logging.getLogger(__name__) module_ = None exceptions = [] class_name = None for task_namespace in self._namespaces: if not path.startswith(task_namespace.namespace): continue # Split the path into arrays - package names in the tasks package. class_name, package_path = self._split_path(path, task_namespace) task_packages = task_namespace.get_packages() module_, exceptions = self._find_task_in_namespace(task_packages, package_path, class_name) if module_ is not None: break if module_ is None: exception = RoutingError(path) logger.warning(str(exception)) for e in exceptions: logger.error("Reason", exc_info=e) raise exception if self.is_task_reloading(): importlib.reload(module_) cls = getattr(module_, class_name) return cls def _split_path(self, path, task_namespace): # type:(str, TaskNamespace)->Tuple[str, List[str]] package_path = path[len(task_namespace.namespace + '/'):] package_path = package_path.split("/") class_name = underscore_to_camelcase(package_path[-1]) return class_name, package_path def _find_task_in_namespace(self, task_packages, package_path, class_name): logger = logging.getLogger(__name__) exceptions = [] module_ = None for task_package in task_packages: module_name = "{0}.{1}".format(task_package, ".".join(package_path)) try: logger.debug("Trying to load module with name '%s' and class name '%s'.", module_name, class_name) module_ = self._load_module(module_name) break except ImportError as e: exceptions.append(e) if self._debug: logger.warning( "Could not load module with name '%s' and class name '%s', '%s'; proceeding to next module.", module_name, class_name, e) return module_, exceptions
class PackageTaskRouterStrategy(RouterStrategy): def __init__(self, task_configuration, debug): pass def can_route(self, path): pass def _load_module(self, module_name): pass def is_task_reloading(self): pass def route(self, path): pass def _split_path(self, path, task_namespace): pass def _find_task_in_namespace(self, task_packages, package_path, class_name): pass
metrics: 8, 0, 10, 1, 9, 1, 3, 0.1, 1, 3, 1, 0, 7, 2, 7, 9, 80, 12, 62, 30, 54, 6, 58, 29, 50, 7, 1, 3, 18
7,826 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/router/method.py · zsl.router.method.Performer
class Performer: def __init__(self, f): global _default_responder_method self._f = f self.__name__ = "zsl-method-router-performer-of-" + f.__name__ self.__doc__ = f.__doc__ if hasattr(f, '__doc__') else None self._responder = None self.set_responder(_default_responder_method) def set_responder(self, responder): self._responder = responder def _call_inner_function(self, a, kw): return self._f(*a, **kw) @append_headers @notify_responders @convert_to_web_response @error_handler def __call__(self, *a, **kw): def task_callable(): return self._call_inner_function(a, kw) jc = WebJobContext(None, extract_data(request), task_callable, task_callable, request) return execute_web_task(jc, task_callable)
class Performer: def __init__(self, f): pass def set_responder(self, responder): pass def _call_inner_function(self, a, kw): pass @append_headers @notify_responders @convert_to_web_response @error_handler def __call__(self, *a, **kw): pass def task_callable(): pass
metrics: 10, 0, 4, 0, 4, 0, 1, 0, 0, 1, 1, 0, 4, 4, 4, 4, 26, 4, 22, 13, 11, 0, 17, 12, 10, 2, 0, 0, 6
7,827 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/service/service.py · zsl.service.service.TransactionalSupportMixin
class TransactionalSupportMixin: """This mixin allows the objects to access transactional holder.""" @property def _orm(self) -> Session: return getattr(self, _TX_HOLDER_ATTRIBUTE, _EMPTY_TX_HOLDER).session
class TransactionalSupportMixin: '''This mixin allows the objects to access transactional holder.''' @property def _orm(self) -> Session: pass
metrics: 3, 1, 2, 0, 2, 0, 1, 0.25, 0, 0, 0, 3, 1, 0, 1, 1, 6, 1, 4, 3, 1, 1, 3, 2, 1, 1, 0, 0, 1
7,828 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/celery/worker.py · zsl.interface.celery.worker.CeleryTaskQueueOutsideWorker
class CeleryTaskQueueOutsideWorker(CeleryTaskQueueWorkerBase): """Celery worker used only for task execution. This can be used when the worker is managed with some other application, like `celery worker` or `celery multi`. """ def stop_worker(self): self._app.logger.error("Running from celery worker, kill from shell!") def run(self): self._app.logger.error("Running from celery worker, start from shell!")
class CeleryTaskQueueOutsideWorker(CeleryTaskQueueWorkerBase): '''Celery worker used only for task execution. This can be used when the worker is managed with some other application, like `celery worker` or `celery multi`. ''' def stop_worker(self): pass def run(self): pass
metrics: 3, 1, 2, 0, 2, 0, 1, 0.8, 1, 0, 0, 0, 2, 0, 2, 29, 12, 3, 5, 3, 2, 4, 5, 3, 2, 1, 5, 0, 2
7,829 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/task.py · zsl.interface.task.RequestJSONEncoder
class RequestJSONEncoder(JSONEncoder): def default(self, o): if isinstance(o, object): return dict(o.__dict__) else: return JSONEncoder.default(self, o)
class RequestJSONEncoder(JSONEncoder): def default(self, o): pass
metrics: 2, 0, 5, 0, 5, 0, 2, 0, 1, 1, 0, 0, 1, 0, 1, 5, 6, 0, 6, 2, 4, 0, 5, 2, 3, 2, 2, 1, 2
7,830 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/errors.py · zsl.errors.ErrorConfiguration
class ErrorConfiguration: """ The configuration object for error handling. """ def __init__(self, handlers=None, processors=None, use_flask_handler=False): # type: (List[ErrorHandler], List[ErrorProcessor], bool)->None self._use_flask_handler = use_flask_handler self._handlers = handlers if handlers is not None else [] self._processors = processors if processors is not None else [] @property def use_flask_handler(self): # type:()->bool """ In case of web requests, flasks provides a convenient way of exception handling. This handler shows the stack trace, etc. On the other hand \ this setting will turn of ZSL's exception handling for web request. :return: Status """ return self._use_flask_handler @property def handlers(self): # type: ()->List[ErrorHandler] return self._handlers @property def processors(self): # type: ()->List[ErrorProcessor] return self._processors
class ErrorConfiguration: ''' The configuration object for error handling. ''' def __init__(self, handlers=None, processors=None, use_flask_handler=False): pass @property def use_flask_handler(self): ''' In case of web requests, flasks provides a convenient way of exception handling. This handler shows the stack trace, etc. On the other hand this setting will turn of ZSL's exception handling for web request. :return: Status ''' pass @property def handlers(self): pass @property def processors(self): pass
metrics: 8, 2, 5, 0, 3, 3, 2, 0.93, 0, 0, 0, 0, 4, 3, 4, 4, 32, 5, 14, 11, 6, 13, 11, 8, 6, 3, 0, 0, 6
7,831 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/web/web_context_module.py · zsl.application.modules.web.web_context_module.WebCli
class WebCli: @inject(zsl_cli=ZslCli) def __init__(self, zsl_cli): # type: (ZslCli) -> None @zsl_cli.cli.group(help='Web related tasks.') def web(): pass @web.command(help="Run web server and serve the application") @click.option('--host', '-h', help="host to bind to", default='127.0.0.1') @click.option('--port', '-p', help="port to bind to", default=5000) @inject(web_handler=WebHandler) def run(web_handler, host, port): # type: (WebHandler, str, int)->None web_handler.run_web(host=host, port=port) self._web = web @property def web(self): return self._web
class WebCli: @inject(zsl_cli=ZslCli) def __init__(self, zsl_cli): pass @zsl_cli.cli.group(help='Web related tasks.') def web(): pass @web.command(help="Run web server and serve the application") @click.option('--host', '-h', help="host to bind to", default='127.0.0.1') @click.option('--port', '-p', help="port to bind to", default=5000) @inject(web_handler=WebHandler) def run(web_handler, host, port): pass @property def web(): pass
metrics: 12, 0, 6, 1, 4, 1, 1, 0.13, 0, 1, 1, 0, 2, 1, 2, 2, 22, 4, 16, 10, 4, 2, 9, 6, 4, 1, 0, 0, 4
7,832 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorLeftLike
class OperatorLeftLike: """ Left side of string is like ... """ @staticmethod def apply(q, attr, v): return q.filter(attr.like('{0}%'.format(v)))
class OperatorLeftLike: ''' Left side of string is like ... ''' @staticmethod def apply(q, attr, v): pass
metrics: 3, 1, 2, 0, 2, 0, 1, 0.75, 0, 0, 0, 0, 0, 0, 1, 1, 8, 1, 4, 3, 1, 3, 3, 2, 1, 1, 0, 0, 1
7,833 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorEq
class OperatorEq: @staticmethod def apply(q, attr, v): return q.filter(attr == v)
class OperatorEq: @staticmethod def apply(q, attr, v): pass
metrics: 3, 0, 2, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 3, 1, 0, 3, 2, 1, 1, 0, 0, 1
7,834 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorCompareDates
class OperatorCompareDates: """ Compares only dates, year is not taken into account. Compared date value must be string in format '%m-%d' """ @staticmethod def apply(q, attr, v): return q.filter(func.date_format(attr, '%m-%d') == v)
class OperatorCompareDates: ''' Compares only dates, year is not taken into account. Compared date value must be string in format '%m-%d' ''' @staticmethod def apply(q, attr, v): pass
metrics: 3, 1, 2, 0, 2, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 9, 1, 4, 3, 1, 4, 3, 2, 1, 1, 0, 0, 1
7,835 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorBetween
class OperatorBetween: @staticmethod def apply(q, attr, v): return q.filter(attr.between(v[0], v[1]))
class OperatorBetween: @staticmethod def apply(q, attr, v): pass
metrics: 3, 0, 2, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 3, 1, 0, 3, 2, 1, 1, 0, 0, 1
7,836 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/pagination.py · zsl.db.helpers.pagination.PaginationResponse
class PaginationResponse(AppModel): def __init__(self, record_count, page_size, pagination): # type: (int, int, PaginationRequest)->None super().__init__({}) self.record_count = record_count max_page_size = pagination.page_size self.page_count = (record_count + max_page_size - 1) // max_page_size self.page_size = page_size self.max_page_size = max_page_size self.page_no = pagination.page_no
class PaginationResponse(AppModel): def __init__(self, record_count, page_size, pagination): pass
metrics: 2, 0, 9, 0, 8, 1, 1, 0.11, 1, 1, 0, 0, 1, 5, 1, 7, 10, 0, 9, 8, 7, 1, 9, 8, 7, 1, 1, 0, 1
7,837 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/pagination.py · zsl.db.helpers.pagination.PaginationRequest
class PaginationRequest(AppModel): def __init__(self, raw=None, id_name='id', hints=None): if raw is None: raw = {'page_no': FIRST_PAGE, 'page_size': DEFAULT_PAGE_SIZE} super().__init__(raw, id_name, hints) self.page_no = int(self.page_no) if self.page_no else FIRST_PAGE self.page_size = int(self.page_size) if self.page_size else DEFAULT_PAGE_SIZE @staticmethod def create(page_no: int = FIRST_PAGE, page_size: int = DEFAULT_PAGE_SIZE) -> 'PaginationRequest': return PaginationRequest({'page_no': page_no, 'page_size': page_size})
class PaginationRequest(AppModel): def __init__(self, raw=None, id_name='id', hints=None): pass @staticmethod def create(page_no: int = FIRST_PAGE, page_size: int = DEFAULT_PAGE_SIZE) -> 'PaginationRequest': pass
metrics: 4, 0, 5, 1, 4, 0, 3, 0, 1, 2, 0, 0, 1, 2, 2, 8, 12, 2, 10, 6, 6, 0, 9, 5, 6, 4, 1, 1, 5
7,838 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/pagination.py · zsl.db.helpers.pagination.Pagination
class Pagination: """ Pagination support. Allows to paginate a query. There are two choices. #. :meth:`.paginate` - paginates a query and obtains the count automatically. #. :meth:`.apply_pagination` - paginates a query and assumes that record count is set in advance. """ def __init__(self, pagination=None): # type: (Union[PaginationRequest, Dict[str, Union[str, int]], None])->None pagination = self._create_pagination_request(pagination) assert isinstance(pagination, PaginationRequest) self._offset = (pagination.page_no - FIRST_PAGE) * pagination.page_size self._page_size = pagination.page_size self._record_count = -1 def _create_pagination_request(self, pagination): # type: (Union[PaginationRequest, Dict[str, Union[str, int]], None])->PaginationRequest if pagination is None: pagination = PaginationRequest() elif isinstance(pagination, dict): page_size = int(pagination.get('page_size', DEFAULT_PAGE_SIZE)) pagination = PaginationRequest.create( FIRST_PAGE + int(pagination.get('offset', 0)) // page_size, page_size) return pagination @property def record_count(self): return self._record_count @record_count.setter def record_count(self, record_count): self._record_count = record_count @property def page_size(self): return self._page_size @page_size.setter def page_size(self, page_size): # type: (int)->None self._page_size = page_size @property def offset(self): count = self._record_count per_page = self.page_size if self._offset >= count: last_page_size = count % per_page if last_page_size == 0: last_page_size = per_page self._offset = count - last_page_size if self._offset < 0: self._offset = 0 return self._offset @offset.setter def offset(self, offset): # type:(int)->None self._offset = offset def apply_pagination(self, q): # type: (Query)->Query """ Filters the query so that a given page is returned. The record count must be set in advance. :param q: Query to be paged. :return: Paged query. """ assert self.record_count >= 0, "Record count must be set." return q.limit(self.page_size).offset(self.offset) def paginate(self, q): # type: (Query)->Query """ Filters the query so that a given page is returned. The record count is computed automatically from query. :param q: Query to be paged. :return: Paged query. """ self.record_count = q.count() return self.apply_pagination(q).all()
class Pagination: ''' Pagination support. Allows to paginate a query. There are two choices. #. :meth:`.paginate` - paginates a query and obtains the count automatically. #. :meth:`.apply_pagination` - paginates a query and assumes that record count is set in advance. ''' def __init__(self, pagination=None): pass def _create_pagination_request(self, pagination): pass @property def record_count(self): pass @record_count.setter def record_count(self): pass @property def page_size(self): pass @page_size.setter def page_size(self): pass @property def offset(self): pass @offset.setter def offset(self): pass def apply_pagination(self, q): ''' Filters the query so that a given page is returned. The record count must be set in advance. :param q: Query to be paged. :return: Paged query. ''' pass def paginate(self, q): ''' Filters the query so that a given page is returned. The record count is computed automatically from query. :param q: Query to be paged. :return: Paged query. ''' pass
metrics: 17, 3, 6, 0, 4, 2, 2, 0.51, 0, 3, 1, 0, 10, 3, 10, 10, 85, 11, 49, 24, 32, 25, 40, 18, 29, 4, 0, 2, 15
7,839 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/contrib/sentry/sentry_config.py · zsl.contrib.sentry.sentry_config.SentryConfiguration
class SentryConfiguration(namedtuple('SentryConfiguration', ['dsn', 'environment', 'tags', 'register_logging_handler', 'sentry_logging_handler_level'])): __slots__ = () def __new__(cls, dsn, environment=None, tags=None, register_logging_handler=True, sentry_logging_handler_level=logging.ERROR): tags = [] if tags is None else tags return super(SentryConfiguration, cls).__new__(cls, dsn, environment, tags, register_logging_handler, sentry_logging_handler_level)
class SentryConfiguration(namedtuple('SentryConfiguration', ['dsn', 'environment', 'tags', 'register_logging_handler', 'sentry_logging_handler_level'])): def __new__(cls, dsn, environment=None, tags=None, register_logging_handler=True, sentry_logging_handler_level=logging.ERROR): pass
metrics: 2, 0, 5, 0, 5, 0, 2, 0, 1, 1, 0, 0, 1, 0, 1, 6, 9, 1, 8, 5, 4, 0, 5, 3, 3, 2, 1, 0, 2
7,840 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/constants.py · zsl.constants.MimeType
class MimeType(Enum): APPLICATION_JSON = 'application/json'
class MimeType(Enum): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 49, 2, 0, 2, 2, 1, 0, 2, 2, 1, 0, 4, 0, 0
7,841 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/constants.py · zsl.constants.HttpHeaders
class HttpHeaders(Enum): CONTENT_TYPE = 'Content-Type'
class HttpHeaders(Enum): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 49, 2, 0, 2, 2, 1, 0, 2, 2, 1, 0, 4, 0, 0
7,842 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/configuration/__init__.py · zsl.configuration.InvalidConfigurationException
class InvalidConfigurationException(Exception): pass
class InvalidConfigurationException(Exception): pass
metrics: 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 10, 2, 0, 2, 1, 1, 0, 2, 1, 1, 0, 3, 0, 0
7,843 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/service_application.py · zsl.application.service_application.ServiceApplication
class ServiceApplication(Flask): """Atteq Service Flask application.""" VERSION = version def __init__( self, import_name, static_url_path=None, static_folder="static", static_host=None, host_matching=False, subdomain_matching=False, template_folder="templates", instance_path=None, instance_relative_config=False, root_path=None, modules=None, version=None, config_object=None, default_settings_module="settings.default_settings", ): super().__init__( import_name, static_url_path, static_folder, static_host, host_matching, subdomain_matching, template_folder, instance_path, instance_relative_config, root_path, ) self._dependencies_initialized = False self._default_settings_module = default_settings_module self._is_initialized = False self._injector = None self._worker = None self._configure(config_object) self._app_version = version if not modules: from zsl.application.containers.core_container import CoreContainer modules = CoreContainer.modules() self._configure_injector(modules) self._initialize() self._dependencies_initialized = True def __str__(self): return "ZSL(application={0}, zsl_version={1}, app_version={2})".format( self.name, self.VERSION, self._app_version ) def _configure(self, config_object=None): # type: (Any) -> None """Read the configuration from config files. Loads the default settings and the profile settings if available. Check :func:`.set_profile`. :param config_object: This parameter is the configuration decscription may be a dict or string describing the module from which the configuration is used. Default is settings.default_settings. """ if config_object: self.config.from_mapping(config_object) else: self.config.from_object(self._default_settings_module) zsl_settings = os.environ.get(SETTINGS_ENV_VAR_NAME) if zsl_settings is not None: if os.path.exists(zsl_settings): self.config.from_envvar(SETTINGS_ENV_VAR_NAME) else: self.logger.warning( f"Warning: The settings file '{zsl_settings}' does not exist. Using default settings." ) def _initialize(self): """Run the initializers.""" ctx = self.injector.get(InitializationContext) ctx.initialize() def _register(self): """Register the current instance into application stack.""" set_current_app(self) def _get_app_module(self): # type: () -> Callable """Returns a module which binds the current app and configuration. :return: configuration callback :rtype: Callable """ def configure(binder): # type: (Binder) -> Callable binder.bind(ServiceApplication, to=self, scope=singleton) binder.bind(Config, to=self.config, scope=singleton) return configure def _configure_injector(self, modules): """Create the injector and install the modules. There is a necessary order of calls. First we have to bind `Config` and `Zsl`, then we need to register the app into the global stack and then we can install all other modules, which can use `Zsl` and `Config` injection. :param modules: list of injection modules :type modules: list """ self._register() self._create_injector() self._bind_core() self._bind_modules(modules) self.logger.debug("Injector configuration with modules {0}.".format(modules)) self._dependencies_initialized = True @deprecated def get_initialization_context(self): return self.injector.get(InitializationContext) def is_initialized(self): return self._dependencies_initialized @property def injector(self): # type: () -> Injector return self._injector @injector.setter def injector(self, value): self._injector = value def get_version(self): return self.version @property def version(self): app_version = self.app_version if app_version is None: return self.zsl_version else: return self.zsl_version + ":" + app_version @property def zsl_version(self): return ServiceApplication.VERSION @property def app_version(self): return self._app_version def _create_injector(self): self.logger.debug("Creating injector") self._injector = Injector() def _bind_core(self): self._injector.binder.bind(ServiceApplication, self, singleton) self._injector.binder.bind(Config, self.config, singleton) def _bind_modules(self, modules): for module in modules: self.injector.binder.install(module)
class ServiceApplication(Flask): '''Atteq Service Flask application.''' def __init__( self, import_name, static_url_path=None, static_folder="static", static_host=None, host_matching=False, subdomain_matching=False, template_folder="templates", instance_path=None, instance_relative_config=False, root_path=None, modules=None, version=None, config_object=None, default_settings_module="settings.default_settings", ): pass def __str__(self): pass def _configure(self, config_object=None): '''Read the configuration from config files. Loads the default settings and the profile settings if available. Check :func:`.set_profile`. :param config_object: This parameter is the configuration decscription may be a dict or string describing the module from which the configuration is used. Default is settings.default_settings. ''' pass def _initialize(self): '''Run the initializers.''' pass def _register(self): '''Register the current instance into application stack.''' pass def _get_app_module(self): '''Returns a module which binds the current app and configuration. :return: configuration callback :rtype: Callable ''' pass def configure(binder): pass def _configure_injector(self, modules): '''Create the injector and install the modules. There is a necessary order of calls. First we have to bind `Config` and `Zsl`, then we need to register the app into the global stack and then we can install all other modules, which can use `Zsl` and `Config` injection. :param modules: list of injection modules :type modules: list ''' pass @deprecated def get_initialization_context(self): pass def is_initialized(self): pass @property def injector(self): pass @injector.setter def injector(self): pass def get_version(self): pass @property def version(self): pass @property def zsl_version(self): pass @property def app_version(self): pass def _create_injector(self): pass def _bind_core(self): pass def _bind_modules(self, modules): pass
metrics: 26, 6, 8, 0, 6, 1, 1, 0.24, 1, 3, 2, 0, 18, 6, 18, 18, 168, 28, 113, 54, 70, 27, 73, 32, 52, 4, 1, 2, 25
7,844 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/web/web_context_module.py · zsl.application.modules.web.web_context_module.WebInitializer
class WebInitializer: """Initialize the web application.""" @staticmethod def initialize(): """ Import in this form is necessary so that we avoid the unwanted behavior and immediate initialization of the application objects. This makes the initialization procedure run in the time when it is necessary and has every required resources. """ from zsl.interface.web.performers.default import create_not_found_mapping from zsl.interface.web.performers.resource import create_resource_mapping create_not_found_mapping() create_resource_mapping()
class WebInitializer: '''Initialize the web application.''' @staticmethod def initialize(): ''' Import in this form is necessary so that we avoid the unwanted behavior and immediate initialization of the application objects. This makes the initialization procedure run in the time when it is necessary and has every required resources. ''' pass
metrics: 3, 2, 11, 1, 5, 5, 1, 0.86, 0, 0, 0, 0, 0, 0, 1, 1, 15, 2, 7, 5, 2, 6, 6, 4, 2, 1, 0, 0, 1
7,845 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/web/cors.py · zsl.application.modules.web.cors.CORSConfiguration
class CORSConfiguration: DEFAULT_ALLOW_HEADERS = ['Accept', 'Origin', 'Content-Type', 'Authorization'] DEFAULT_EXPOSE_HEADERS = ['Location', 'X-Total-Count', 'Link'] DEFAULT_MAX_AGE = 21600 def __init__(self, origin='', allow_headers=None, expose_headers=None, max_age=DEFAULT_MAX_AGE): # type: (str, Union[List[str], str], Union[List[str], str], int)->None if allow_headers is None: allow_headers = self.DEFAULT_ALLOW_HEADERS if expose_headers is None: expose_headers = self.DEFAULT_EXPOSE_HEADERS self._origin = origin self._allow_headers = allow_headers self._expose_headers = expose_headers self._max_age = max_age @property def origin(self): # type: ()->str return self._origin @property def allow_headers(self): # type: ()->Union[List[str], str] return self._allow_headers @property def expose_headers(self): # type: ()->Union[List[str], str] return self._expose_headers @property def max_age(self): # type: ()->int return self._max_age
class CORSConfiguration: def __init__(self, origin='', allow_headers=None, expose_headers=None, max_age=DEFAULT_MAX_AGE): pass @property def origin(self): pass @property def allow_headers(self): pass @property def expose_headers(self): pass @property def max_age(self): pass
metrics: 10, 0, 5, 1, 4, 1, 1, 0.19, 0, 0, 0, 0, 5, 4, 5, 5, 40, 8, 27, 18, 16, 5, 21, 13, 15, 3, 0, 1, 7
7,846 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/web/configuration.py · zsl.application.modules.web.configuration.MethodConfiguration
class MethodConfiguration: def __init__(self, package=None, packages=None, url_prefix='method'): if packages is not None and package is not None: raise InvalidConfigurationException("Can not take both packages and package in method configuration.") packages = tuple(packages) if packages is not None else () self._packages = (package,) if package is not None else packages if '/' in url_prefix: raise InvalidConfigurationException("MethodConfiguration url_prefix parameter can not contain slashes.") self._url_prefix = url_prefix @property def url_prefix(self): return self._url_prefix @property def packages(self): return self._packages
class MethodConfiguration: def __init__(self, package=None, packages=None, url_prefix='method'): pass @property def url_prefix(self): pass @property def packages(self): pass
metrics: 6, 0, 4, 0, 4, 0, 2, 0, 0, 2, 1, 0, 3, 2, 3, 3, 18, 3, 15, 8, 9, 0, 13, 6, 9, 5, 0, 1, 7
7,847 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/application/modules/gearman_module.py · zsl.application.modules.gearman_module.GearmanModule
class GearmanModule(Module): """Adds gearman to current configuration.""" def configure(self, binder): binder.bind(TaskQueueWorker, to=ClassProvider(GearmanTaskQueueWorker), scope=singleton) simple_bind(binder, GearmanCli, singleton)
class GearmanModule(Module): '''Adds gearman to current configuration.''' def configure(self, binder): pass
metrics: 2, 1, 3, 0, 3, 0, 1, 0.25, 1, 3, 3, 0, 1, 0, 1, 1, 6, 1, 4, 2, 2, 1, 4, 2, 2, 1, 1, 0, 1
7,848 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorNeq
class OperatorNeq: @staticmethod def apply(q, attr, v): return q.filter(attr != v)
class OperatorNeq: @staticmethod def apply(q, attr, v): pass
metrics: 3, 0, 2, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 3, 1, 0, 3, 2, 1, 1, 0, 0, 1
7,849 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/interface/celery/worker.py · zsl.interface.celery.worker.CeleryTaskQueueMainWorker
class CeleryTaskQueueMainWorker(CeleryTaskQueueWorkerBase): """Worker implementation for Celery task queue.""" def __init__( self, ): super().__init__() self.celery_app = create_celery_app() def stop_worker(self): self._app.logger.error( "This is a celery app worker, kill the instance to stop it." ) def run(self, argv: list[str]): """ Run the celery worker cmd with given arguments from the list. Note: the first argument should be "worker". """ self._app.logger.info("Running the worker.") self.celery_app.worker_main(argv)
class CeleryTaskQueueMainWorker(CeleryTaskQueueWorkerBase): '''Worker implementation for Celery task queue.''' def __init__( self, ): pass def stop_worker(self): pass def run(self, argv: list[str]): ''' Run the celery worker cmd with given arguments from the list. Note: the first argument should be "worker". ''' pass
metrics: 4, 2, 6, 0, 4, 1, 1, 0.38, 1, 3, 0, 0, 3, 1, 3, 30, 22, 4, 13, 7, 7, 5, 9, 5, 5, 1, 5, 0, 3
7,850 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.OperatorRightLike
class OperatorRightLike: """ Right side of string is like ... """ @staticmethod def apply(q, attr, v): return q.filter(attr.like('%{0}'.format(v)))
class OperatorRightLike: ''' Right side of string is like ... ''' @staticmethod def apply(q, attr, v): pass
metrics: 3, 1, 2, 0, 2, 0, 1, 0.75, 0, 0, 0, 0, 0, 0, 1, 1, 8, 1, 4, 3, 1, 3, 3, 2, 1, 1, 0, 0, 1
7,851 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/model/app_model_json_encoder.py · zsl.db.model.app_model_json_encoder.AppModelJSONEncoder
class AppModelJSONEncoder(JSONEncoder): def default(self, o): if isinstance(o, AppModel): return o.get_attributes() elif isinstance(o, object): return dict(o.__dict__) else: return JSONEncoder.default(self, o)
class AppModelJSONEncoder(JSONEncoder): def default(self, o): pass
metrics: 2, 0, 7, 0, 7, 0, 3, 0, 1, 2, 1, 0, 1, 0, 1, 5, 8, 0, 8, 2, 6, 0, 6, 2, 4, 3, 2, 1, 3
7,852 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_filter.py · zsl.db.helpers.query_filter.RelationshipOperatorContains
class RelationshipOperatorContains: @staticmethod def apply(q, attr, v): return q.filter(attr.contains(v))
class RelationshipOperatorContains: @staticmethod def apply(q, attr, v): pass
metrics: 3, 0, 2, 0, 2, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 0, 4, 3, 1, 0, 3, 2, 1, 1, 0, 0, 1
7,853 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/query_helper.py · zsl.db.helpers.query_helper.QueryHelper
class QueryHelper: def __init__(self, cls, query_filter, pagination, sorter): self._cls = cls if not isinstance(query_filter, QueryFilter): query_filter = QueryFilter(query_filter) self._query_filter = query_filter if not isinstance(pagination, Pagination): pagination = Pagination(pagination) self._pagination = pagination if not isinstance(sorter, Sorter): sorter = Sorter(sorter) self._sorter = sorter def execute(self, q): q = self._query_filter.apply_query_filter(q, self._cls) q = self._sorter.apply_sorter(q, self._cls) return self._pagination.paginate(q) def get_pagination(self): return self._pagination def get_sorter(self): return self._sorter
class QueryHelper: def __init__(self, cls, query_filter, pagination, sorter): pass def execute(self, q): pass def get_pagination(self): pass def get_sorter(self): pass
metrics: 5, 0, 6, 1, 5, 0, 2, 0, 0, 3, 3, 0, 4, 4, 4, 4, 26, 6, 20, 9, 15, 0, 20, 9, 15, 4, 0, 1, 7
7,854 · AtteqCom/zsl · AtteqCom_zsl/src/zsl/db/helpers/sorter.py · zsl.db.helpers.sorter.Sorter
class Sorter: """ Helper class for applying ordering criteria to query. """ def __init__(self, sorter, mappings=None): """ sorter = {'sortby': string, 'sort': string} sortby - string of comma-separated column names by which you want to order sort - string of comma-separated values 'ASC'/'DESC' (order direction) which set order direction to corresponding columns from sorter['sortby'] string notes: - if 'sortby' key is not in sorter, no sorting will be applied to query - if 'sort' key is not in sorter, DEFAULT_SORT_ORDER will be applied to all columns from sorter['sortby'] - if sorter['sort'] == 'ASC' / 'DESC' (contains only one order direction), this direction will be applied to all columns from sorter['sortby'] - if you want to order only by one column, simply put sorter['sortby'] = '<column_name>' - without comma at the end of the string mappings dict - maps column names from sorter['sortby'] to column attributes names of objects (see example) - if the column names from sorter['sortby'] is equal to the name of column attribute, it doesn`t have to be mentioned in mappings Example: sorter = {'sortby': 'firstname,state,sport', 'sort': 'ASC'} mappings = { 'state': (State, 'name_sk'), 'sport': (Sport, 'name'), } """ if mappings is None: mappings = [] if 'sortby' in sorter: self._fields = sorter['sortby'].split(',') if 'sort' in sorter: self._orders = sorter['sort'].split(',') if len(self._orders) == 1: self._orders *= len(self._fields) elif len(self._orders) != len(self._fields): raise Exception( 'zsl.db.helpers.Sorter: Number of order settings is nor zero nor one nor equal to number of' 'sortby columns.') else: self._orders = [DEFAULT_SORT_ORDER] * len(self._fields) self._enabled = True else: self._enabled = False self._mappings = mappings def is_enabled(self): return self._enabled def get_fields(self): return self._fields def get_orders(self): return self._orders def apply_sorter(self, q, cls): if self.is_enabled(): sorter_settings = [] for field, order in zip(self.get_fields(), self.get_orders()): if field in self._mappings: (cls, mapped_field) = self._mappings[field] attr = getattr(cls, mapped_field) else: attr = getattr(cls, field) if order == "DESC": # If changed, look at the DEFAULT_SORT_ORDER definition. sorter_settings.append(desc(attr)) else: sorter_settings.append(asc(attr)) return q.order_by(*sorter_settings) else: return q
class Sorter: ''' Helper class for applying ordering criteria to query. ''' def __init__(self, sorter, mappings=None): ''' sorter = {'sortby': string, 'sort': string} sortby - string of comma-separated column names by which you want to order sort - string of comma-separated values 'ASC'/'DESC' (order direction) which set order direction to corresponding columns from sorter['sortby'] string notes: - if 'sortby' key is not in sorter, no sorting will be applied to query - if 'sort' key is not in sorter, DEFAULT_SORT_ORDER will be applied to all columns from sorter['sortby'] - if sorter['sort'] == 'ASC' / 'DESC' (contains only one order direction), this direction will be applied to all columns from sorter['sortby'] - if you want to order only by one column, simply put sorter['sortby'] = '<column_name>' - without comma at the end of the string mappings dict - maps column names from sorter['sortby'] to column attributes names of objects (see example) - if the column names from sorter['sortby'] is equal to the name of column attribute, it doesn`t have to be mentioned in mappings Example: sorter = {'sortby': 'firstname,state,sport', 'sort': 'ASC'} mappings = { 'state': (State, 'name_sk'), 'sport': (Sport, 'name'), } ''' pass def is_enabled(self): pass def get_fields(self): pass def get_orders(self): pass def apply_sorter(self, q, cls): pass
6
2
15
2
8
5
3
0.67
0
2
0
0
5
4
5
5
83
14
42
14
36
28
34
14
28
6
0
3
14
7,855
AtteqCom/zsl
AtteqCom_zsl/src/zsl/db/model/app_model.py
zsl.db.model.app_model.AppModel
class AppModel:
    """AppModels are used as thin and simple communication objects. They can
    also be saved into a cache. They are essentially known as Data Transfer
    Objects or DTOs.

    .. automethod:: __init__
    """

    _not_serialized_attributes = ['_not_serialized_attributes', '_hints', '_id_name']

    def __init__(self, raw, id_name='id', hints=None):
        """
        The application model constructor.

        :param raw: Dictionary of properties of the raw data.
        :param id_name: Name of the identifier property.
        :param hints: Tells which of the raw attributes are date or datetime
                      strings and what their format is. Example:
                      ```
                      {
                        DATE_DATA: {
                            'birthday': '%d.%m.%Y'
                        },
                        DATETIME_DATA: {
                            'created': '%Y-%m-%d %H:%M:%S'
                        }
                      }
                      ```
                      These attributes are then saved in the standard zsl
                      service date/datetime format (consult
                      :mod:`zsl.utils.date_helper` for more.)
        """
        extend_object_by_dict(self, raw, hints)
        self._id_name = id_name

    def get_id(self):
        return self.__dict__[self._id_name]

    def _set_id_name(self, id_name):
        self._id_name = id_name

    @staticmethod
    def convert(v):
        if isinstance(v, AppModel):
            return v.get_attributes()
        else:
            return v

    def get_attributes(self):
        d = dict(self.__dict__)
        for k in self.__dict__:
            if k in self._not_serialized_attributes:
                d.pop(k)
            elif isinstance(d[k], AppModel):
                d[k] = self.convert(d[k])
            elif isinstance(d[k], list):
                d[k] = list(map(self.convert, d[k]))
            elif isinstance(d[k], tuple):
                d[k] = list(map(self.convert, d[k]))
                d[k] = tuple(d[k])
            elif isinstance(d[k], dict):
                for key, value in getattr(self, k).items():
                    d[k][key] = self.convert(value)

        return d

    def __str__(self):
        return "{0}: {1}".format(self.__class__, self.__dict__)
class AppModel:
    '''AppModels are used as thin and simple communication objects. They can
    also be saved into a cache. They are essentially known as Data Transfer
    Objects or DTOs.

    .. automethod:: __init__
    '''
    def __init__(self, raw, id_name='id', hints=None):
        '''
        The application model constructor.

        :param raw: Dictionary of properties of the raw data.
        :param id_name: Name of the identifier property.
        :param hints: Tells which of the raw attributes are date or datetime
                      strings and what their format is. Example:
                      ```
                      {
                        DATE_DATA: {
                            'birthday': '%d.%m.%Y'
                        },
                        DATETIME_DATA: {
                            'created': '%Y-%m-%d %H:%M:%S'
                        }
                      }
                      ```
                      These attributes are then saved in the standard zsl
                      service date/datetime format (consult
                      :mod:`zsl.utils.date_helper` for more.)
        '''
        pass
    def get_id(self):
        pass
    def _set_id_name(self, id_name):
        pass
    @staticmethod
    def convert(v):
        pass
    def get_attributes(self):
        pass
    def __str__(self):
        pass
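An illustrative round trip through AppModel, assuming the class (and its extend_object_by_dict helper) is importable from the module path in this record; the raw dict and its keys are made up.

from zsl.db.model.app_model import AppModel

raw = {'id': 7, 'name': 'Jane', 'tags': ['vip', 'beta']}
model = AppModel(raw)

print(model.get_id())          # 7
print(model.get_attributes())  # {'id': 7, 'name': 'Jane', 'tags': ['vip', 'beta']}

# Nested AppModel instances are serialized recursively by get_attributes():
wrapper = AppModel({'id': 1, 'child': model})
print(wrapper.get_attributes()['child']['name'])  # 'Jane'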
8
2
9
1
5
3
2
0.62
0
4
0
6
5
1
6
6
71
16
34
13
26
21
27
12
20
8
0
3
14
7,856
AtteqCom/zsl
AtteqCom_zsl/src/zsl/db/model/raw_model.py
zsl.db.model.raw_model.ModelBase
class ModelBase: def update(self, app_model, forbidden_keys=None, inverse=False): """ Updates the raw model. Consult `zsl.utils.model_helper.update_model`. """ if forbidden_keys is None: forbidden_keys = [] update_model(self, app_model, forbidden_keys, inverse) def get_app_model(self, id_name='id', hints=None): return self.__app_model__(self.__dict__, id_name, hints)
class ModelBase: def update(self, app_model, forbidden_keys=None, inverse=False): ''' Updates the raw model. Consult `zsl.utils.model_helper.update_model`. ''' pass def get_app_model(self, id_name='id', hints=None): pass
3
1
5
1
3
2
2
0.43
0
0
0
2
2
0
2
2
12
2
7
3
4
3
7
3
4
2
0
1
3
7,857
AtteqCom/zsl
AtteqCom_zsl/src/zsl/db/helpers/query_filter.py
zsl.db.helpers.query_filter.QueryFilter
class QueryFilter:
    """
    Helper class for applying filter criteria to query.
    """

    def __init__(self, query_filter, mappings=None, allow_null=False):
        """
        query_filter = {FILTER_VALUES: dict, FILTER_HINT: dict}
            - FILTER_VALUES dictionary (see example)
            - FILTER_HINT dictionary - tells which operator (OperatorEq, OperatorBetween, ...) to use for which
              key from the FILTER_VALUES dictionary

        mappings dict
            - maps keys from FILTER_VALUES to column attribute names of objects (see example)
            - if the key from FILTER_VALUES is equal to the name of the column attribute, it doesn't have to be
              mentioned in mappings

        allow_null boolean
            - if False (default value), None values from FILTER_VALUES will be ignored

        Example:
            query_filter = {
                FILTER_VALUES: {'fullname': 'jessica', 'lastname_initial': None},
                FILTER_HINT: {
                    'fullname': OperatorLike,
                    'lastname_initial': OperatorLeftLike,
                    'exclude_cid': OperatorNeq,
                }
            }
            mappings = {
                'lastname_initial': (Celebrity, 'lastname'),
                'exclude_cid': (Celebrity, 'cid'),
            }

            Notes:
                - the 'fullname' key from FILTER_VALUES corresponds to the column attribute Celebrity.fullname,
                  so it doesn't have to be mentioned in mappings
                - if allow_null == False, the 'lastname_initial' key from FILTER_VALUES will be ignored
                - Celebrity is a SQLAlchemy db model
        """
        if mappings is None:
            mappings = {}

        self._query_filter = query_filter
        self._allow_null = allow_null
        self._mappings = mappings

    def apply_query_filter(self, q, cls):
        hints = self._query_filter[FILTER_HINT]
        values = self._query_filter[FILTER_VALUES]

        for k, v in values.items():
            if v is None and not self._allow_null:
                continue

            if k in self._mappings:
                (cls, field) = self._mappings[k]
                attr = getattr(cls, field)
            else:
                attr = getattr(cls, k)

            q = hints[k]().apply(q, attr, v)

        return q
class QueryFilter:
    '''
    Helper class for applying filter criteria to query.
    '''
    def __init__(self, query_filter, mappings=None, allow_null=False):
        '''
        query_filter = {FILTER_VALUES: dict, FILTER_HINT: dict}
            - FILTER_VALUES dictionary (see example)
            - FILTER_HINT dictionary - tells which operator (OperatorEq, OperatorBetween, ...) to use for which
              key from the FILTER_VALUES dictionary

        mappings dict
            - maps keys from FILTER_VALUES to column attribute names of objects (see example)
            - if the key from FILTER_VALUES is equal to the name of the column attribute, it doesn't have to be
              mentioned in mappings

        allow_null boolean
            - if False (default value), None values from FILTER_VALUES will be ignored

        Example:
            query_filter = {
                FILTER_VALUES: {'fullname': 'jessica', 'lastname_initial': None},
                FILTER_HINT: {
                    'fullname': OperatorLike,
                    'lastname_initial': OperatorLeftLike,
                    'exclude_cid': OperatorNeq,
                }
            }
            mappings = {
                'lastname_initial': (Celebrity, 'lastname'),
                'exclude_cid': (Celebrity, 'cid'),
            }

            Notes:
                - the 'fullname' key from FILTER_VALUES corresponds to the column attribute Celebrity.fullname,
                  so it doesn't have to be mentioned in mappings
                - if allow_null == False, the 'lastname_initial' key from FILTER_VALUES will be ignored
                - Celebrity is a SQLAlchemy db model
        '''
        pass
    def apply_query_filter(self, q, cls):
        pass
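A usage sketch for QueryFilter. FILTER_VALUES, FILTER_HINT and the operator classes are the names referenced in the docstring above and are assumed importable from the record's module; Celebrity and session are hypothetical SQLAlchemy stand-ins.

from zsl.db.helpers.query_filter import (FILTER_HINT, FILTER_VALUES,
                                         OperatorLike, OperatorNeq,
                                         QueryFilter)

qf = QueryFilter(
    {
        FILTER_VALUES: {'fullname': 'jessica', 'exclude_cid': 42},
        FILTER_HINT: {'fullname': OperatorLike, 'exclude_cid': OperatorNeq},
    },
    mappings={'exclude_cid': (Celebrity, 'cid')},
)
# Roughly: WHERE fullname LIKE ... AND cid != 42
query = qf.apply_query_filter(session.query(Celebrity), Celebrity)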
3
2
27
4
10
14
3
1.5
0
0
0
0
2
3
2
2
59
9
20
11
17
30
19
11
16
4
0
2
6
7,858
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.SecuredTask
class SecuredTask(TaskDecorator): def get_name(self): return TaskDecorator.get_name(self) def set_asl(self, asl): self._asl = asl def get_data(self): random_token = _random_string(16) return { "data": self._task.get_data(), "security": { "random_token": random_token, "hashed_token": hashlib.sha1(random_token + self._asl.get_secure_token()).hexdigest().upper() } }
class SecuredTask(TaskDecorator): def get_name(self): pass def set_asl(self, asl): pass def get_data(self): pass
4
0
4
0
4
0
1
0
1
0
0
0
3
1
3
8
16
2
14
6
10
0
8
6
4
1
3
0
3
7,859
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.WebService
class WebService(Service): def __init__(self, web_config, security_config): super(WebService, self).__init__() self._web_config = web_config self._security_config = security_config self._service_layer_url = self._web_config['SERVICE_LAYER_URL'] def get_service_layer_url(self): return self._service_layer_url def _inner_call(self, name, data): if data is None: data = "null" elif not isinstance(data, str): data = str(data) req = requests.post(self._service_layer_url + name, json=data) return req.text
class WebService(Service): def __init__(self, web_config, security_config): pass def get_service_layer_url(self): pass def _inner_call(self, name, data): pass
4
0
5
1
5
0
2
0
1
2
0
0
3
3
3
10
19
4
15
8
11
0
14
8
10
3
2
1
5
7,860
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.ErrorTaskResult
class ErrorTaskResult(TaskResult, TaskResultDecorator):

    def get_complete_result(self):
        result = self._task_result.get_result()
        return result

    def get_result(self):
        result = self._task_result.get_result()
        return json.loads(result['data'])

    def is_error(self):
        result = self._task_result.get_result()
        return 'error' in result

    def get_error(self):
        return self._task_result.get_result()['error']
class ErrorTaskResult(TaskResult, TaskResultDecorator): def get_complete_result(self): pass def get_result(self): pass def is_error(self): pass def get_error(self): pass
5
0
3
0
3
0
1
0
2
0
0
0
4
0
4
7
15
3
12
8
7
0
12
8
7
2
2
0
5
7,861
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.GearmanService
class GearmanService(Service): def __init__(self, gearman_config, security_config=None): super(GearmanService, self).__init__() self._gearman_config = gearman_config self._security_config = security_config self._gearman_client = gearman.client.GearmanClient(self._gearman_config['HOST']) self._blocking_status = True def set_blocking(self, blocking_status): self._blocking_status = blocking_status def _inner_call(self, name, data): if data is None: data = "null" elif not isinstance(data, str): data = str(data) completed_job_request = self._gearman_client.submit_job( self._gearman_config['TASK_NAME'], json.dumps({ 'path': name, 'data': data }), background=not self._blocking_status ) if self._blocking_status: return completed_job_request.result
class GearmanService(Service): def __init__(self, gearman_config, security_config=None): pass def set_blocking(self, blocking_status): pass def _inner_call(self, name, data): pass
4
0
9
1
8
0
2
0
1
2
0
0
3
4
3
10
29
5
24
9
20
0
16
9
12
4
2
1
6
7,862
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.JsonTask
class JsonTask(TaskDecorator): def get_name(self): return TaskDecorator.get_name(self) def get_data(self): data = self._task.get_data() return json.dumps(data)
class JsonTask(TaskDecorator): def get_name(self): pass def get_data(self): pass
3
0
3
0
3
0
1
0
1
0
0
0
2
0
2
7
7
1
6
4
3
0
6
4
3
1
3
0
2
7,863
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.JsonTaskResult
class JsonTaskResult(TaskResult, TaskResultDecorator): def get_result(self): result = self._task_result.get_result() return json.loads(result)
class JsonTaskResult(TaskResult, TaskResultDecorator): def get_result(self): pass
2
0
3
0
3
0
1
0
2
0
0
0
1
0
1
4
4
0
4
3
2
0
4
3
2
1
2
0
1
7,864
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.TaskDecorator
class TaskDecorator(Task): def __init__(self, task): self._task = task def get_name(self): return self._task.get_name() def get_data(self): return self._task.get_data()
class TaskDecorator(Task): def __init__(self, task): pass def get_name(self): pass def get_data(self): pass
4
0
2
0
2
0
1
0
1
0
0
2
3
1
3
5
9
2
7
5
3
0
7
5
3
1
2
0
3
7,865
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.TaskResultDecorator
class TaskResultDecorator(object): def __init__(self, task_result): assert isinstance(task_result, TaskResult) self._task_result = task_result
class TaskResultDecorator(object): def __init__(self, task_result): pass
2
0
3
0
3
0
1
0
1
1
1
2
1
1
1
1
4
0
4
3
2
0
4
3
2
1
1
0
1
7,866
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.RawTaskResult
class RawTaskResult(TaskResult): def __init__(self, task, result): assert isinstance(task, Task) self._task = task self._result = result def get_task(self): return self._task def get_result(self): return self._result
class RawTaskResult(TaskResult): def __init__(self, task, result): pass def get_task(self): pass def get_result(self): pass
4
0
3
0
3
0
1
0
1
1
1
0
3
2
3
5
11
2
9
6
5
0
9
6
5
1
2
0
3
7,867
AtteqCom/zsl_client
python/zsl_client.py
zsl_client.RawTask
class RawTask(Task): def __init__(self, name, data): self._name = name self._data = data def get_name(self): return self._name name = property(get_name) def get_data(self): return self._data data = property(get_data)
class RawTask(Task): def __init__(self, name, data): pass def get_name(self): pass def get_data(self): pass
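The zsl_client records above compose as two decorator chains: Task wrappers transform the request payload and TaskResult wrappers decode the response. A hedged sketch with made-up host and task-name config values; it calls the internal _inner_call entry point shown in the GearmanService record, since the public call API of the Service base class is outside this excerpt.

from zsl_client import (GearmanService, JsonTask, JsonTaskResult, RawTask,
                        RawTaskResult)

# Hypothetical gearman endpoint configuration.
service = GearmanService({'HOST': ['localhost:4730'], 'TASK_NAME': 'zsl'})

# JsonTask JSON-encodes the payload of the wrapped RawTask before submission.
task = JsonTask(RawTask('user/fetch_user', {'user_id': 1}))
raw_response = service._inner_call(task.get_name(), task.get_data())

# RawTaskResult holds the raw response; JsonTaskResult decodes it on access
# (assuming TaskResult itself defines no constructor, so
# TaskResultDecorator.__init__ applies).
result = JsonTaskResult(RawTaskResult(task, raw_response))
print(result.get_result())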
4
0
2
0
2
0
1
0
1
0
0
0
3
2
3
5
14
4
10
8
6
0
10
8
6
1
2
0
3
7,868
Aula13/poloniex
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/Aula13_poloniex/poloniex/poloniex.py
poloniex.poloniex.Poloniex._PoloniexAuth
class _PoloniexAuth(_requests.auth.AuthBase): """Poloniex Request Authentication.""" def __init__(self, apikey, secret): self._apikey, self._secret = apikey, secret def __call__(self, request): signature = _hmac.new( str.encode(self._secret, 'utf-8'), str.encode(request.body, 'utf-8'), _hashlib.sha512 ) request.headers.update({"Key": self._apikey, "Sign": signature.hexdigest()}) return request
class _PoloniexAuth(_requests.auth.AuthBase): '''Poloniex Request Authentication.''' def __init__(self, apikey, secret): pass def __call__(self, request): pass
3
1
6
0
6
0
1
0.08
1
1
0
0
2
2
2
2
16
3
12
5
9
1
7
5
4
1
1
0
2
7,869
Aula13/poloniex
Aula13_poloniex/poloniex/utils.py
poloniex.utils.AutoCastDict
class AutoCastDict(_collections_abc.Mapping):

    """Dictionary that automatically casts strings."""

    def __init__(self, *args, **kwargs):
        self.__dict = dict(*args, **kwargs)

    def __getitem__(self, key):
        value = self.__dict[key]
        try:
            return _ast.literal_eval(value)
        except (ValueError, SyntaxError, TypeError):
            return value

    def __str__(self):
        items = ('{!r}: {!r}'.format(*it) for it in _six.iteritems(self))
        return '{{{}}}'.format(', '.join(items))

    __repr__ = __str__

    def __iter__(self):
        return iter(self.__dict)

    def __len__(self):
        return len(self.__dict)
class AutoCastDict(_collections_abc.Mapping):
    '''Dictionary that automatically casts strings.'''
    def __init__(self, *args, **kwargs):
        pass
    def __getitem__(self, key):
        pass
    def __str__(self):
        pass
    def __iter__(self):
        pass
    def __len__(self):
        pass
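For illustration, using the module path given in this record: values that parse as Python literals are cast on access, everything else passes through unchanged.

from poloniex.utils import AutoCastDict

prices = AutoCastDict({'last': '0.0042', 'count': '12', 'pair': 'BTC_ETH'})

print(prices['last'] + 1)   # 1.0042  ('0.0042' cast to float by literal_eval)
print(prices['count'] * 2)  # 24      ('12' cast to int)
print(prices['pair'])       # 'BTC_ETH' (not a literal; returned as-is)
print(len(prices), sorted(prices))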
6
1
3
0
3
0
1
0.06
1
4
0
0
5
1
5
39
25
7
17
10
11
1
17
10
11
2
6
1
6
7,870
Aula13/poloniex
Aula13_poloniex/poloniex/poloniex.py
poloniex.poloniex.Poloniex
class Poloniex(PoloniexPublic):
    """Client to connect to Poloniex private APIs."""

    class _PoloniexAuth(_requests.auth.AuthBase):
        """Poloniex Request Authentication."""

        def __init__(self, apikey, secret):
            self._apikey, self._secret = apikey, secret

        def __call__(self, request):
            signature = _hmac.new(
                str.encode(self._secret, 'utf-8'),
                str.encode(request.body, 'utf-8'),
                _hashlib.sha512
            )
            request.headers.update({"Key": self._apikey,
                                    "Sign": signature.hexdigest()})
            return request

    def __init__(self, apikey=None, secret=None, public_url=_PUBLIC_URL,
                 private_url=_PRIVATE_URL, limit=6,
                 session_class=_requests.Session, session=None,
                 startup_lock=None, semaphore=None, timer=None,
                 nonce_iter=None, nonce_lock=None):
        """Initialize the Poloniex private client."""
        super(Poloniex, self).__init__(public_url, limit, session_class,
                                       session, startup_lock, semaphore,
                                       timer)
        self._private_url = private_url
        self._apikey = apikey
        self._secret = secret
        self.nonce_lock = nonce_lock or _threading.RLock()
        self.nonce_iter = nonce_iter or _itertools.count(int(_time.time() * 1000))

    @_api_wrapper
    def _private(self, command, **params):
        """Invoke the 'command' public API with optional params."""
        if not self._apikey or not self._secret:
            raise PoloniexCredentialsException('missing apikey/secret')

        with self.nonce_lock:
            params.update({'command': command, 'nonce': next(self.nonce_iter)})
            response = self.session.post(
                self._private_url, data=params,
                auth=Poloniex._PoloniexAuth(self._apikey, self._secret))
            return response

    def returnBalances(self):
        """Returns all of your available balances."""
        return self._private('returnBalances')

    def returnCompleteBalances(self, account=None):
        """Returns all of your balances, including available balance, balance
        on orders, and the estimated BTC value of your balance. By default,
        this call is limited to your exchange account; set the "account" POST
        parameter to "all" to include your margin and lending accounts."""
        return self._private('returnCompleteBalances', account=account)

    def returnDepositAddresses(self):
        """Returns all of your deposit addresses."""
        return self._private('returnDepositAddresses')

    def generateNewAddress(self, currency):
        """Generates a new deposit address for the currency specified by the
        "currency" POST parameter. Only one address per currency per day may
        be generated, and a new address may not be generated before the
        previously-generated one has been used."""
        return self._private('generateNewAddress', currency=currency)

    def returnDepositsWithdrawals(self, start=0, end=2**32-1):
        """Returns your deposit and withdrawal history within a range,
        specified by the "start" and "end" POST parameters, both of which
        should be given as UNIX timestamps."""
        return self._private('returnDepositsWithdrawals', start=start, end=end)

    def returnDeposits(self, start=0, end=2**32-1):
        """Returns your deposit history within a range, specified by the
        "start" and "end" POST parameters, both of which should be given as
        UNIX timestamps."""
        return self.returnDepositsWithdrawals(start, end)['deposits']

    def returnWithdrawals(self, start=0, end=2**32-1):
        """Returns your withdrawal history within a range, specified by the
        "start" and "end" POST parameters, both of which should be given as
        UNIX timestamps."""
        return self.returnDepositsWithdrawals(start, end)['withdrawals']

    def returnOpenOrders(self, currencyPair='all'):
        """Returns your open orders for a given market, specified by the
        "currencyPair" POST parameter, e.g. "BTC_XCP". Set "currencyPair" to
        "all" to return open orders for all markets."""
        return self._private('returnOpenOrders', currencyPair=currencyPair)

    def returnTradeHistory(self, currencyPair='all', start=None, end=None,
                           limit=500):
        """Returns your trade history for a given market, specified by the
        "currencyPair" POST parameter. You may specify "all" as the
        currencyPair to receive your trade history for all markets. You may
        optionally specify a range via "start" and/or "end" POST parameters,
        given in UNIX timestamp format; if you do not specify a range, it
        will be limited to one day."""
        return self._private('returnTradeHistory', currencyPair=currencyPair,
                             start=start, end=end, limit=limit)

    def returnTradeHistoryPublic(self, currencyPair, start=None, end=None):
        """Returns the past 200 trades for a given market, or up to 50,000
        trades between a range specified in UNIX timestamps by the "start"
        and "end" GET parameters."""
        return super(Poloniex, self).returnTradeHistory(currencyPair, start, end)

    def returnOrderTrades(self, orderNumber):
        """Returns all trades involving a given order, specified by the
        "orderNumber" POST parameter. If no trades for the order have
        occurred or you specify an order that does not belong to you, you
        will receive an error."""
        return self._private('returnOrderTrades', orderNumber=orderNumber)

    def buy(self, currencyPair, rate, amount, fillOrKill=None,
            immediateOrCancel=None, postOnly=None):
        """Places a limit buy order in a given market. Required POST
        parameters are "currencyPair", "rate", and "amount". If successful,
        the method will return the order number. You may optionally set
        "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill
        order will either fill in its entirety or be completely aborted. An
        immediate-or-cancel order can be partially or completely filled, but
        any portion of the order that cannot be filled immediately will be
        canceled rather than left on the order book. A post-only order will
        only be placed if no portion of it fills immediately; this guarantees
        you will never pay the taker fee on any part of the order that
        fills."""
        return self._private('buy', currencyPair=currencyPair, rate=rate,
                             amount=amount, fillOrKill=fillOrKill,
                             immediateOrCancel=immediateOrCancel,
                             postOnly=postOnly)

    def sell(self, currencyPair, rate, amount, fillOrKill=None,
             immediateOrCancel=None, postOnly=None):
        """Places a sell order in a given market. Parameters and output are
        the same as for the buy method."""
        return self._private('sell', currencyPair=currencyPair, rate=rate,
                             amount=amount, fillOrKill=fillOrKill,
                             immediateOrCancel=immediateOrCancel,
                             postOnly=postOnly)

    def cancelOrder(self, orderNumber):
        """Cancels an order you have placed in a given market. Required POST
        parameter is "orderNumber"."""
        return self._private('cancelOrder', orderNumber=orderNumber)

    def moveOrder(self, orderNumber, rate, amount=None, postOnly=None,
                  immediateOrCancel=None):
        """Cancels an order and places a new one of the same type in a single
        atomic transaction, meaning either both operations will succeed or
        both will fail. Required POST parameters are "orderNumber" and
        "rate"; you may optionally specify "amount" if you wish to change the
        amount of the new order. "postOnly" or "immediateOrCancel" may be
        specified for exchange orders, but will have no effect on margin
        orders."""
        return self._private('moveOrder', orderNumber=orderNumber, rate=rate,
                             amount=amount, postOnly=postOnly,
                             immediateOrCancel=immediateOrCancel)

    def withdraw(self, currency, amount, address, paymentId=None):
        """Immediately places a withdrawal for a given currency, with no
        email confirmation. In order to use this method, the withdrawal
        privilege must be enabled for your API key. Required POST parameters
        are "currency", "amount", and "address". For XMR withdrawals, you may
        optionally specify "paymentId"."""
        return self._private('withdraw', currency=currency, amount=amount,
                             address=address, paymentId=paymentId)

    def returnFeeInfo(self):
        """If you are enrolled in the maker-taker fee schedule, returns your
        current trading fees and trailing 30-day volume in BTC. This
        information is updated once every 24 hours."""
        return self._private('returnFeeInfo')

    def returnAvailableAccountBalances(self, account=None):
        """Returns your balances sorted by account. You may optionally
        specify the "account" POST parameter if you wish to fetch only the
        balances of one account. Please note that balances in your margin
        account may not be accessible if you have any open margin positions
        or orders."""
        return self._private('returnAvailableAccountBalances', account=account)

    def returnTradableBalances(self):
        """Returns your current tradable balances for each currency in each
        market for which margin trading is enabled. Please note that these
        balances may vary continually with market conditions."""
        return self._private('returnTradableBalances')

    def transferBalance(self, currency, amount, fromAccount, toAccount):
        """Transfers funds from one account to another (e.g. from your
        exchange account to your margin account). Required POST parameters
        are "currency", "amount", "fromAccount", and "toAccount"."""
        return self._private('transferBalance', currency=currency,
                             amount=amount, fromAccount=fromAccount,
                             toAccount=toAccount)

    def returnMarginAccountSummary(self):
        """Returns a summary of your entire margin account. This is the same
        information you will find in the Margin Account section of the Margin
        Trading page, under the Markets list."""
        return self._private('returnMarginAccountSummary')

    def marginBuy(self, currencyPair, rate, amount, lendingRate=None):
        """Places a margin buy order in a given market. Required POST
        parameters are "currencyPair", "rate", and "amount". You may
        optionally specify a maximum lending rate using the "lendingRate"
        parameter. If successful, the method will return the order number
        and any trades immediately resulting from your order."""
        return self._private('marginBuy', currencyPair=currencyPair,
                             rate=rate, amount=amount,
                             lendingRate=lendingRate)

    def marginSell(self, currencyPair, rate, amount, lendingRate=None):
        """Places a margin sell order in a given market. Parameters and
        output are the same as for the marginBuy method."""
        return self._private('marginSell', currencyPair=currencyPair,
                             rate=rate, amount=amount,
                             lendingRate=lendingRate)

    def getMarginPosition(self, currencyPair):
        """Returns information about your margin position in a given market,
        specified by the "currencyPair" POST parameter. You may set
        "currencyPair" to "all" if you wish to fetch all of your margin
        positions at once. If you have no margin position in the specified
        market, "type" will be set to "none". "liquidationPrice" is an
        estimate, and does not necessarily represent the price at which an
        actual forced liquidation will occur. If you have no liquidation
        price, the value will be -1."""
        return self._private('getMarginPosition', currencyPair=currencyPair)

    def closeMarginPosition(self, currencyPair):
        """Closes your margin position in a given market (specified by the
        "currencyPair" POST parameter) using a market order. This call will
        also return success if you do not have an open position in the
        specified market."""
        return self._private('closeMarginPosition', currencyPair=currencyPair)

    def createLoanOffer(self, currency, amount, duration, autoRenew,
                        lendingRate):
        """Creates a loan offer for a given currency. Required POST
        parameters are "currency", "amount", "duration", "autoRenew" (0 or
        1), and "lendingRate"."""
        return self._private('createLoanOffer', currency=currency,
                             amount=amount, duration=duration,
                             autoRenew=autoRenew, lendingRate=lendingRate)

    def cancelLoanOffer(self, orderNumber):
        """Cancels a loan offer specified by the "orderNumber" POST
        parameter."""
        return self._private('cancelLoanOffer', orderNumber=orderNumber)

    def returnOpenLoanOffers(self):
        """Returns your open loan offers for each currency."""
        return self._private('returnOpenLoanOffers')

    def returnActiveLoans(self):
        """Returns your active loans for each currency."""
        return self._private('returnActiveLoans')

    def returnLendingHistory(self, start=0, end=2**32-1, limit=None):
        """Returns your lending history within a time range specified by the
        "start" and "end" POST parameters as UNIX timestamps. "limit" may
        also be specified to limit the number of rows returned."""
        return self._private('returnLendingHistory', start=start, end=end,
                             limit=limit)

    def toggleAutoRenew(self, orderNumber):
        """Toggles the autoRenew setting on an active loan, specified by the
        "orderNumber" POST parameter. If successful, "message" will indicate
        the new autoRenew setting."""
        return self._private('toggleAutoRenew', orderNumber=orderNumber)
class Poloniex(PoloniexPublic):
    '''Client to connect to Poloniex private APIs.'''
    class _PoloniexAuth(_requests.auth.AuthBase):
        '''Poloniex Request Authentication.'''
        def __init__(self, apikey, secret):
            pass
        def __call__(self, request):
            pass
    def __init__(self, apikey=None, secret=None, public_url=_PUBLIC_URL,
                 private_url=_PRIVATE_URL, limit=6,
                 session_class=_requests.Session, session=None,
                 startup_lock=None, semaphore=None, timer=None,
                 nonce_iter=None, nonce_lock=None):
        '''Initialize the Poloniex private client.'''
        pass
    @_api_wrapper
    def _private(self, command, **params):
        '''Invoke the 'command' public API with optional params.'''
        pass
    def returnBalances(self):
        '''Returns all of your available balances.'''
        pass
    def returnCompleteBalances(self, account=None):
        '''Returns all of your balances, including available balance, balance
        on orders, and the estimated BTC value of your balance. By default,
        this call is limited to your exchange account; set the "account" POST
        parameter to "all" to include your margin and lending accounts.'''
        pass
    def returnDepositAddresses(self):
        '''Returns all of your deposit addresses.'''
        pass
    def generateNewAddress(self, currency):
        '''Generates a new deposit address for the currency specified by the
        "currency" POST parameter. Only one address per currency per day may
        be generated, and a new address may not be generated before the
        previously-generated one has been used.'''
        pass
    def returnDepositsWithdrawals(self, start=0, end=2**32-1):
        '''Returns your deposit and withdrawal history within a range,
        specified by the "start" and "end" POST parameters, both of which
        should be given as UNIX timestamps.'''
        pass
    def returnDeposits(self, start=0, end=2**32-1):
        '''Returns your deposit history within a range, specified by the
        "start" and "end" POST parameters, both of which should be given as
        UNIX timestamps.'''
        pass
    def returnWithdrawals(self, start=0, end=2**32-1):
        '''Returns your withdrawal history within a range, specified by the
        "start" and "end" POST parameters, both of which should be given as
        UNIX timestamps.'''
        pass
    def returnOpenOrders(self, currencyPair='all'):
        '''Returns your open orders for a given market, specified by the
        "currencyPair" POST parameter, e.g. "BTC_XCP". Set "currencyPair" to
        "all" to return open orders for all markets.'''
        pass
    def returnTradeHistory(self, currencyPair='all', start=None, end=None,
                           limit=500):
        '''Returns your trade history for a given market, specified by the
        "currencyPair" POST parameter. You may specify "all" as the
        currencyPair to receive your trade history for all markets. You may
        optionally specify a range via "start" and/or "end" POST parameters,
        given in UNIX timestamp format; if you do not specify a range, it
        will be limited to one day.'''
        pass
    def returnTradeHistoryPublic(self, currencyPair, start=None, end=None):
        '''Returns the past 200 trades for a given market, or up to 50,000
        trades between a range specified in UNIX timestamps by the "start"
        and "end" GET parameters.'''
        pass
    def returnOrderTrades(self, orderNumber):
        '''Returns all trades involving a given order, specified by the
        "orderNumber" POST parameter. If no trades for the order have
        occurred or you specify an order that does not belong to you, you
        will receive an error.'''
        pass
    def buy(self, currencyPair, rate, amount, fillOrKill=None,
            immediateOrCancel=None, postOnly=None):
        '''Places a limit buy order in a given market. Required POST
        parameters are "currencyPair", "rate", and "amount". If successful,
        the method will return the order number. You may optionally set
        "fillOrKill", "immediateOrCancel", "postOnly" to 1. A fill-or-kill
        order will either fill in its entirety or be completely aborted. An
        immediate-or-cancel order can be partially or completely filled, but
        any portion of the order that cannot be filled immediately will be
        canceled rather than left on the order book. A post-only order will
        only be placed if no portion of it fills immediately; this guarantees
        you will never pay the taker fee on any part of the order that
        fills.'''
        pass
    def sell(self, currencyPair, rate, amount, fillOrKill=None,
             immediateOrCancel=None, postOnly=None):
        '''Places a sell order in a given market. Parameters and output are
        the same as for the buy method.'''
        pass
    def cancelOrder(self, orderNumber):
        '''Cancels an order you have placed in a given market. Required POST
        parameter is "orderNumber".'''
        pass
    def moveOrder(self, orderNumber, rate, amount=None, postOnly=None,
                  immediateOrCancel=None):
        '''Cancels an order and places a new one of the same type in a single
        atomic transaction, meaning either both operations will succeed or
        both will fail. Required POST parameters are "orderNumber" and
        "rate"; you may optionally specify "amount" if you wish to change the
        amount of the new order. "postOnly" or "immediateOrCancel" may be
        specified for exchange orders, but will have no effect on margin
        orders.'''
        pass
    def withdraw(self, currency, amount, address, paymentId=None):
        '''Immediately places a withdrawal for a given currency, with no
        email confirmation. In order to use this method, the withdrawal
        privilege must be enabled for your API key. Required POST parameters
        are "currency", "amount", and "address". For XMR withdrawals, you may
        optionally specify "paymentId".'''
        pass
    def returnFeeInfo(self):
        '''If you are enrolled in the maker-taker fee schedule, returns your
        current trading fees and trailing 30-day volume in BTC. This
        information is updated once every 24 hours.'''
        pass
    def returnAvailableAccountBalances(self, account=None):
        '''Returns your balances sorted by account. You may optionally
        specify the "account" POST parameter if you wish to fetch only the
        balances of one account. Please note that balances in your margin
        account may not be accessible if you have any open margin positions
        or orders.'''
        pass
    def returnTradableBalances(self):
        '''Returns your current tradable balances for each currency in each
        market for which margin trading is enabled. Please note that these
        balances may vary continually with market conditions.'''
        pass
    def transferBalance(self, currency, amount, fromAccount, toAccount):
        '''Transfers funds from one account to another (e.g. from your
        exchange account to your margin account). Required POST parameters
        are "currency", "amount", "fromAccount", and "toAccount".'''
        pass
    def returnMarginAccountSummary(self):
        '''Returns a summary of your entire margin account. This is the same
        information you will find in the Margin Account section of the Margin
        Trading page, under the Markets list.'''
        pass
    def marginBuy(self, currencyPair, rate, amount, lendingRate=None):
        '''Places a margin buy order in a given market. Required POST
        parameters are "currencyPair", "rate", and "amount". You may
        optionally specify a maximum lending rate using the "lendingRate"
        parameter. If successful, the method will return the order number
        and any trades immediately resulting from your order.'''
        pass
    def marginSell(self, currencyPair, rate, amount, lendingRate=None):
        '''Places a margin sell order in a given market. Parameters and
        output are the same as for the marginBuy method.'''
        pass
    def getMarginPosition(self, currencyPair):
        '''Returns information about your margin position in a given market,
        specified by the "currencyPair" POST parameter. You may set
        "currencyPair" to "all" if you wish to fetch all of your margin
        positions at once. If you have no margin position in the specified
        market, "type" will be set to "none". "liquidationPrice" is an
        estimate, and does not necessarily represent the price at which an
        actual forced liquidation will occur. If you have no liquidation
        price, the value will be -1.'''
        pass
    def closeMarginPosition(self, currencyPair):
        '''Closes your margin position in a given market (specified by the
        "currencyPair" POST parameter) using a market order. This call will
        also return success if you do not have an open position in the
        specified market.'''
        pass
    def createLoanOffer(self, currency, amount, duration, autoRenew,
                        lendingRate):
        '''Creates a loan offer for a given currency. Required POST
        parameters are "currency", "amount", "duration", "autoRenew" (0 or
        1), and "lendingRate".'''
        pass
    def cancelLoanOffer(self, orderNumber):
        '''Cancels a loan offer specified by the "orderNumber" POST
        parameter.'''
        pass
    def returnOpenLoanOffers(self):
        '''Returns your open loan offers for each currency.'''
        pass
    def returnActiveLoans(self):
        '''Returns your active loans for each currency.'''
        pass
    def returnLendingHistory(self, start=0, end=2**32-1, limit=None):
        '''Returns your lending history within a time range specified by the
        "start" and "end" POST parameters as UNIX timestamps. "limit" may
        also be specified to limit the number of rows returned.'''
        pass
    def toggleAutoRenew(self, orderNumber):
        '''Toggles the autoRenew setting on an active loan, specified by the
        "orderNumber" POST parameter. If successful, "message" will indicate
        the new autoRenew setting.'''
        pass
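A hedged usage sketch for the client above; the key and secret are placeholders and live network access is required.

from poloniex.poloniex import Poloniex

client = Poloniex(apikey='YOUR-API-KEY', secret='YOUR-SECRET')

ticker = client.returnTicker()      # public endpoint inherited from PoloniexPublic
balances = client.returnBalances()  # private endpoint; raises
                                    # PoloniexCredentialsException without credentials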
38
35
7
0
3
3
1
0.93
1
6
2
0
33
5
33
43
274
39
122
56
74
113
84
45
47
2
2
1
36
7,871
Aula13/poloniex
Aula13_poloniex/poloniex/exceptions.py
poloniex.exceptions.PoloniexException
class PoloniexException(Exception): """Generic Poloniex Exception.""" pass
class PoloniexException(Exception): '''Generic Poloniex Exception.''' pass
1
1
0
0
0
0
0
0.5
1
0
0
2
0
0
0
10
3
0
2
1
1
1
2
1
1
0
3
0
0
7,872
Aula13/poloniex
Aula13_poloniex/poloniex/exceptions.py
poloniex.exceptions.PoloniexCredentialsException
class PoloniexCredentialsException(PoloniexException, RuntimeError): """Missing or wrong credentials while using Trading API.""" pass
class PoloniexCredentialsException(PoloniexException, RuntimeError): '''Missing or wrong credentials while using Trading API.''' pass
1
1
0
0
0
0
0
0.5
2
0
0
0
0
0
0
11
3
0
2
1
1
1
2
1
1
0
4
0
0
7,873
Aula13/poloniex
Aula13_poloniex/poloniex/exceptions.py
poloniex.exceptions.PoloniexCommandException
class PoloniexCommandException(PoloniexException, RuntimeError): """Error in command execution.""" pass
class PoloniexCommandException(PoloniexException, RuntimeError): '''Error in command execution.''' pass
1
1
0
0
0
0
0
0.5
2
0
0
0
0
0
0
11
3
0
2
1
1
1
2
1
1
0
4
0
0
7,874
Aula13/poloniex
Aula13_poloniex/poloniex/concurrency.py
poloniex.concurrency.Semaphore
class Semaphore(object):
    """This class implements semaphore objects.

    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.

    This is a replica of the Python 3 implementation with a convenience clear
    method. The reason this was duplicated rather than subclassed is that on
    Python 2, the necessary value attributes are hard-private instead of
    soft-private.

    """

    # After Tim Peters' semaphore class, but not quite the same (no maximum)

    def __init__(self, value=1):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        self._cond = threading.Condition(threading.Lock())
        self._initial = self._value = value

    def acquire(self, blocking=True, timeout=None):
        """Acquire a semaphore, decrementing the internal counter by one.

        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.

        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.

        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.

        When invoked with a timeout other than None, it will block for at most
        timeout seconds. If acquire does not complete successfully in that
        interval, return false. Return true otherwise.

        """
        if not blocking and timeout is not None:
            raise ValueError("can't specify timeout for non-blocking acquire")
        rc = False
        endtime = None
        with self._cond:
            while self._value == 0:
                if not blocking:
                    break
                if timeout is not None:
                    if endtime is None:
                        endtime = threading._time() + timeout
                    else:
                        timeout = endtime - threading._time()
                        if timeout <= 0:
                            break
                self._cond.wait(timeout)
            else:
                self._value -= 1
                rc = True
        return rc

    __enter__ = acquire

    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.

        """
        with self._cond:
            self._value += 1
            self._cond.notify()

    def clear(self):
        """Release all of the semaphore's holds, setting the internal counter
        back to its original limit. Notify an equivalent number of threads
        that they can run."""
        with self._cond:
            to_notify = self._initial - self._value
            self._value = self._initial
            self._cond.notify(to_notify)

    def __exit__(self, t, v, tb):
        self.release()
class Semaphore(object):
    '''This class implements semaphore objects.

    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.

    This is a replica of the Python 3 implementation with a convenience clear
    method. The reason this was duplicated rather than subclassed is that on
    Python 2, the necessary value attributes are hard-private instead of
    soft-private.

    '''
    def __init__(self, value=1):
        pass
    def acquire(self, blocking=True, timeout=None):
        '''Acquire a semaphore, decrementing the internal counter by one.

        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.

        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.

        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.

        When invoked with a timeout other than None, it will block for at most
        timeout seconds. If acquire does not complete successfully in that
        interval, return false. Return true otherwise.

        '''
        pass
    def release(self):
        '''Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.

        '''
        pass
    def clear(self):
        '''Release all of the semaphore's holds, setting the internal counter
        back to its original limit. Notify an equivalent number of threads
        that they can run.'''
        pass
    def __exit__(self, t, v, tb):
        pass
6
4
12
0
7
5
2
0.9
1
2
0
0
5
3
5
5
80
6
39
12
33
35
38
12
32
7
1
5
12
7,875
Aula13/poloniex
Aula13_poloniex/poloniex/concurrency.py
poloniex.concurrency.RecurrentTimer
class RecurrentTimer(Timer): """A repetitive Timer implementation. See: https://hg.python.org/cpython/file/2.7/Lib/threading.py#l1079 """ def run(self): while not self.finished.is_set(): self.finished.wait(self.interval) self.function(*self.args, **self.kwargs) # this should never be reached with a _thread implementation # but we leave it here just in case we're a custom # Python implementation that is messing around with _thread # and isn't up to standard, so we don't have an infinite # loop with a signal handler. self.finished.set()
class RecurrentTimer(Timer): '''A repetitive Timer implementation. See: https://hg.python.org/cpython/file/2.7/Lib/threading.py#l1079 ''' def run(self): pass
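Together these two classes form the rate-limiting pattern the Poloniex client below relies on: each request consumes a permit, and a recurrent timer refills all permits every interval. A minimal sketch, assuming both classes above are importable from the record's module.

import time

from poloniex.concurrency import RecurrentTimer, Semaphore

sem = Semaphore(3)                      # allow at most 3 calls per window
timer = RecurrentTimer(1.0, sem.clear)  # refill all permits once a second
timer.daemon = True
timer.start()

for i in range(6):
    sem.acquire()  # blocks once the current window's permits are spent
    print('call', i, round(time.time(), 2))

timer.cancel()
timer.join()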
2
1
11
1
5
5
2
1.33
1
0
0
0
1
0
1
1
16
2
6
2
4
8
6
2
4
2
1
1
2
7,876
Aula13/poloniex
Aula13_poloniex/poloniex/poloniex.py
poloniex.poloniex.PoloniexPublic
class PoloniexPublic(object): """Client to connect to Poloniex public APIs""" def __init__(self, public_url=_PUBLIC_URL, limit=6, session_class=_requests.Session, session=None, startup_lock=None, semaphore=None, timer=None): """Initialize Poloniex client.""" self._public_url = public_url self.startup_lock = startup_lock or _threading.RLock() self.semaphore = semaphore or Semaphore(limit) self.timer = timer or RecurrentTimer(1.0, self.semaphore.clear) self.session = session or session_class() _atexit.register(self.__del__) def __del__(self): self.timer.cancel() if self.timer.ident is not None: # timer was started self.timer.join() @_api_wrapper def _public(self, command, **params): """Invoke the 'command' public API with optional params.""" params['command'] = command response = self.session.get(self._public_url, params=params) return response def returnTicker(self): """Returns the ticker for all markets.""" return self._public('returnTicker') def return24hVolume(self): """Returns the 24-hour volume for all markets, plus totals for primary currencies.""" return self._public('return24hVolume') def returnOrderBook(self, currencyPair='all', depth='50'): """Returns the order book for a given market, as well as a sequence number for use with the Push API and an indicator specifying whether the market is frozen. You may set currencyPair to "all" to get the order books of all markets.""" return self._public('returnOrderBook', currencyPair=currencyPair, depth=depth) def returnTradeHistory(self, currencyPair, start=None, end=None): """Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.""" return self._public('returnTradeHistory', currencyPair=currencyPair, start=start, end=end) def returnChartData(self, currencyPair, period, start=0, end=2**32-1): """Returns candlestick chart data. Required GET parameters are "currencyPair", "period" (candlestick period in seconds; valid values are 300, 900, 1800, 7200, 14400, and 86400), "start", and "end". "Start" and "end" are given in UNIX timestamp format and used to specify the date range for the data returned.""" return self._public('returnChartData', currencyPair=currencyPair, period=period, start=start, end=end) def returnCurrencies(self): """Returns information about currencies.""" return self._public('returnCurrencies') def returnLoanOrders(self, currency): """Returns the list of loan offers and demands for a given currency, specified by the "currency" GET parameter.""" return self._public('returnLoanOrders', currency=currency)
class PoloniexPublic(object): '''Client to connect to Poloniex public APIs''' def __init__(self, public_url=_PUBLIC_URL, limit=6, session_class=_requests.Session, session=None, startup_lock=None, semaphore=None, timer=None): '''Initialize Poloniex client.''' pass def __del__(self): pass @_api_wrapper def _public(self, command, **params): '''Invoke the 'command' public API with optional params.''' pass def returnTicker(self): '''Returns the ticker for all markets.''' pass def return24hVolume(self): '''Returns the 24-hour volume for all markets, plus totals for primary currencies.''' pass def returnOrderBook(self, currencyPair='all', depth='50'): '''Returns the order book for a given market, as well as a sequence number for use with the Push API and an indicator specifying whether the market is frozen. You may set currencyPair to "all" to get the order books of all markets.''' pass def returnTradeHistory(self, currencyPair, start=None, end=None): '''Returns the past 200 trades for a given market, or up to 50,000 trades between a range specified in UNIX timestamps by the "start" and "end" GET parameters.''' pass def returnChartData(self, currencyPair, period, start=0, end=2**32-1): '''Returns candlestick chart data. Required GET parameters are "currencyPair", "period" (candlestick period in seconds; valid values are 300, 900, 1800, 7200, 14400, and 86400), "start", and "end". "Start" and "end" are given in UNIX timestamp format and used to specify the date range for the data returned.''' pass def returnCurrencies(self): '''Returns information about currencies.''' pass def returnLoanOrders(self, currency): '''Returns the list of loan offers and demands for a given currency, specified by the "currency" GET parameter.''' pass
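For illustration (live network access assumed; the currency pair is arbitrary):

import time

from poloniex.poloniex import PoloniexPublic

public = PoloniexPublic()
ticker = public.returnTicker()
candles = public.returnChartData('BTC_ETH', period=300,
                                 start=int(time.time()) - 3600)  # last hour of 5-minute candles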
12
10
6
0
4
2
1
0.59
1
3
2
1
10
5
10
10
69
11
37
21
22
22
30
17
19
2
1
1
11
7,877
AustralianSynchrotron/lightflow
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AustralianSynchrotron_lightflow/tests/test_base_task.py
tests.test_base_task.test_run_handles_invalid_result.InvalidResultTask
class InvalidResultTask(BaseTask): def run(self, *args, **kwargs): return 'whoops'
class InvalidResultTask(BaseTask): def run(self, *args, **kwargs): pass
2
0
2
0
2
0
1
0
1
0
0
0
1
0
1
25
3
0
3
2
1
0
3
2
1
1
1
0
1
7,878
AustralianSynchrotron/lightflow
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AustralianSynchrotron_lightflow/tests/test_base_task.py
tests.test_base_task.test_run_handles_action_response.Task
class Task(BaseTask): def run(self, *args, **kwargs): return run_result
class Task(BaseTask): def run(self, *args, **kwargs): pass
2
0
2
0
2
0
1
0
1
0
0
0
1
0
1
25
3
0
3
2
1
0
3
2
1
1
1
0
1
7,879
AustralianSynchrotron/lightflow
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AustralianSynchrotron_lightflow/tests/test_base_task.py
tests.test_base_task.test_run_calls_callback_finally_on_stop_task.StoppingTask
class StoppingTask(BaseTask): def run(self, *args, **kwargs): raise StopTask()
class StoppingTask(BaseTask): def run(self, *args, **kwargs): pass
2
0
2
0
2
0
1
0
1
1
1
0
1
0
1
25
3
0
3
2
1
0
3
2
1
1
1
0
1
7,880
AustralianSynchrotron/lightflow
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AustralianSynchrotron_lightflow/tests/test_base_task.py
tests.test_base_task.test_run_calls_callback_finally_on_error.FailingTask
class FailingTask(BaseTask): def run(self, *args, **kwargs): raise Exception()
class FailingTask(BaseTask): def run(self, *args, **kwargs): pass
2
0
2
0
2
0
1
0
1
1
0
0
1
0
1
25
3
0
3
2
1
0
3
2
1
1
1
0
1
7,881
AustralianSynchrotron/lightflow
/Users/umroot/Documents/PhD_works/PhD-Core-Contents/Class-level-dataset-curation/data/git_repos_for_analysis/AustralianSynchrotron_lightflow/tests/test_base_task.py
tests.test_base_task.test_run_calls_callback_finally_on_abort_workflow.AbortingTask
class AbortingTask(BaseTask): def run(self, *args, **kwargs): raise AbortWorkflow()
class AbortingTask(BaseTask): def run(self, *args, **kwargs): pass
2
0
2
0
2
0
1
0
1
1
1
0
1
0
1
25
3
0
3
2
1
0
3
2
1
1
1
0
1
7,882
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/exceptions.py
lightflow.models.exceptions.DataInvalidIndex
class DataInvalidIndex(RuntimeError): pass
class DataInvalidIndex(RuntimeError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
11
2
0
2
1
1
0
2
1
1
0
4
0
0
7,883
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/exceptions.py
lightflow.models.exceptions.EventTypeUnknown
class EventTypeUnknown(RuntimeError): pass
class EventTypeUnknown(RuntimeError): pass
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
11
2
0
2
1
1
0
2
1
1
0
4
0
0
7,884
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/signal.py
lightflow.models.signal.SignalConnection
class SignalConnection: """ The connection to the redis signal broker database. Args: host (str): The host of the redis database. port (int): The port of the redis database. database (int): The number of the database. password (str): Optional password for the redis database. auto_connect (bool): Set to True to connect to the redis broker database. polling_time (float): The polling time for signal requests in seconds. """ def __init__(self, host, port, database, *, password=None, auto_connect=False, polling_time=0.5): self._host = host self._port = port self._database = database self._password = password self._polling_time = polling_time self._connection = None if auto_connect: self.connect() @property def is_connected(self): """ Returns the status of the signal connection. """ return self._connection is not None @property def connection(self): """ Returns the connection object or None if the connection is not open. """ return self._connection @property def polling_time(self): """ Returns the polling time for signal requests in seconds. """ return self._polling_time def connect(self): """ Connects to the redis database. """ self._connection = StrictRedis( host=self._host, port=self._port, db=self._database, password=self._password)
class SignalConnection:
    ''' The connection to the redis signal broker database.

    Args:
        host (str): The host of the redis database.
        port (int): The port of the redis database.
        database (int): The number of the database.
        password (str): Optional password for the redis database.
        auto_connect (bool): Set to True to connect to the redis broker database.
        polling_time (float): The polling time for signal requests in seconds.
    '''
    def __init__(self, host, port, database, *, password=None, auto_connect=False,
                 polling_time=0.5):
        pass
    @property
    def is_connected(self):
        ''' Returns the status of the signal connection. '''
        pass
    @property
    def connection(self):
        ''' Returns the connection object or None if the connection is not open. '''
        pass
    @property
    def polling_time(self):
        ''' Returns the polling time for signal requests in seconds. '''
        pass
    def connect(self):
        ''' Connects to the redis database. '''
        pass
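A short usage sketch, assuming a redis server at the hypothetical host and port below:

from lightflow.models.signal import SignalConnection

conn = SignalConnection('localhost', 6379, 0, auto_connect=True)
if conn.is_connected:
    conn.connection.ping()  # the raw StrictRedis handle is exposed via .connection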
9
5
5
0
4
1
1
0.5
0
0
0
0
5
6
5
5
45
6
26
16
16
13
18
12
12
2
0
1
6
7,885
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task_data.py
lightflow.models.task_data.TaskData
class TaskData: """ This class represents a single dataset that is passed between tasks. It behaves like a dictionary but also contains a history of all tasks that have contributed to this dataset. Args: data (dict): A dictionary with the initial data that should be stored. task_history (list): A list of task names that have contributed to this data. """ def __init__(self, data=None, *, task_history=None): self._data = data if data is not None else {} self._task_history = task_history if task_history is not None else [] def add_task_history(self, task_name): """ Add a task name to the list of tasks that have contributed to this dataset. Args: task_name (str): The name of the task that contributed. """ self._task_history.append(task_name) @property def data(self): """ Return the data of this dataset. """ return self._data @property def task_history(self): """ Return the list of task names that have contributed to this dataset. """ return self._task_history def get(self, key, default=None): """ Access a single value in the dataset by its key Args: key (str): The key under which the value is stored. default: Return this value if the key cannot be found. Returns: object: The value that is stored under the specified key. """ return self._data.get(key, default) def set(self, key, value): """ Change the value of a field in the dataset. Args: key (str): The key pointing to the value that should be changed. value: The new value that should be set. """ self._data[key] = value def merge(self, dataset): """ Merge the specified dataset on top of the existing data. This replaces all values in the existing dataset with the values from the given dataset. Args: dataset (TaskData): A reference to the TaskData object that should be merged on top of the existing object. """ def merge_data(source, dest): for key, value in source.items(): if isinstance(value, dict): merge_data(value, dest.setdefault(key, {})) else: dest[key] = value return dest merge_data(dataset.data, self._data) for h in dataset.task_history: if h not in self._task_history: self._task_history.append(h) def __deepcopy__(self, memo): """ Copy the object. """ return TaskData(data=deepcopy(self._data, memo), task_history=self._task_history[:]) def __getitem__(self, item): """ Access a single value in the dataset by its key. """ return self._data[item] def __setitem__(self, key, value): """ Change the value of a field in the dataset. """ self._data[key] = value def __delitem__(self, key): """ Delete a field in the dataset. """ del self._data[key] def __contains__(self, item): """ Checks whether the item is present in the dataset """ return item in self._data def __repr__(self): """ Return a representation of the object. """ return '{}({})'.format(self.__class__.__name__, self._data) def __str__(self): """ Return a string of the data. """ return str(self._data)
class TaskData: ''' This class represents a single dataset that is passed between tasks. It behaves like a dictionary but also contains a history of all tasks that have contributed to this dataset. Args: data (dict): A dictionary with the initial data that should be stored. task_history (list): A list of task names that have contributed to this data. ''' def __init__(self, data=None, *, task_history=None): pass def add_task_history(self, task_name): ''' Add a task name to the list of tasks that have contributed to this dataset. Args: task_name (str): The name of the task that contributed. ''' pass @property def data(self): ''' Return the data of this dataset. ''' pass @property def task_history(self): ''' Return the list of task names that have contributed to this dataset. ''' pass def get(self, key, default=None): ''' Access a single value in the dataset by its key Args: key (str): The key under which the value is stored. default: Return this value if the key cannot be found. Returns: object: The value that is stored under the specified key. ''' pass def set(self, key, value): ''' Change the value of a field in the dataset. Args: key (str): The key pointing to the value that should be changed. value: The new value that should be set. ''' pass def merge(self, dataset): ''' Merge the specified dataset on top of the existing data. This replaces all values in the existing dataset with the values from the given dataset. Args: dataset (TaskData): A reference to the TaskData object that should be merged on top of the existing object. ''' pass def merge_data(source, dest): pass def __deepcopy__(self, memo): ''' Copy the object. ''' pass def __getitem__(self, item): ''' Access a single value in the dataset by its key. ''' pass def __setitem__(self, key, value): ''' Change the value of a field in the dataset. ''' pass def __delitem__(self, key): ''' Delete a field in the dataset. ''' pass def __contains__(self, item): ''' Checks whether the item is present in the dataset ''' pass def __repr__(self): ''' Return a representation of the object. ''' pass def __str__(self): ''' Return a string of the data. ''' pass
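A worked example of the merge semantics described above (pure class logic, runnable once TaskData is imported from the record's module):

from lightflow.models.task_data import TaskData

first = TaskData({'x': 1, 'nested': {'k': 'old'}}, task_history=['task_a'])
second = TaskData({'nested': {'k': 'new'}, 'y': 2}, task_history=['task_b'])

first.merge(second)
print(first.data)                # {'x': 1, 'nested': {'k': 'new'}, 'y': 2}
print(first.task_history)        # ['task_a', 'task_b']
print('y' in first, first['x'])  # True 1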
18
14
6
1
3
2
1
0.91
0
2
0
0
14
2
14
14
105
23
43
22
25
39
39
20
23
3
0
2
21
7,886
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task_data.py
lightflow.models.task_data.MultiTaskData
class MultiTaskData: """ Manages multiple TaskData datasets and their aliases. This class implements the data object that is being passed between tasks. It consists of one or more TaskData datasets in order to accommodate multiple inputs to a single task. Each dataset can be accessed by its index or by one or more aliases. There is a default dataset, which is used whenever the user does not specify the exact dataset to work with. Args: dataset (TaskData): An initial TaskData dataset. aliases (list): A list of aliases for the initial dataset. """ def __init__(self, *, dataset=None, aliases=None): self._datasets = [] if dataset is None else [dataset] self._aliases = {} if aliases is None else {a: 0 for a in aliases} self._default_index = 0 @property def default_index(self): """ Return the index of the default dataset. """ return self._default_index @property def default_dataset(self): """ Return the default dataset. Returns: TaskData: A reference to the default dataset. """ return self.get_by_index(self._default_index) def add_dataset(self, task_name, dataset=None, *, aliases=None): """ Add a new dataset to the MultiTaskData. Args: task_name (str): The name of the task from which the dataset was received. dataset (TaskData): The dataset that should be added. aliases (list): A list of aliases that should be registered with the dataset. """ self._datasets.append(dataset if dataset is not None else TaskData()) last_index = len(self._datasets) - 1 self._aliases[task_name] = last_index if aliases is not None: for alias in aliases: self._aliases[alias] = last_index if len(self._datasets) == 1: self._default_index = 0 def add_alias(self, alias, index): """ Add an alias pointing to the specified index. Args: alias (str): The alias that should point to the given index. index (int): The index of the dataset for which an alias should be added. Raises: DataInvalidIndex: If the index does not represent a valid dataset. """ if index >= len(self._datasets): raise DataInvalidIndex('A dataset with index {} does not exist'.format(index)) self._aliases[alias] = index def flatten(self, in_place=True): """ Merge all datasets into a single dataset. The default dataset is the last dataset to be merged, as it is considered to be the primary source of information and should overwrite all existing fields with the same key. Args: in_place (bool): Set to ``True`` to replace the existing datasets with the merged one. If set to ``False``, will return a new MultiTaskData object containing the merged dataset. Returns: MultiTaskData: If the in_place flag is set to False. """ new_dataset = TaskData() for i, dataset in enumerate(self._datasets): if i != self._default_index: new_dataset.merge(dataset) new_dataset.merge(self.default_dataset) # point all aliases to the new, single dataset new_aliases = {alias: 0 for alias, _ in self._aliases.items()} # replace existing datasets or return a new MultiTaskData object if in_place: self._datasets = [new_dataset] self._aliases = new_aliases self._default_index = 0 else: return MultiTaskData(dataset=new_dataset, aliases=list(new_aliases.keys())) def set_default_by_alias(self, alias): """ Set the default dataset by its alias. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: alias (str): The alias of the dataset that should be made the default. Raises: DataInvalidAlias: If the alias does not represent a valid dataset. 
""" if alias not in self._aliases: raise DataInvalidAlias('A dataset with alias {} does not exist'.format(alias)) self._default_index = self._aliases[alias] def set_default_by_index(self, index): """ Set the default dataset by its index. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: index (int): The index of the dataset that should be made the default. Raises: DataInvalidIndex: If the index does not represent a valid dataset. """ if index >= len(self._datasets): raise DataInvalidIndex('A dataset with index {} does not exist'.format(index)) self._default_index = index def get_by_alias(self, alias): """ Return a dataset by its alias. Args: alias (str): The alias of the dataset that should be returned. Raises: DataInvalidAlias: If the alias does not represent a valid dataset. """ if alias not in self._aliases: raise DataInvalidAlias('A dataset with alias {} does not exist'.format(alias)) return self.get_by_index(self._aliases[alias]) def get_by_index(self, index): """ Return a dataset by its index. Args: index (int): The index of the dataset that should be returned. Raises: DataInvalidIndex: If the index does not represent a valid dataset. """ if index >= len(self._datasets): raise DataInvalidIndex('A dataset with index {} does not exist'.format(index)) return self._datasets[index] def add_task_history(self, task_name): """ Add a task name to the list of tasks that have contributed to all datasets. Args: task_name (str): The name of the task that contributed. """ for dataset in self._datasets: dataset.add_task_history(task_name) def __getitem__(self, item): """ Access a single value in the default dataset by its key. """ return self.default_dataset[item] def __setitem__(self, key, value): """ Change the value of a field in the default dataset. """ self.default_dataset[key] = value def __delitem__(self, key): """ Delete a field in the default dataset. """ del self.default_dataset[key] def __contains__(self, item): """ Checks whether the item is present in the dataset """ return item in self.default_dataset def __call__(self, alias): """ Shorthand notation for accessing a dataset by its alias. """ return self.get_by_alias(alias) def __iter__(self): """ Forward iteration requests to the internal list of datasets. """ return iter(self._datasets)
class MultiTaskData: ''' Manages multiple TaskData datasets and their aliases. This class implements the data object that is being passed between tasks. It consists of one or more TaskData datasets in order to accommodate multiple inputs to a single task. Each dataset can be accessed by its index or by one or more aliases. There is a default dataset, which is used whenever the user does not specify the exact dataset to work with. Args: dataset (TaskData): An initial TaskData dataset. aliases (list): A list of aliases for the initial dataset. ''' def __init__(self, *, dataset=None, aliases=None): pass @property def default_index(self): ''' Return the index of the default dataset. ''' pass @property def default_dataset(self): ''' Return the default dataset. Returns: TaskData: A reference to the default dataset. ''' pass def add_dataset(self, task_name, dataset=None, *, aliases=None): ''' Add a new dataset to the MultiTaskData. Args: task_name (str): The name of the task from which the dataset was received. dataset (TaskData): The dataset that should be added. aliases (list): A list of aliases that should be registered with the dataset. ''' pass def add_alias(self, alias, index): ''' Add an alias pointing to the specified index. Args: alias (str): The alias that should point to the given index. index (int): The index of the dataset for which an alias should be added. Raises: DataInvalidIndex: If the index does not represent a valid dataset. ''' pass def flatten(self, in_place=True): ''' Merge all datasets into a single dataset. The default dataset is the last dataset to be merged, as it is considered to be the primary source of information and should overwrite all existing fields with the same key. Args: in_place (bool): Set to ``True`` to replace the existing datasets with the merged one. If set to ``False``, will return a new MultiTaskData object containing the merged dataset. Returns: MultiTaskData: If the in_place flag is set to False. ''' pass def set_default_by_alias(self, alias): ''' Set the default dataset by its alias. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: alias (str): The alias of the dataset that should be made the default. Raises: DataInvalidAlias: If the alias does not represent a valid dataset. ''' pass def set_default_by_index(self, index): ''' Set the default dataset by its index. After changing the default dataset, all calls without explicitly specifying the dataset by index or alias will be redirected to this dataset. Args: index (int): The index of the dataset that should be made the default. Raises: DataInvalidIndex: If the index does not represent a valid dataset. ''' pass def get_by_alias(self, alias): ''' Return a dataset by its alias. Args: alias (str): The alias of the dataset that should be returned. Raises: DataInvalidAlias: If the alias does not represent a valid dataset. ''' pass def get_by_index(self, index): ''' Return a dataset by its index. Args: index (int): The index of the dataset that should be returned. Raises: DataInvalidIndex: If the index does not represent a valid dataset. ''' pass def add_task_history(self, task_name): ''' Add a task name to the list of tasks that have contributed to all datasets. Args: task_name (str): The name of the task that contributed. ''' pass def __getitem__(self, item): ''' Access a single value in the default dataset by its key. 
''' pass def __setitem__(self, key, value): ''' Change the value of a field in the default dataset. ''' pass def __delitem__(self, key): ''' Delete a field in the default dataset. ''' pass def __contains__(self, item): ''' Checks whether the item is present in the dataset ''' pass def __call__(self, alias): ''' Shorthand notation for accessing a dataset by its alias. ''' pass def __iter__(self): ''' Forward iteration requests to the internal list of datasets. ''' pass
20
17
10
2
4
4
2
1.16
0
5
3
0
17
3
17
17
194
47
68
29
48
79
65
27
47
5
0
2
32
7,887
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task_context.py
lightflow.models.task_context.TaskContext
class TaskContext: """ This class contains information about the context the task is running in. """ def __init__(self, task_name, dag_name, workflow_name, workflow_id, worker_hostname): """ Initialize the task context object. Args: task_name (str): The name of the task. dag_name (str): The name of the DAG the task was started from. workflow_name (str): The name of the workflow the task was started from. workflow_id (str): The id of the workflow this task is member of. worker_hostname (str): The name of the worker executing this task. """ self.task_name = task_name self.dag_name = dag_name self.workflow_name = workflow_name self.workflow_id = workflow_id self.worker_hostname = worker_hostname def to_dict(self): """ Return the task context content as a dictionary. """ return { 'task_name': self.task_name, 'dag_name': self.dag_name, 'workflow_name': self.workflow_name, 'workflow_id': self.workflow_id, 'worker_hostname': self.worker_hostname }
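A trivial usage sketch; all argument values below are hypothetical placeholders:

from lightflow.models.task_context import TaskContext

ctx = TaskContext(task_name='fit', dag_name='main_dag',
                  workflow_name='demo_workflow', workflow_id='wf-001',
                  worker_hostname='worker-01')
print(ctx.to_dict()['task_name'])   # 'fit'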
class TaskContext: ''' This class contains information about the context the task is running in. ''' def __init__(self, task_name, dag_name, workflow_name, workflow_id, worker_hostname): ''' Initialize the task context object. Args: task_name (str): The name of the task. dag_name (str): The name of the DAG the task was started from. workflow_name (str): The name of the workflow the task was started from. workflow_id (str): The id of the workflow this task is member of. worker_hostname (str): The name of the worker executing this task. ''' pass def to_dict(self): ''' Return the task context content as a dictionary. ''' pass
3
3
12
1
7
5
1
0.67
0
0
0
0
2
5
2
2
28
3
15
8
12
10
9
8
6
1
0
0
2
7,888
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task.py
lightflow.models.task.TaskStatus
class TaskStatus: """ Constants for flagging the status of the task after it completed running. """ Success = 1 Stopped = 2 Aborted = 3 Error = 4
class TaskStatus: ''' Constants for flagging the status of the task after it completed running. ''' pass
1
1
0
0
0
0
0
0.2
0
0
0
0
0
0
0
0
6
0
5
5
4
1
5
5
4
0
0
0
0
7,889
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task.py
lightflow.models.task.TaskState
class TaskState: """ Constants for flagging the current state of the task. """ Init = 1 Waiting = 2 Running = 3 Completed = 4 Stopped = 5 Aborted = 6
class TaskState: ''' Constants for flagging the current state of the task. ''' pass
1
1
0
0
0
0
0
0.14
0
0
0
0
0
0
0
0
8
0
7
7
6
1
7
7
6
0
0
0
0
7,890
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/task.py
lightflow.models.task.BaseTask
class BaseTask:
    """ The base class for all tasks.

    Tasks should inherit from this class and implement the run() method.
    """
    def __init__(self, name, *, queue=DefaultJobQueueName.Task,
                 callback_init=None, callback_finally=None,
                 force_run=False, propagate_skip=True):
        """ Initialize the base task.

        The dag_name and workflow_name attributes are filled at runtime.

        Args:
            name (str): The name of the task.
            queue (str): Name of the queue the task should be scheduled to.
            callback_init (callable): A callable that is called shortly before the
                task is run. The definition is:
                    def (data, store, signal, context)
                where data is the task data, store the workflow data store, signal
                the task signal and context the task context.
            callback_finally (callable): A callable that is always called at the end
                of a task, regardless of whether it completed successfully, was
                stopped or was aborted. The definition is:
                    def (status, data, store, signal, context)
                where status specifies whether the task was
                    success: TaskStatus.Success
                    stopped: TaskStatus.Stopped
                    aborted: TaskStatus.Aborted
                    raised exception: TaskStatus.Error
                data is the task data, store the workflow data store, signal the
                task signal and context the task context.
            force_run (bool): Run the task even if it is flagged to be skipped.
            propagate_skip (bool): Propagate the skip flag to the next task.
        """
        self._name = name
        self._queue = queue
        self._callback_init = callback_init
        self._callback_finally = callback_finally
        self._force_run = force_run
        self._propagate_skip = propagate_skip

        self._skip = False
        self._state = TaskState.Init
        self._celery_result = None

        self.workflow_name = None
        self.dag_name = None

    @property
    def name(self):
        """ Returns the name of the task. """
        return self._name

    @property
    def queue(self):
        """ Returns the queue the task should be scheduled to. """
        return self._queue

    @property
    def has_to_run(self):
        """ Returns whether the task has to run, even if the DAG would skip it. """
        return self._force_run

    @property
    def propagate_skip(self):
        """ Returns whether the skip flag should be propagated to the successor tasks. """
        return self._propagate_skip

    @property
    def is_waiting(self):
        """ Internal state: returns whether the task is waiting in the DAG to be run. """
        return self._state == TaskState.Waiting

    @property
    def is_running(self):
        """ Internal state: returns whether the task is currently running. """
        return self._state == TaskState.Running

    @property
    def is_completed(self):
        """ Internal state: returns whether the task has completed successfully. """
        return self._state == TaskState.Completed

    @property
    def is_stopped(self):
        """ Internal state: returns whether the task was stopped. """
        return self._state == TaskState.Stopped

    @property
    def is_aborted(self):
        """ Internal state: returns whether the task was aborted. """
        return self._state == TaskState.Aborted

    @property
    def is_skipped(self):
        """ Internal state: returns whether the task was skipped. """
        return self._skip

    @is_skipped.setter
    def is_skipped(self, value):
        """ Set whether the task has been skipped.

        Args:
            value (bool): Set to True if the task was skipped.
        """
        self._skip = value

    @property
    def state(self):
        """ Returns the internal state of the task. """
        return self._state

    @state.setter
    def state(self, state):
        """ Sets the internal state of the task.

        Args:
            state (TaskState): The new state of the task
        """
        self._state = state

    @property
    def celery_pending(self):
        """ Celery state: returns whether the task is queued. """
        if self.has_celery_result:
            return self.celery_result.state == "PENDING"
        else:
            return False

    @property
    def celery_completed(self):
        """ Celery state: returns whether the execution of the task has completed. """
        if self.has_celery_result:
            return self.celery_result.ready()
        else:
            return False

    @property
    def celery_failed(self):
        """ Celery state: returns whether the execution of the task failed. """
        if self.has_celery_result:
            return self.celery_result.failed()
        else:
            return False

    @property
    def celery_state(self):
        """ Returns the current celery state of the task as a string. """
        if self.has_celery_result:
            return self.celery_result.state
        else:
            return "NOT_QUEUED"

    @property
    def has_celery_result(self):
        """ Returns whether the task has a result from celery.

        This indicates that the task is either queued, running or finished.
        """
        return self.celery_result is not None

    @property
    def celery_result(self):
        """ Returns the celery result object for this task. """
        return self._celery_result

    @celery_result.setter
    def celery_result(self, result):
        """ Sets the celery result object for this task.

        Args:
            result (AsyncResult): The result of the celery queuing call.
        """
        self._celery_result = result

    def clear_celery_result(self):
        """ Removes the task's celery result from the result backend. """
        if self.has_celery_result:
            self._celery_result.forget()

    def _run(self, data, store, signal, context, *,
             success_callback=None, stop_callback=None, abort_callback=None):
        """ The internal run method that decorates the public run method.

        This method makes sure data is being passed to and from the task.

        Args:
            data (MultiTaskData): The data object that has been passed from the
                predecessor task.
            store (DataStoreDocument): The persistent data store object that allows the
                task to store data for access across the current workflow run.
            signal (TaskSignal): The signal object for tasks. It wraps the construction
                and sending of signals into easy-to-use methods.
            context (TaskContext): The context in which the task runs.
            success_callback: This function is called when the task completed
                successfully.
            stop_callback: This function is called when a StopTask exception was raised.
            abort_callback: This function is called when an AbortWorkflow exception
                was raised.

        Raises:
            TaskReturnActionInvalid: If the return value of the task is not an
                Action object.

        Returns:
            Action: An Action object containing the data that should be passed on
                to the next task and optionally a list of successor tasks that
                should be executed.
        """
        if data is None:
            data = MultiTaskData()
            data.add_dataset(self._name)

        try:
            if self._callback_init is not None:
                self._callback_init(data, store, signal, context)

            result = self.run(data, store, signal, context)

            if self._callback_finally is not None:
                self._callback_finally(TaskStatus.Success, data, store,
                                       signal, context)

            if success_callback is not None:
                success_callback()

        # the task should be stopped and optionally all successor tasks skipped
        except StopTask as err:
            if self._callback_finally is not None:
                self._callback_finally(TaskStatus.Stopped, data, store,
                                       signal, context)

            if stop_callback is not None:
                stop_callback(exc=err)

            result = Action(data, limit=[]) if err.skip_successors else None

        # the workflow should be stopped immediately
        except AbortWorkflow as err:
            if self._callback_finally is not None:
                self._callback_finally(TaskStatus.Aborted, data, store,
                                       signal, context)

            if abort_callback is not None:
                abort_callback(exc=err)

            result = None
            signal.stop_workflow()

        # catch any other exception, call the finally callback, then re-raise
        except:
            if self._callback_finally is not None:
                self._callback_finally(TaskStatus.Error, data, store,
                                       signal, context)

            signal.stop_workflow()
            raise

        # handle the returned data (either implicitly or as a returned Action
        # object) by flattening all, possibly modified, input datasets in the
        # MultiTaskData down to a single output dataset.
        if result is None:
            data.flatten(in_place=True)
            data.add_task_history(self.name)
            return Action(data)
        else:
            if not isinstance(result, Action):
                raise TaskReturnActionInvalid()

            result.data.flatten(in_place=True)
            result.data.add_task_history(self.name)
            return result

    def run(self, data, store, signal, context, **kwargs):
        """ The main run method of a task. Implement this method in inherited classes.

        Args:
            data (MultiTaskData): The data object that has been passed from the
                predecessor task.
            store (DataStoreDocument): The persistent data store object that allows the
                task to store data for access across the current workflow run.
            signal (TaskSignal): The signal object for tasks. It wraps the construction
                and sending of signals into easy-to-use methods.
            context (TaskContext): The context in which the task runs.

        Returns:
            Action: An Action object containing the data that should be passed on
                to the next task and optionally a list of successor tasks that
                should be executed.
        """
        pass
class BaseTask:
    ''' The base class for all tasks.

    Tasks should inherit from this class and implement the run() method.
    '''
    def __init__(self, name, *, queue=DefaultJobQueueName.Task, callback_init=None, callback_finally=None, force_run=False, propagate_skip=True):
        ''' Initialize the base task.

        The dag_name and workflow_name attributes are filled at runtime.

        Args:
            name (str): The name of the task.
            queue (str): Name of the queue the task should be scheduled to.
            callback_init (callable): A callable that is called shortly before the
                task is run. The definition is:
                    def (data, store, signal, context)
                where data is the task data, store the workflow data store, signal
                the task signal and context the task context.
            callback_finally (callable): A callable that is always called at the end
                of a task, regardless of whether it completed successfully, was
                stopped or was aborted. The definition is:
                    def (status, data, store, signal, context)
                where status specifies whether the task was
                    success: TaskStatus.Success
                    stopped: TaskStatus.Stopped
                    aborted: TaskStatus.Aborted
                    raised exception: TaskStatus.Error
                data is the task data, store the workflow data store, signal the
                task signal and context the task context.
            force_run (bool): Run the task even if it is flagged to be skipped.
            propagate_skip (bool): Propagate the skip flag to the next task.
        '''
        pass

    @property
    def name(self):
        ''' Returns the name of the task. '''
        pass

    @property
    def queue(self):
        ''' Returns the queue the task should be scheduled to. '''
        pass

    @property
    def has_to_run(self):
        ''' Returns whether the task has to run, even if the DAG would skip it. '''
        pass

    @property
    def propagate_skip(self):
        ''' Returns whether the skip flag should be propagated to the successor tasks. '''
        pass

    @property
    def is_waiting(self):
        ''' Internal state: returns whether the task is waiting in the DAG to be run. '''
        pass

    @property
    def is_running(self):
        ''' Internal state: returns whether the task is currently running. '''
        pass

    @property
    def is_completed(self):
        ''' Internal state: returns whether the task has completed successfully. '''
        pass

    @property
    def is_stopped(self):
        ''' Internal state: returns whether the task was stopped. '''
        pass

    @property
    def is_aborted(self):
        ''' Internal state: returns whether the task was aborted. '''
        pass

    @property
    def is_skipped(self):
        ''' Internal state: returns whether the task was skipped. '''
        pass

    @is_skipped.setter
    def is_skipped(self, value):
        ''' Set whether the task has been skipped.

        Args:
            value (bool): Set to True if the task was skipped.
        '''
        pass

    @property
    def state(self):
        ''' Returns the internal state of the task. '''
        pass

    @state.setter
    def state(self, state):
        ''' Sets the internal state of the task.

        Args:
            state (TaskState): The new state of the task
        '''
        pass

    @property
    def celery_pending(self):
        ''' Celery state: returns whether the task is queued. '''
        pass

    @property
    def celery_completed(self):
        ''' Celery state: returns whether the execution of the task has completed. '''
        pass

    @property
    def celery_failed(self):
        ''' Celery state: returns whether the execution of the task failed. '''
        pass

    @property
    def celery_state(self):
        ''' Returns the current celery state of the task as a string. '''
        pass

    @property
    def has_celery_result(self):
        ''' Returns whether the task has a result from celery.

        This indicates that the task is either queued, running or finished.
        '''
        pass

    @property
    def celery_result(self):
        ''' Returns the celery result object for this task. '''
        pass

    @celery_result.setter
    def celery_result(self, result):
        ''' Sets the celery result object for this task.

        Args:
            result (AsyncResult): The result of the celery queuing call.
        '''
        pass

    def clear_celery_result(self):
        ''' Removes the task's celery result from the result backend. '''
        pass

    def _run(self, data, store, signal, context, *, success_callback=None, stop_callback=None, abort_callback=None):
        ''' The internal run method that decorates the public run method.

        This method makes sure data is being passed to and from the task.

        Args:
            data (MultiTaskData): The data object that has been passed from the
                predecessor task.
            store (DataStoreDocument): The persistent data store object that allows the
                task to store data for access across the current workflow run.
            signal (TaskSignal): The signal object for tasks. It wraps the construction
                and sending of signals into easy-to-use methods.
            context (TaskContext): The context in which the task runs.
            success_callback: This function is called when the task completed
                successfully.
            stop_callback: This function is called when a StopTask exception was raised.
            abort_callback: This function is called when an AbortWorkflow exception
                was raised.

        Raises:
            TaskReturnActionInvalid: If the return value of the task is not an
                Action object.

        Returns:
            Action: An Action object containing the data that should be passed on
                to the next task and optionally a list of successor tasks that
                should be executed.
        '''
        pass

    def run(self, data, store, signal, context, **kwargs):
        ''' The main run method of a task. Implement this method in inherited classes.

        Args:
            data (MultiTaskData): The data object that has been passed from the
                predecessor task.
            store (DataStoreDocument): The persistent data store object that allows the
                task to store data for access across the current workflow run.
            signal (TaskSignal): The signal object for tasks. It wraps the construction
                and sending of signals into easy-to-use methods.
            context (TaskContext): The context in which the task runs.

        Returns:
            Action: An Action object containing the data that should be passed on
                to the next task and optionally a list of successor tasks that
                should be executed.
        '''
        pass
45
25
10
1
5
4
2
0.8
0
8
8
7
24
11
24
24
293
53
133
61
85
107
105
37
80
16
0
2
44
7,891
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/signal.py
lightflow.models.signal.Server
class Server:
    """ The server for the signal system, listening for requests from clients.

    This implementation retrieves requests from a list stored in redis. Each request
    is implemented using the Request class and stored as a pickled object. The response
    is stored under a unique response id, so the client can pick up the response.
    """
    def __init__(self, connection, request_key):
        """ Initialises the signal server.

        Args:
            connection: Reference to a signal connection object.
            request_key (str): The key under which the list of requests is stored.
        """
        self._connection = connection
        self._request_key = '{}:{}'.format(SIGNAL_REDIS_PREFIX, request_key)

    def receive(self):
        """ Returns a single request.

        Takes the first request from the list of requests and returns it. If the list
        is empty, None is returned.

        Returns:
            Request: If a new request is available a Request object is returned,
                     otherwise None is returned.
        """
        pickled_request = self._connection.connection.lpop(self._request_key)
        return pickle.loads(pickled_request) if pickled_request is not None else None

    def send(self, response):
        """ Send a response back to the client that issued a request.

        Args:
            response (Response): Reference to the response object that should be sent.
        """
        self._connection.connection.set('{}:{}'.format(SIGNAL_REDIS_PREFIX,
                                                       response.uid),
                                        pickle.dumps(response))

    def restore(self, request):
        """ Push the request back onto the queue.

        Args:
            request (Request): Reference to a request object that should be pushed back
                onto the request queue.
        """
        self._connection.connection.rpush(self._request_key, pickle.dumps(request))

    def clear(self):
        """ Deletes the list of requests from the redis database. """
        self._connection.connection.delete(self._request_key)
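A sketch of a dispatch loop built on receive() and send(); constructing the Server itself requires a signal connection object, which is omitted here. The 'stop_workflow' action name is the one sent by WorkerLifecycle further down in this file.

from lightflow.models.signal import Response

def serve_one_request(server):
    """ Pop a single request off the redis list and answer its sender. """
    request = server.receive()            # None when the list is empty
    if request is None:
        return
    handled = request.action == 'stop_workflow'
    # the response is keyed by the request uid so the client can find it
    server.send(Response(success=handled, uid=request.uid,
                         payload={'action': request.action}))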
class Server:
    ''' The server for the signal system, listening for requests from clients.

    This implementation retrieves requests from a list stored in redis. Each request
    is implemented using the Request class and stored as a pickled object. The response
    is stored under a unique response id, so the client can pick up the response.
    '''
    def __init__(self, connection, request_key):
        ''' Initialises the signal server.

        Args:
            connection: Reference to a signal connection object.
            request_key (str): The key under which the list of requests is stored.
        '''
        pass

    def receive(self):
        ''' Returns a single request.

        Takes the first request from the list of requests and returns it. If the list
        is empty, None is returned.

        Returns:
            Request: If a new request is available a Request object is returned,
                     otherwise None is returned.
        '''
        pass

    def send(self, response):
        ''' Send a response back to the client that issued a request.

        Args:
            response (Response): Reference to the response object that should be sent.
        '''
        pass

    def restore(self, request):
        ''' Push the request back onto the queue.

        Args:
            request (Request): Reference to a request object that should be pushed back
                onto the request queue.
        '''
        pass

    def clear(self):
        ''' Deletes the list of requests from the redis database. '''
        pass
6
6
8
1
3
4
1
1.93
0
0
0
0
5
2
5
5
51
10
14
9
8
27
13
9
7
2
0
0
6
7,892
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/signal.py
lightflow.models.signal.Response
class Response: """ The response that is sent from the server to the client. This implements a custom response protocol with: - success: Specifies whether the request was successful. - payload: A dictionary with response data. The content depends on the type of response. - uid: A unique ID that matches the id of the initial request. """ def __init__(self, success, uid, *, payload=None): """ Initialise the response object. Args: success (bool): True if the request was successful. uid (str): Unique response id. payload (dict): A dictionary with the response data. """ self.success = success self.uid = uid self.payload = payload if payload is not None else {}
class Response: ''' The response that is sent from the server to the client. This implements a custom response protocol with: - success: Specifies whether the request was successful. - payload: A dictionary with response data. The content depends on the type of response. - uid: A unique ID that matches the id of the initial request. ''' def __init__(self, success, uid, *, payload=None): ''' Initialise the response object. Args: success (bool): True if the request was successful. uid (str): Unique response id. payload (dict): A dictionary with the response data. ''' pass
2
2
11
1
4
6
2
2.6
0
0
0
0
1
3
1
1
20
2
5
5
3
13
5
5
3
2
0
0
2
7,893
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/signal.py
lightflow.models.signal.Request
class Request: """ The request that is sent from a client to the server. This implements a custom request protocol with: - action: A string representing the requested action that should be executed by the server. - payload: A dictionary with data that is available to the action. The content depends on the type of action. - uid: A unique ID that is used to tag the response that follows this request. """ def __init__(self, action, *, payload=None): """ Initialise the request object. Args: action (str): A string representing the requested action that should be executed by the server. payload (dict): A dictionary with data that is available to the action. """ self.action = action self.payload = payload if payload is not None else {} self.uid = uuid.uuid4()
class Request: ''' The request that is sent from a client to the server. This implements a custom request protocol with: - action: A string representing the requested action that should be executed by the server. - payload: A dictionary with data that is available to the action. The content depends on the type of action. - uid: A unique ID that is used to tag the response that follows this request. ''' def __init__(self, action, *, payload=None): ''' Initialise the request object. Args: action (str): A string representing the requested action that should be executed by the server. payload (dict): A dictionary with data that is available to the action. ''' pass
2
2
11
1
4
6
2
2.8
0
0
0
0
1
3
1
1
21
2
5
5
3
14
5
5
3
2
0
0
2
7,894
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/signal.py
lightflow.models.signal.Client
class Client: """ The client for the signal system, sending requests to the server. This implementation sends requests to a list stored in redis. Each request is implemented using the Request class and stored as a pickled object. The response from the server is stored under the unique response id. """ def __init__(self, connection, request_key): """ Initialises the signal client. Args: connection: Reference to a signal connection object. request_key (str): The key under which the list of requests is stored. """ self._connection = connection self._request_key = '{}:{}'.format(SIGNAL_REDIS_PREFIX, request_key) def send(self, request): """ Send a request to the server and wait for its response. Args: request (Request): Reference to a request object that is sent to the server. Returns: Response: The response from the server to the request. """ self._connection.connection.rpush(self._request_key, pickle.dumps(request)) resp_key = '{}:{}'.format(SIGNAL_REDIS_PREFIX, request.uid) while True: if self._connection.polling_time > 0.0: sleep(self._connection.polling_time) response_data = self._connection.connection.get(resp_key) if response_data is not None: self._connection.connection.delete(resp_key) break return pickle.loads(response_data)
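A request/response round trip from the client side. The _Connection stub is hypothetical; the Client only needs an object exposing a connection attribute (a redis client) and a polling_time, as used in send() above. A signal server must be answering for send() to return.

import redis

from lightflow.models.signal import Client, Request

class _Connection:
    """ Hypothetical stand-in for lightflow's signal connection object. """
    def __init__(self):
        self.connection = redis.StrictRedis(host='localhost', port=6379)
        self.polling_time = 0.5    # seconds between polls for the response

client = Client(_Connection(), request_key='my-workflow-id')
response = client.send(Request(action='stop_workflow'))   # blocks until answered
print(response.success, response.payload)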
class Client: ''' The client for the signal system, sending requests to the server. This implementation sends requests to a list stored in redis. Each request is implemented using the Request class and stored as a pickled object. The response from the server is stored under the unique response id. ''' def __init__(self, connection, request_key): ''' Initialises the signal client. Args: connection: Reference to a signal connection object. request_key (str): The key under which the list of requests is stored. ''' pass def send(self, request): ''' Send a request to the server and wait for its response. Args: request (Request): Reference to a request object that is sent to the server. Returns: Response: The response from the server to the request. ''' pass
3
3
16
3
7
6
3
1.07
0
0
0
0
2
2
2
2
39
8
15
7
12
16
15
7
12
4
0
2
5
7,895
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/parameters.py
lightflow.models.parameters.Parameters
class Parameters(list):
    """ A list of options that the workflow requires in order to run. """
    def check_missing(self, args):
        """ Returns the names of all options that are required but were not specified.

        All options that don't have a default value are required in order to run the
        workflow.

        Args:
            args (dict): A dictionary of the provided arguments that is checked for
                missing options.

        Returns:
            list: A list with the names of the options that are missing from the
                provided arguments.
        """
        return [opt.name for opt in self
                if (opt.name not in args) and (opt.default is None)]

    def consolidate(self, args):
        """ Consolidate the provided arguments.

        If the provided arguments have matching options, this performs a type
        conversion. For any option that has a default value and is not present in the
        provided arguments, the default value is added.

        Args:
            args (dict): A dictionary of the provided arguments.

        Returns:
            dict: A dictionary of the arguments, type converted and enriched with
                the defaults of any missing options.
        """
        result = dict(args)

        for opt in self:
            if opt.name in result:
                result[opt.name] = opt.convert(result[opt.name])
            else:
                if opt.default is not None:
                    result[opt.name] = opt.convert(opt.default)

        return result
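A runnable sketch of check_missing() and consolidate(), using the Option class defined in the next record of this file; the option names are made up:

from lightflow.models.parameters import Option, Parameters

params = Parameters([
    Option('threshold', type=float),        # no default -> required
    Option('retries', default=3, type=int),
])

print(params.check_missing({'retries': '5'}))     # ['threshold']
print(params.consolidate({'threshold': '0.25'}))  # {'threshold': 0.25, 'retries': 3}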
class Parameters(list):
    ''' A list of options that the workflow requires in order to run. '''
    def check_missing(self, args):
        ''' Returns the names of all options that are required but were not specified.

        All options that don't have a default value are required in order to run the
        workflow.

        Args:
            args (dict): A dictionary of the provided arguments that is checked for
                missing options.

        Returns:
            list: A list with the names of the options that are missing from the
                provided arguments.
        '''
        pass

    def consolidate(self, args):
        ''' Consolidate the provided arguments.

        If the provided arguments have matching options, this performs a type
        conversion. For any option that has a default value and is not present in the
        provided arguments, the default value is added.

        Args:
            args (dict): A dictionary of the provided arguments.

        Returns:
            dict: A dictionary of the arguments, type converted and enriched with
                the defaults of any missing options.
        '''
        pass
3
3
20
4
6
10
3
1.62
1
1
0
0
2
0
2
35
44
10
13
5
10
21
11
5
8
4
2
3
5
7,896
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/parameters.py
lightflow.models.parameters.Option
class Option: """ A single option which is required to run the workflow. The option is checked against the provided arguments to the workflow and, if available, its provided value is stored in the data store for use within the workflow. """ def __init__(self, name, default=None, help=None, type=str): """ Initialise the workflow option. Args: name (str): The name of the option under which the value will be stored. default: The default value that should be used when no value is specified. Set to None to make this a non-optional option. help (str): A short help string for this option. type: The type of the option. Supported types are: str, int, float, bool """ self._name = name self._default = default self._help = help self._type = type @property def name(self): """ Returns the name of the option. Returns: str: the name of the option. """ return self._name @property def default(self): """ Return the default value of the option. Returns: str: the default value of the option """ return self._default @property def type(self): """ Return the type of the option. Returns: type: the type of the option. """ return self._type @property def help(self): """ Return the help text of the option. Returns: str: the help text of the option. """ return self._help def convert(self, value): """ Convert the specified value to the type of the option. Args: value: The value that should be converted. Returns: The value with the type given by the option. """ if self._type is str: return str(value) elif self._type is int: try: return int(value) except (UnicodeError, ValueError): raise WorkflowArgumentError('Cannot convert {} to int'.format(value)) elif self._type is float: try: return float(value) except (UnicodeError, ValueError): raise WorkflowArgumentError('Cannot convert {} to float'.format(value)) elif self._type is bool: if isinstance(value, bool): return bool(value) value = value.lower() if value in ('true', '1', 'yes', 'y'): return True elif value in ('false', '0', 'no', 'n'): return False raise WorkflowArgumentError('Cannot convert {} to bool'.format(value)) else: return value
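The type conversion rules of convert() in action; the option names below are made up:

from lightflow.models.parameters import Option

verbose = Option('verbose', default='false', type=bool)
print(verbose.convert('yes'))   # True  ('true', '1', 'yes', 'y' map to True)
print(verbose.convert('0'))     # False ('false', '0', 'no', 'n' map to False)
print(verbose.convert(True))    # True  (real bools pass straight through)

count = Option('count', type=int)
print(count.convert('42'))      # 42
# count.convert('forty-two') would raise WorkflowArgumentError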
class Option: ''' A single option which is required to run the workflow. The option is checked against the provided arguments to the workflow and, if available, its provided value is stored in the data store for use within the workflow. ''' def __init__(self, name, default=None, help=None, type=str): ''' Initialise the workflow option. Args: name (str): The name of the option under which the value will be stored. default: The default value that should be used when no value is specified. Set to None to make this a non-optional option. help (str): A short help string for this option. type: The type of the option. Supported types are: str, int, float, bool ''' pass @property def name(self): ''' Returns the name of the option. Returns: str: the name of the option. ''' pass @property def default(self): ''' Return the default value of the option. Returns: str: the default value of the option ''' pass @property def type(self): ''' Return the type of the option. Returns: type: the type of the option. ''' pass @property def help(self): ''' Return the help text of the option. Returns: str: the help text of the option. ''' pass def convert(self, value): ''' Convert the specified value to the type of the option. Args: value: The value that should be converted. Returns: The value with the type given by the option. ''' pass
11
7
12
1
6
5
3
0.83
0
7
1
0
6
4
6
6
90
13
42
15
31
35
33
11
26
10
0
2
15
7,897
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/mongo_proxy.py
lightflow.models.mongo_proxy.MongoReconnectProxy
class MongoReconnectProxy: """ Proxy for catching AutoReconnect exceptions in function calls of another class """ def __init__(self, obj, methods): """ Initialize the MongoReconnectProxy. Args: obj: The object for which all calls should be wrapped in the AutoReconnect exception handling block. methods (set): The list of method names that should be wrapped. """ self._unproxied_object = obj self._methods = methods @property def unproxied_object(self): """ Return the unproxied object """ return self._unproxied_object def __getitem__(self, key): """ Return proxy for the object method named 'key'. """ item = self._unproxied_object[key] if callable(item): return MongoReconnectProxy(item, self._methods) return item def __getattr__(self, key): """ Depending on the type of attribute return an Executable or Proxy object. """ attr = getattr(self._unproxied_object, key) if callable(attr): if key in self._methods: return MongoExecutable(attr) else: return MongoReconnectProxy(attr, self._methods) return attr def __call__(self, *args, **kwargs): return self._unproxied_object(*args, **kwargs) def __dir__(self): return dir(self._unproxied_object) def __str__(self): return str(self._unproxied_object) def __repr__(self): return repr(self._unproxied_object)
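A sketch of the wrapping behaviour using a plain object instead of a real pymongo client, so it runs without a database; FakeCollection is a made-up stand-in:

from lightflow.models.mongo_proxy import MongoExecutable, MongoReconnectProxy

class FakeCollection:
    """ Hypothetical stand-in for a pymongo collection. """
    def insert_one(self, doc):
        return doc

proxy = MongoReconnectProxy(FakeCollection(), methods={'insert_one'})
wrapped = proxy.insert_one                   # names listed in methods come back wrapped
print(isinstance(wrapped, MongoExecutable))  # True
print(wrapped({'x': 1}))                     # {'x': 1} -- the call passes through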
class MongoReconnectProxy: ''' Proxy for catching AutoReconnect exceptions in function calls of another class ''' def __init__(self, obj, methods): ''' Initialize the MongoReconnectProxy. Args: obj: The object for which all calls should be wrapped in the AutoReconnect exception handling block. methods (set): The list of method names that should be wrapped. ''' pass @property def unproxied_object(self): ''' Return the unproxied object ''' pass def __getitem__(self, key): ''' Return proxy for the object method named 'key'. ''' pass def __getattr__(self, key): ''' Depending on the type of attribute return an Executable or Proxy object. ''' pass def __call__(self, *args, **kwargs): pass def __dir__(self): pass def __str__(self): pass def __repr__(self): pass
10
5
5
0
3
1
1
0.36
0
2
1
2
8
2
8
8
47
9
28
14
18
10
26
13
17
3
0
2
11
7,898
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/models/mongo_proxy.py
lightflow.models.mongo_proxy.MongoExecutable
class MongoExecutable: """ Wrapper class for catching and handling reconnect exceptions in pymongo calls. The provided callable is executed and if the pymongo library raises an AutoReconnect exception, another call is attempted. This is repeated until WAIT_TIME is reached. """ def __init__(self, method): """ Initialize the MongoExecutable. Args: method (callable): The function that should be called and for which reconnection attempts should be tried. """ self._method = method def __call__(self, *args, **kwargs): """ Call the method and handle the AutoReconnect exception gracefully """ start_time = time.time() for attempt in count(): try: return self._method(*args, **kwargs) except AutoReconnect: duration = time.time() - start_time if duration >= WAIT_TIME: break logger.warning( 'Reconnecting to MongoDB, attempt {} ({:.3f} seconds elapsed)'. format(attempt, duration)) time.sleep(self.calc_sleep(attempt)) return self._method(*args, **kwargs) def calc_sleep(self, attempt): """ Calculate the sleep time based on the number of past attempts. The sleep time grows exponentially with the attempts up to a maximum of 10 seconds. Args: attempt (int): The number of reconnection attempts. Returns: int: The number of seconds to sleep before trying the next attempt. """ return min(10, pow(2, attempt)) def __dir__(self): return dir(self._method) def __str__(self): return str(self._method) def __repr__(self): return repr(self._method)
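The backoff schedule produced by calc_sleep(), fully runnable:

from lightflow.models.mongo_proxy import MongoExecutable

executable = MongoExecutable(print)
# sleep time doubles per attempt and is capped at 10 seconds
print([executable.calc_sleep(n) for n in range(6)])   # [1, 2, 4, 8, 10, 10]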
class MongoExecutable: ''' Wrapper class for catching and handling reconnect exceptions in pymongo calls. The provided callable is executed and if the pymongo library raises an AutoReconnect exception, another call is attempted. This is repeated until WAIT_TIME is reached. ''' def __init__(self, method): ''' Initialize the MongoExecutable. Args: method (callable): The function that should be called and for which reconnection attempts should be tried. ''' pass def __call__(self, *args, **kwargs): ''' Call the method and handle the AutoReconnect exception gracefully ''' pass def calc_sleep(self, attempt): ''' Calculate the sleep time based on the number of past attempts. The sleep time grows exponentially with the attempts up to a maximum of 10 seconds. Args: attempt (int): The number of reconnection attempts. Returns: int: The number of seconds to sleep before trying the next attempt. ''' pass def __dir__(self): pass def __str__(self): pass def __repr__(self): pass
7
4
8
2
4
2
2
0.72
0
2
0
0
6
1
6
6
58
15
25
11
18
18
23
11
16
4
0
3
9
7,899
AustralianSynchrotron/lightflow
AustralianSynchrotron_lightflow/lightflow/queue/worker.py
lightflow.queue.worker.WorkerLifecycle
class WorkerLifecycle(StartStopStep):
    """ Class that manages the lifecycle of a worker. """

    def stop(self, consumer):
        """ This function is called when the worker receives a request to terminate.

        Upon the termination of the worker, the workflows for all running jobs are
        stopped gracefully.

        Args:
            consumer (Consumer): Reference to the consumer object that handles
                                 messages from the broker.
        """
        stopped_workflows = []
        # iterate over a snapshot, since stopping jobs mutates the active requests
        for request in list(consumer.controller.state.active_requests):
            job = AsyncResult(request.id)

            workflow_id = job.result['workflow_id']
            if workflow_id not in stopped_workflows:
                client = Client(
                    SignalConnection(**consumer.app.user_options['config'].signal,
                                     auto_connect=True),
                    request_key=workflow_id)
                client.send(Request(action='stop_workflow'))

                stopped_workflows.append(workflow_id)
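A sketch of how such a bootstep is registered with a Celery application. Whether lightflow wires it exactly this way is an assumption, but adding a StartStopStep subclass to app.steps['consumer'] is the standard Celery mechanism, and stop() then runs on worker shutdown; note that stop() also expects the app's user_options['config'] to be populated.

from celery import Celery

from lightflow.queue.worker import WorkerLifecycle

app = Celery('lightflow')
# consumer bootsteps participate in the consumer blueprint's lifecycle
app.steps['consumer'].add(WorkerLifecycle)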
class WorkerLifecycle(StartStopStep):
    ''' Class that manages the lifecycle of a worker. '''
    def stop(self, consumer):
        ''' This function is called when the worker receives a request to terminate.

        Upon the termination of the worker, the workflows for all running jobs are
        stopped gracefully.

        Args:
            consumer (Consumer): Reference to the consumer object that handles
                                 messages from the broker.
        '''
        pass
2
2
23
4
12
7
3
0.62
1
3
3
0
1
0
1
1
26
5
13
7
11
8
10
7
8
3
1
2
3