Dataset columns (value type and length/class statistics):
repository_name: string, lengths 7 to 107
function_path: string, lengths 4 to 190
function_identifier: string, lengths 1 to 236
language: string, 1 class
function: string, lengths 9 to 647k
docstring: string, lengths 5 to 488k
function_url: string, lengths 71 to 285
context: string, lengths 0 to 2.51M
license: string, 5 classes
flyteorg/flytekit
flytekit/models/core/workflow.py
NodeMetadata.__init__
python
def __init__(self, name, timeout=None, retries=None, interruptible=None):
    self._name = name
    self._timeout = timeout if timeout is not None else datetime.timedelta()
    self._retries = retries if retries is not None else _RetryStrategy(0)
    self._interruptible = interruptible
Defines extra information about the Node.

:param Text name: Friendly name for the Node.
:param datetime.timedelta timeout: [Optional] Overall timeout for a task.
:param flytekit.models.literals.RetryStrategy retries: [Optional] Number of retries per task.
:param bool interruptible: [Optional] Can be safely interrupted during execution.
https://github.com/flyteorg/flytekit/blob/6c032035563ae645b0b93558b3fe3362080057ea/flytekit/models/core/workflow.py#L158-L170
import datetime import typing from flyteidl.core import workflow_pb2 as _core_workflow from flytekit.models import common as _common from flytekit.models import interface as _interface from flytekit.models import types as _types from flytekit.models.core import condition as _condition from flytekit.models.core import identifier as _identifier from flytekit.models.literals import Binding as _Binding from flytekit.models.literals import RetryStrategy as _RetryStrategy from flytekit.models.task import Resources class IfBlock(_common.FlyteIdlEntity): def __init__(self, condition, then_node): self._condition = condition self._then_node = then_node @property def condition(self): return self._condition @property def then_node(self): return self._then_node def to_flyte_idl(self): return _core_workflow.IfBlock(condition=self.condition.to_flyte_idl(), then_node=self.then_node.to_flyte_idl()) @classmethod def from_flyte_idl(cls, pb2_object): return cls( condition=_condition.BooleanExpression.from_flyte_idl(pb2_object.condition), then_node=Node.from_flyte_idl(pb2_object.then_node), ) class IfElseBlock(_common.FlyteIdlEntity): def __init__(self, case, other=None, else_node=None, error=None): self._case = case self._other = other self._else_node = else_node self._error = error @property def case(self): return self._case @property def other(self): return self._other @property def else_node(self): return self._else_node @property def error(self): return self._error def to_flyte_idl(self): return _core_workflow.IfElseBlock( case=self.case.to_flyte_idl(), other=[a.to_flyte_idl() for a in self.other] if self.other else None, else_node=self.else_node.to_flyte_idl() if self.else_node else None, error=self.error.to_flyte_idl() if self.error else None, ) @classmethod def from_flyte_idl(cls, pb2_object): return cls( case=IfBlock.from_flyte_idl(pb2_object.case), other=[IfBlock.from_flyte_idl(a) for a in pb2_object.other], else_node=Node.from_flyte_idl(pb2_object.else_node) if pb2_object.HasField("else_node") else None, error=_types.Error.from_flyte_idl(pb2_object.error) if pb2_object.HasField("error") else None, ) class BranchNode(_common.FlyteIdlEntity): def __init__(self, if_else: IfElseBlock): self._if_else = if_else @property def if_else(self) -> IfElseBlock: return self._if_else def to_flyte_idl(self): return _core_workflow.BranchNode(if_else=self.if_else.to_flyte_idl()) @classmethod def from_flyte_idl(cls, pb2_objct): return cls(if_else=IfElseBlock.from_flyte_idl(pb2_objct.if_else)) class NodeMetadata(_common.FlyteIdlEntity):
Apache License 2.0
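A minimal construction sketch for the NodeMetadata row above, assuming flytekit is installed; the node name, timeout, and retry count are hypothetical values, not taken from the source:

import datetime
from flytekit.models.core.workflow import NodeMetadata
from flytekit.models.literals import RetryStrategy

meta = NodeMetadata(
    name="train-step",                       # friendly node name (hypothetical)
    timeout=datetime.timedelta(minutes=10),  # overall task timeout
    retries=RetryStrategy(3),                # retry up to 3 times
    interruptible=True,
)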
plaidweb/publ
publ/tokens.py
get_token
python
def get_token(id_url: str, lifetime: int, scope: str = None) -> str:
    token = {'me': id_url}
    if scope:
        token['scope'] = scope
    return signer().dumps((token, int(time.time() + lifetime)))
Gets a signed token for the given identity
https://github.com/plaidweb/publ/blob/67efc5e32bf25dbac72a83d1167de038b79db5a7/publ/tokens.py#L23-L29
import json import logging import time import typing import flask import itsdangerous import requests import werkzeug.exceptions as http_error from .config import config LOGGER = logging.getLogger(__name__) def signer(): return itsdangerous.URLSafeSerializer(flask.current_app.secret_key)
MIT License
brython-dev/brython
www/src/Lib/_imp.py
init_frozen
python
def init_frozen(*args,**kw): pass
Initializes a frozen module.
https://github.com/brython-dev/brython/blob/33aeaab551f1b73209326c5a0aecf98642d4c126/www/src/Lib/_imp.py#L44-L46
import sys def _fix_co_filename(*args,**kw): pass def acquire_lock(*args,**kw): pass check_hash_based_pycs = """default""" def create_builtin(spec): return __import__(spec.name) def create_dynamic(*args,**kw): pass def exec_builtin(*args,**kw): pass def exec_dynamic(*args,**kw): pass def extension_suffixes(*args,**kw): return [] def get_frozen_object(*args,**kw): pass
BSD 3-Clause New or Revised License
oasis-open/cti-python-stix2
stix2/equivalence/object/__init__.py
exact_match
python
def exact_match(val1, val2):
    result = 0.0
    if val1 == val2:
        result = 1.0
    logger.debug("--\t\texact_match '%s' '%s'\tresult: '%s'", val1, val2, result)
    return result
Performs an exact value match based on two values.
This method can be used for *_ref equality check when de-reference is not possible.

Args:
    val1: A value suitable for an equality test.
    val2: A value suitable for an equality test.

Returns:
    float: 1.0 if the value matches exactly, 0.0 otherwise.
https://github.com/oasis-open/cti-python-stix2/blob/81550cab92aaacbca5db0d37c607dfd1707ce4c3/stix2/equivalence/object/__init__.py#L263-L279
import collections import itertools import logging import time from ...datastore import DataSource, DataStoreMixin, Filter from ...utils import STIXdatetime, parse_into_datetime from ..pattern import equivalent_patterns logger = logging.getLogger(__name__) def object_equivalence( obj1, obj2, prop_scores={}, threshold=70, ds1=None, ds2=None, ignore_spec_version=False, versioning_checks=False, max_depth=1, **weight_dict ): similarity_result = object_similarity( obj1, obj2, prop_scores, ds1, ds2, ignore_spec_version, versioning_checks, max_depth, **weight_dict ) if similarity_result >= threshold: return True return False def object_similarity( obj1, obj2, prop_scores={}, ds1=None, ds2=None, ignore_spec_version=False, versioning_checks=False, max_depth=1, **weight_dict ): weights = WEIGHTS.copy() if weight_dict: weights.update(weight_dict) weights["_internal"] = { "ignore_spec_version": ignore_spec_version, "versioning_checks": versioning_checks, "ds1": ds1, "ds2": ds2, "max_depth": max_depth, } type1, type2 = obj1["type"], obj2["type"] if type1 != type2: raise ValueError('The objects to compare must be of the same type!') if ignore_spec_version is False and obj1.get("spec_version", "2.0") != obj2.get("spec_version", "2.0"): raise ValueError('The objects to compare must be of the same spec version!') try: weights[type1] except KeyError: logger.warning("'%s' type has no 'weights' dict specified & thus no object similarity method to call!", type1) sum_weights = matching_score = 0 else: try: method = weights[type1]["method"] except KeyError: logger.debug("Starting object similarity process between: '%s' and '%s'", obj1["id"], obj2["id"]) matching_score = 0.0 sum_weights = 0.0 for prop in weights[type1]: if check_property_present(prop, obj1, obj2): w = weights[type1][prop][0] comp_funct = weights[type1][prop][1] prop_scores[prop] = {} if comp_funct == partial_timestamp_based: contributing_score = w * comp_funct(obj1[prop], obj2[prop], weights[type1]["tdelta"]) elif comp_funct == partial_location_distance: threshold = weights[type1]["threshold"] contributing_score = w * comp_funct(obj1["latitude"], obj1["longitude"], obj2["latitude"], obj2["longitude"], threshold) elif comp_funct == reference_check or comp_funct == list_reference_check: if max_depth > 0: weights["_internal"]["max_depth"] = max_depth - 1 ds1, ds2 = weights["_internal"]["ds1"], weights["_internal"]["ds2"] if _datastore_check(ds1, ds2): contributing_score = w * comp_funct(obj1[prop], obj2[prop], ds1, ds2, **weights) elif comp_funct == reference_check: comp_funct = exact_match contributing_score = w * comp_funct(obj1[prop], obj2[prop]) elif comp_funct == list_reference_check: comp_funct = partial_list_based contributing_score = w * comp_funct(obj1[prop], obj2[prop]) prop_scores[prop]["check_type"] = comp_funct.__name__ else: continue weights["_internal"]["max_depth"] = max_depth else: contributing_score = w * comp_funct(obj1[prop], obj2[prop]) sum_weights += w matching_score += contributing_score prop_scores[prop]["weight"] = w prop_scores[prop]["contributing_score"] = contributing_score logger.debug("'%s' check -- weight: %s, contributing score: %s", prop, w, contributing_score) prop_scores["matching_score"] = matching_score prop_scores["sum_weights"] = sum_weights logger.debug("Matching Score: %s, Sum of Weights: %s", matching_score, sum_weights) else: logger.debug("Starting object similarity process between: '%s' and '%s'", obj1["id"], obj2["id"]) try: matching_score, sum_weights = method(obj1, obj2, prop_scores, **weights[type1]) 
except TypeError: matching_score, sum_weights = method(obj1, obj2, **weights[type1]) logger.debug("Matching Score: %s, Sum of Weights: %s", matching_score, sum_weights) if sum_weights <= 0: return 0 equivalence_score = (matching_score / sum_weights) * 100.0 return equivalence_score def check_property_present(prop, obj1, obj2): if prop == "longitude_latitude": if all(x in obj1 and x in obj2 for x in ('latitude', 'longitude')): return True elif prop in obj1 and prop in obj2: return True return False def partial_timestamp_based(t1, t2, tdelta): if not isinstance(t1, STIXdatetime): t1 = parse_into_datetime(t1) if not isinstance(t2, STIXdatetime): t2 = parse_into_datetime(t2) t1, t2 = time.mktime(t1.timetuple()), time.mktime(t2.timetuple()) result = 1 - min(abs(t1 - t2) / (86400 * tdelta), 1) logger.debug("--\t\tpartial_timestamp_based '%s' '%s' tdelta: '%s'\tresult: '%s'", t1, t2, tdelta, result) return result def partial_list_based(l1, l2): l1_set, l2_set = set(l1), set(l2) result = len(l1_set.intersection(l2_set)) / max(len(l1_set), len(l2_set)) logger.debug("--\t\tpartial_list_based '%s' '%s'\tresult: '%s'", l1, l2, result) return result
BSD 3-Clause New or Revised License
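A minimal usage sketch for the exact_match row above, assuming the stix2 package is installed; the identifier strings are hypothetical:

from stix2.equivalence.object import exact_match

same_ref = "malware--9c4638ec-f1de-4ddb-abf4-1b760417654e"
exact_match(same_ref, same_ref)  # -> 1.0, identical values
exact_match(same_ref, "malware--00000000-0000-0000-0000-000000000000")  # -> 0.0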
redhatqe/openshift-python-wrapper
ocp_resources/resource.py
ResourceEditor.update
python
def update(self, backup_resources=False):
    resource_to_patch = []
    if backup_resources:
        LOGGER.info("ResourceEdit: Backing up old data")
        if self.user_backups:
            resource_to_patch = self._patches
            self._backups = self.user_backups
        else:
            for resource, update in self._patches.items():
                namespace = None
                try:
                    original_resource_dict = resource.instance.to_dict()
                except NotFoundError:
                    original_resource_dict = list(
                        resource.get(
                            dyn_client=resource.client,
                            field_selector=f"metadata.name={resource.name}",
                        )
                    )[0].to_dict()
                    namespace = update.get("metadata", {}).get("namespace")

                backup = self._create_backup(
                    original=original_resource_dict, patch=update
                )
                if namespace:
                    backup["metadata"]["namespace"] = namespace

                if backup or self.action == "replace":
                    resource_to_patch.append(resource)
                    self._backups[resource] = backup
                else:
                    LOGGER.warning(
                        f"ResourceEdit: no diff found in patch for "
                        f"{resource.name} -- skipping"
                    )
            if not resource_to_patch:
                return
    else:
        resource_to_patch = self._patches

    patches_to_apply = {
        resource: self._patches[resource] for resource in resource_to_patch
    }

    self._apply_patches_sampler(
        patches=patches_to_apply, action_text="Updating", action=self.action
    )
Prepares backup dicts (where necessary) and applies patches
https://github.com/redhatqe/openshift-python-wrapper/blob/01aa3234fc6333868222736d8f42e27943edb47a/ocp_resources/resource.py#L935-L991
import contextlib import json import logging import os import re from distutils.version import Version import kubernetes import urllib3 import yaml from openshift.dynamic import DynamicClient from openshift.dynamic.exceptions import ( ConflictError, InternalServerError, NotFoundError, ServerTimeoutError, ) from openshift.dynamic.resource import ResourceField from ocp_resources.constants import ( NOT_FOUND_ERROR_EXCEPTION_DICT, PROTOCOL_ERROR_EXCEPTION_DICT, ) from ocp_resources.utils import TimeoutExpiredError, TimeoutSampler DEFAULT_CLUSTER_RETRY_EXCEPTIONS = { ConnectionAbortedError: [], ConnectionResetError: [], InternalServerError: ["etcdserver: leader changed"], ServerTimeoutError: [], } LOGGER = logging.getLogger(__name__) TIMEOUT = 240 MAX_SUPPORTED_API_VERSION = "v1" def _collect_instance_data(directory, resource_object): with open(os.path.join(directory, f"{resource_object.name}.yaml"), "w") as fd: fd.write(resource_object.instance.to_str()) def _collect_pod_logs(dyn_client, resource_item, **kwargs): kube_v1_api = kubernetes.client.CoreV1Api(api_client=dyn_client.client) return kube_v1_api.read_namespaced_pod_log( name=resource_item.metadata.name, namespace=resource_item.metadata.namespace, **kwargs, ) def _collect_virt_launcher_data(dyn_client, directory, resource_object): if resource_object.kind == "VirtualMachineInstance": for pod in dyn_client.resources.get(kind="Pod").get().items: pod_name = pod.metadata.name pod_instance = dyn_client.resources.get( api_version=pod.apiVersion, kind=pod.kind ).get(name=pod_name, namespace=pod.metadata.namespace) if pod_name.startswith("virt-launcher"): with open(os.path.join(directory, f"{pod_name}.log"), "w") as fd: fd.write( _collect_pod_logs( dyn_client=dyn_client, resource_item=pod, container="compute", ) ) with open(os.path.join(directory, f"{pod_name}.yaml"), "w") as fd: fd.write(pod_instance.to_str()) def _collect_data_volume_data(dyn_client, directory, resource_object): if resource_object.kind == "DataVolume": cdi_worker_prefixes = ("importer", "cdi-upload") for pod in dyn_client.resources.get(kind="Pod").get().items: pod_name = pod.metadata.name pod_instance = dyn_client.resources.get( api_version=pod.apiVersion, kind=pod.kind ).get(name=pod_name, namespace=pod.metadata.namespace) if pod_name.startswith(cdi_worker_prefixes) or pod_name.endswith( "source-pod" ): with open(os.path.join(directory, f"{pod_name}.log"), "w") as fd: fd.write( _collect_pod_logs(dyn_client=dyn_client, resource_item=pod) ) with open(os.path.join(directory, f"{pod_name}.yaml"), "w") as fd: fd.write(pod_instance.to_str()) def _collect_data(resource_object, dyn_client=None): dyn_client = ( dyn_client if dyn_client else DynamicClient(kubernetes.config.new_client_from_config()) ) directory = os.environ.get("TEST_DIR_LOG") _collect_instance_data(directory=directory, resource_object=resource_object) _collect_virt_launcher_data( dyn_client=dyn_client, directory=directory, resource_object=resource_object ) _collect_data_volume_data( dyn_client=dyn_client, directory=directory, resource_object=resource_object ) def _find_supported_resource(dyn_client, api_group, kind): results = dyn_client.resources.search(group=api_group, kind=kind) sorted_results = sorted( results, key=lambda result: KubeAPIVersion(result.api_version), reverse=True ) for result in sorted_results: if KubeAPIVersion(result.api_version) <= KubeAPIVersion( MAX_SUPPORTED_API_VERSION ): return result def _get_api_version(dyn_client, api_group, kind): res = _find_supported_resource( dyn_client=dyn_client, 
api_group=api_group, kind=kind ) if not res: log = f"Couldn't find {kind} in {api_group} api group" LOGGER.warning(log) raise NotImplementedError(log) return res.group_version def sub_resource_level(current_class, owner_class, parent_class): for class_iterator in reversed( [ class_iterator for class_iterator in current_class.mro() if class_iterator not in owner_class.mro() and issubclass(class_iterator, parent_class) ] ): return class_iterator.__name__ class KubeAPIVersion(Version): component_re = re.compile(r"(\d+ | [a-z]+)", re.VERBOSE) def __init__(self, vstring=None): self.vstring = vstring self.version = None super().__init__(vstring=vstring) def parse(self, vstring): components = [comp for comp in self.component_re.split(vstring) if comp] for idx, obj in enumerate(components): with contextlib.suppress(ValueError): components[idx] = int(obj) errmsg = f"version '{vstring}' does not conform to kubernetes api versioning guidelines" if ( len(components) not in (2, 4) or components[0] != "v" or not isinstance(components[1], int) ): raise ValueError(errmsg) if len(components) == 4 and ( components[2] not in ("alpha", "beta") or not isinstance(components[3], int) ): raise ValueError(errmsg) self.version = components def __str__(self): return self.vstring def __repr__(self): return "KubeAPIVersion ('{0}')".format(str(self)) def _cmp(self, other): if isinstance(other, str): other = KubeAPIVersion(vstring=other) myver = self.version otherver = other.version for ver in myver, otherver: if len(ver) == 2: ver.extend(["zeta", 9999]) if myver == otherver: return 0 if myver < otherver: return -1 if myver > otherver: return 1 class ClassProperty: def __init__(self, func): self.func = func def __get__(self, obj, owner): return self.func(owner) class ValueMismatch(Exception): class Resource: api_group = None api_version = None singular_name = None class Status: SUCCEEDED = "Succeeded" FAILED = "Failed" DELETING = "Deleting" DEPLOYED = "Deployed" PENDING = "Pending" COMPLETED = "Completed" RUNNING = "Running" TERMINATING = "Terminating" class Condition: UPGRADEABLE = "Upgradeable" AVAILABLE = "Available" DEGRADED = "Degraded" PROGRESSING = "Progressing" CREATED = "Created" RECONCILE_COMPLETE = "ReconcileComplete" READY = "Ready" class Status: TRUE = "True" FALSE = "False" UNKNOWN = "Unknown" class Phase: INSTALL_READY = "InstallReady" SUCCEEDED = "Succeeded" class Reason: ALL_REQUIREMENTS_MET = "AllRequirementsMet" INSTALL_SUCCEEDED = "InstallSucceeded" class Interface: class State: UP = "up" DOWN = "down" ABSENT = "absent" class ApiGroup: ADMISSIONREGISTRATION_K8S_IO = "admissionregistration.k8s.io" APIEXTENSIONS_K8S_IO = "apiextensions.k8s.io" APIREGISTRATION_K8S_IO = "apiregistration.k8s.io" APP_KUBERNETES_IO = "app.kubernetes.io" APPS = "apps" CDI_KUBEVIRT_IO = "cdi.kubevirt.io" CONFIG_OPENSHIFT_IO = "config.openshift.io" CONSOLE_OPENSHIFT_IO = "console.openshift.io" EVENTS_K8S_IO = "events.k8s.io" FORKLIFT_KONVEYOR_IO = "forklift.konveyor.io" HCO_KUBEVIRT_IO = "hco.kubevirt.io" HOSTPATHPROVISIONER_KUBEVIRT_IO = "hostpathprovisioner.kubevirt.io" IMAGE_OPENSHIFT_IO = "image.openshift.io" K8S_CNI_CNCF_IO = "k8s.cni.cncf.io" K8S_V1_CNI_CNCF_IO = "k8s.v1.cni.cncf.io" KUBERNETES_IO = "kubernetes.io" KUBEVIRT_IO = "kubevirt.io" KUBEVIRT_KUBEVIRT_IO = "kubevirt.kubevirt.io" LITMUS_IO = "litmuschaos.io" MACHINE_OPENSHIFT_IO = "machine.openshift.io" MACHINECONFIGURATION_OPENSHIFT_IO = "machineconfiguration.openshift.io" MAISTRA_IO = "maistra.io" MONITORING_COREOS_COM = "monitoring.coreos.com" 
NETWORKADDONSOPERATOR_NETWORK_KUBEVIRT_IO = ( "networkaddonsoperator.network.kubevirt.io" ) NETWORKING_ISTIO_IO = "networking.istio.io" NETWORKING_K8S_IO = "networking.k8s.io" NMSTATE_IO = "nmstate.io" NODEMAINTENANCE_KUBEVIRT_IO = "nodemaintenance.kubevirt.io" OPERATOR_OPENSHIFT_IO = "operator.openshift.io" OPERATORS_COREOS_COM = "operators.coreos.com" OPERATORS_OPENSHIFT_IO = "operators.openshift.io" OS_TEMPLATE_KUBEVIRT_IO = "os.template.kubevirt.io" PACKAGES_OPERATORS_COREOS_COM = "packages.operators.coreos.com" POLICY = "policy" PROJECT_OPENSHIFT_IO = "project.openshift.io" RBAC_AUTHORIZATION_K8S_IO = "rbac.authorization.k8s.io" RIPSAW_CLOUDBULLDOZER_IO = "ripsaw.cloudbulldozer.io" ROUTE_OPENSHIFT_IO = "route.openshift.io" SCHEDULING_K8S_IO = "scheduling.k8s.io" SECURITY_ISTIO_IO = "security.istio.io" SECURITY_OPENSHIFT_IO = "security.openshift.io" SNAPSHOT_STORAGE_K8S_IO = "snapshot.storage.k8s.io" SNAPSHOT_KUBEVIRT_IO = "snapshot.kubevirt.io" SRIOVNETWORK_OPENSHIFT_IO = "sriovnetwork.openshift.io" SSP_KUBEVIRT_IO = "ssp.kubevirt.io" STORAGE_K8S_IO = "storage.k8s.io" STORAGECLASS_KUBERNETES_IO = "storageclass.kubernetes.io" TEMPLATE_KUBEVIRT_IO = "template.kubevirt.io" TEMPLATE_OPENSHIFT_IO = "template.openshift.io" UPLOAD_CDI_KUBEVIRT_IO = "upload.cdi.kubevirt.io" V2V_KUBEVIRT_IO = "v2v.kubevirt.io" VM_KUBEVIRT_IO = "vm.kubevirt.io" class ApiVersion: V1 = "v1" V1BETA1 = "v1beta1" V1ALPHA1 = "v1alpha1" V1ALPHA3 = "v1alpha3" def __init__( self, name=None, client=None, teardown=True, timeout=TIMEOUT, privileged_client=None, yaml_file=None, ): if not self.api_group and not self.api_version: raise NotImplementedError( "Subclasses of Resource require self.api_group or self.api_version to be defined" ) self.namespace = None self.name = name self.client = client self.privileged_client = privileged_client self.yaml_file = yaml_file self.resource_dict = None if not (self.name or self.yaml_file): raise ValueError("name or yaml file is required") if not self.client: try: self.client = DynamicClient( client=kubernetes.config.new_client_from_config() ) except ( kubernetes.config.ConfigException, urllib3.exceptions.MaxRetryError, ): LOGGER.error( "You need to be logged into a cluster or have $KUBECONFIG env configured" ) raise if not self.api_version: self.api_version = _get_api_version( dyn_client=self.client, api_group=self.api_group, kind=self.kind ) self.teardown = teardown self.timeout = timeout @ClassProperty def kind(cls): return sub_resource_level(cls, NamespacedResource, Resource) def _base_body(self): if self.yaml_file: with open(self.yaml_file, "r") as stream: self.resource_dict = yaml.safe_load(stream=stream.read()) self.name = self.resource_dict["metadata"]["name"] return self.resource_dict return { "apiVersion": self.api_version, "kind": self.kind, "metadata": {"name": self.name}, } def to_dict(self): return self._base_body() def __enter__(self): return self.deploy() def __exit__(self, exception_type, exception_value, traceback): if self.teardown: self.clean_up() def deploy(self): self.create() return self def clean_up(self): if os.environ.get("CNV_TEST_COLLECT_LOGS", "0") == "1": try: _collect_data(resource_object=self) except Exception as exception_: LOGGER.warning(exception_) data = self.to_dict() LOGGER.info(f"Deleting {data}") self.delete(wait=True, timeout=self.timeout) @classmethod def _prepare_resources(cls, dyn_client, singular_name, *args, **kwargs): if not cls.api_version: cls.api_version = _get_api_version( dyn_client=dyn_client, api_group=cls.api_group, kind=cls.kind ) 
get_kwargs = {"singular_name": singular_name} if singular_name else {} return dyn_client.resources.get( kind=cls.kind, api_version=cls.api_version, **get_kwargs ).get(*args, **kwargs) def _prepare_singular_name_kwargs(self, **kwargs): kwargs = kwargs if kwargs else {} if self.singular_name: kwargs["singular_name"] = self.singular_name return kwargs def full_api(self, **kwargs): kwargs = self._prepare_singular_name_kwargs(**kwargs) return self.client.resources.get( api_version=self.api_version, kind=self.kind, **kwargs ) @property def api(self): return self.full_api() def wait(self, timeout=TIMEOUT, sleep=1): LOGGER.info(f"Wait until {self.kind} {self.name} is created") samples = TimeoutSampler( wait_timeout=timeout, sleep=sleep, exceptions_dict={ **PROTOCOL_ERROR_EXCEPTION_DICT, **NOT_FOUND_ERROR_EXCEPTION_DICT, }, func=lambda: self.exists, ) for sample in samples: if sample: return def wait_deleted(self, timeout=TIMEOUT): LOGGER.info(f"Wait until {self.kind} {self.name} is deleted") return self.client_wait_deleted(timeout=timeout) @property def exists(self): try: return self.instance except NotFoundError: return None def client_wait_deleted(self, timeout): samples = TimeoutSampler( wait_timeout=timeout, sleep=1, func=lambda: self.exists ) for sample in samples: if not sample: return def wait_for_status(self, status, timeout=TIMEOUT, stop_status=None, sleep=1): stop_status = stop_status if stop_status else self.Status.FAILED LOGGER.info(f"Wait for {self.kind} {self.name} status to be {status}") samples = TimeoutSampler( wait_timeout=timeout, sleep=sleep, exceptions_dict=PROTOCOL_ERROR_EXCEPTION_DICT, func=self.api.get, field_selector=f"metadata.name=={self.name}", namespace=self.namespace, ) current_status = None try: for sample in samples: if sample.items: sample_status = sample.items[0].status if sample_status: current_status = sample_status.phase if current_status == status: return if current_status == stop_status: raise TimeoutExpiredError( f"Status of {self.kind} {self.name} is {current_status}" ) except TimeoutExpiredError: if current_status: LOGGER.error(f"Status of {self.kind} {self.name} is {current_status}") raise def create(self, body=None, wait=False): data = self.to_dict() if body: kind = body["kind"] name = body.get("name") api_version = body["apiVersion"] if kind != self.kind: raise ValueMismatch(f"{kind} != {self.kind}") if name and name != self.name: raise ValueMismatch(f"{name} != {self.name}") if api_version != self.api_version: raise ValueMismatch(f"{api_version} != {self.api_version}") data.update(body) LOGGER.info(f"Posting {data}") LOGGER.info(f"Create {self.kind} {self.name}") res = self.api.create(body=data, namespace=self.namespace) if wait and res: return self.wait() return res def delete(self, wait=False, timeout=TIMEOUT, body=None): try: res = self.api.delete(name=self.name, namespace=self.namespace, body=body) except NotFoundError: return False LOGGER.info(f"Delete {self.kind} {self.name}") if wait and res: return self.wait_deleted(timeout=timeout) return res @property def status(self): LOGGER.info(f"Get {self.kind} {self.name} status") return self.instance.status.phase def update(self, resource_dict): LOGGER.info(f"Update {self.kind} {self.name}: {resource_dict}") self.api.patch( body=resource_dict, namespace=self.namespace, content_type="application/merge-patch+json", ) def update_replace(self, resource_dict): LOGGER.info(f"Replace {self.kind} {self.name}: {resource_dict}") self.api.replace(body=resource_dict, name=self.name, namespace=self.namespace) 
@staticmethod def retry_cluster_exceptions( func, exceptions_dict=DEFAULT_CLUSTER_RETRY_EXCEPTIONS, **kwargs ): sampler = TimeoutSampler( wait_timeout=10, sleep=1, func=func, print_log=False, exceptions_dict=exceptions_dict, **kwargs, ) for sample in sampler: return sample @classmethod def get(cls, dyn_client, singular_name=None, *args, **kwargs): def _get(): _resources = cls._prepare_resources( dyn_client=dyn_client, singular_name=singular_name, *args, **kwargs ) try: for resource_field in _resources.items: yield cls(client=dyn_client, name=resource_field.metadata.name) except TypeError: yield cls(client=dyn_client, name=_resources.metadata.name) return Resource.retry_cluster_exceptions(func=_get) @property def instance(self): def _instance(): return self.api.get(name=self.name) return self.retry_cluster_exceptions(func=_instance) @property def labels(self): return self.instance["metadata"]["labels"] def wait_for_condition(self, condition, status, timeout=300): LOGGER.info( f"Wait for {self.kind}/{self.name}'s '{condition}' condition to be '{status}'" ) samples = TimeoutSampler( wait_timeout=timeout, sleep=1, exceptions_dict=PROTOCOL_ERROR_EXCEPTION_DICT, func=self.api.get, field_selector=f"metadata.name=={self.name}", namespace=self.namespace, ) for sample in samples: if ( sample.items and sample.items[0].get("status") and sample.items[0].status.get("conditions") ): sample_conditions = sample.items[0].status.conditions if sample_conditions: for cond in sample_conditions: if cond.type == condition and cond.status == status: return def api_request(self, method, action, url, **params): client = self.privileged_client or self.client response = client.client.request( method=method, url=f"{url}/{action}", headers=self.client.configuration.api_key, **params, ) try: return json.loads(response.data) except json.decoder.JSONDecodeError: return response.data def wait_for_conditions(self): samples = TimeoutSampler( wait_timeout=30, sleep=1, func=lambda: self.instance.status.conditions ) for sample in samples: if sample: return class NamespacedResource(Resource): def __init__( self, name=None, namespace=None, client=None, teardown=True, timeout=TIMEOUT, privileged_client=None, yaml_file=None, ): super().__init__( name=name, client=client, teardown=teardown, timeout=timeout, privileged_client=privileged_client, yaml_file=yaml_file, ) self.namespace = namespace if not (self.name and self.namespace) and not self.yaml_file: raise ValueError("name and namespace or yaml file is required") @classmethod def get(cls, dyn_client, singular_name=None, raw=False, *args, **kwargs): _resources = cls._prepare_resources( dyn_client=dyn_client, singular_name=singular_name, *args, **kwargs ) try: for resource_field in _resources.items: if raw: yield resource_field else: yield cls( client=dyn_client, name=resource_field.metadata.name, namespace=resource_field.metadata.namespace, ) except TypeError: if raw: yield _resources else: yield cls( client=dyn_client, name=_resources.metadata.name, namespace=_resources.metadata.namespace, ) @property def instance(self): return self.api.get(name=self.name, namespace=self.namespace) def _base_body(self): res = super(NamespacedResource, self)._base_body() if self.yaml_file: self.namespace = self.resource_dict["metadata"].get( "namespace", self.namespace ) if not self.namespace: raise ValueError("Namespace must be passed or specified in the YAML file.") if not self.yaml_file: res["metadata"]["namespace"] = self.namespace return res def to_dict(self): return self._base_body() class 
ResourceEditor: def __init__(self, patches, action="update", user_backups=None): self._patches = self._dictify_resourcefield(res=patches) self.action = action self.user_backups = user_backups self._backups = {} @property def backups(self): return self._backups @property def patches(self): return self._patches
Apache License 2.0
mozilla/make.mozilla.org
vendor-local/lib/python/celery/backends/base.py
BaseBackend.prepare_value
python
def prepare_value(self, result): return result
Prepare value for storage.
https://github.com/mozilla/make.mozilla.org/blob/98b87c517b463a5bae09f29284b1dabca97bb376/vendor-local/lib/python/celery/backends/base.py#L121-L123
from __future__ import absolute_import import time import sys from datetime import timedelta from kombu import serialization from .. import states from ..datastructures import LRUCache from ..exceptions import TimeoutError, TaskRevokedError from ..utils import timeutils from ..utils.encoding import bytes_to_str, ensure_bytes, from_utf8 from ..utils.serialization import (get_pickled_exception, get_pickleable_exception, create_exception_cls) EXCEPTION_ABLE_CODECS = frozenset(["pickle", "yaml"]) is_py3k = sys.version_info >= (3, 0) def unpickle_backend(cls, args, kwargs): return cls(*args, **kwargs) class BaseBackend(object): READY_STATES = states.READY_STATES UNREADY_STATES = states.UNREADY_STATES EXCEPTION_STATES = states.EXCEPTION_STATES TimeoutError = TimeoutError subpolling_interval = None supports_native_join = False def __init__(self, *args, **kwargs): from ..app import app_or_default self.app = app_or_default(kwargs.get("app")) self.serializer = kwargs.get("serializer", self.app.conf.CELERY_RESULT_SERIALIZER) (self.content_type, self.content_encoding, self.encoder) = serialization.registry._encoders[self.serializer] def encode(self, data): _, _, payload = serialization.encode(data, serializer=self.serializer) return payload def decode(self, payload): payload = is_py3k and payload or str(payload) return serialization.decode(payload, content_type=self.content_type, content_encoding=self.content_encoding) def prepare_expires(self, value, type=None): if value is None: value = self.app.conf.CELERY_TASK_RESULT_EXPIRES if isinstance(value, timedelta): value = timeutils.timedelta_seconds(value) if value is not None and type: return type(value) return value def encode_result(self, result, status): if status in self.EXCEPTION_STATES and isinstance(result, Exception): return self.prepare_exception(result) else: return self.prepare_value(result) def store_result(self, task_id, result, status, traceback=None): raise NotImplementedError( "store_result is not supported by this backend.") def mark_as_started(self, task_id, **meta): return self.store_result(task_id, meta, status=states.STARTED) def mark_as_done(self, task_id, result): return self.store_result(task_id, result, status=states.SUCCESS) def mark_as_failure(self, task_id, exc, traceback=None): return self.store_result(task_id, exc, status=states.FAILURE, traceback=traceback) def mark_as_retry(self, task_id, exc, traceback=None): return self.store_result(task_id, exc, status=states.RETRY, traceback=traceback) def mark_as_revoked(self, task_id): return self.store_result(task_id, TaskRevokedError(), status=states.REVOKED, traceback=None) def prepare_exception(self, exc): if self.serializer in EXCEPTION_ABLE_CODECS: return get_pickleable_exception(exc) return {"exc_type": type(exc).__name__, "exc_message": str(exc)} def exception_to_python(self, exc): if self.serializer in EXCEPTION_ABLE_CODECS: return get_pickled_exception(exc) return create_exception_cls(from_utf8(exc["exc_type"]), sys.modules[__name__])
BSD 3-Clause New or Revised License
severin-lemaignan/vim-minimap
autoload/drawille/drawille.py
Canvas.rows
python
def rows(self, min_x=None, min_y=None, max_x=None, max_y=None):
    if not self.chars.keys():
        return []

    minrow = min_y // 4 if min_y != None else min(self.chars.keys())
    maxrow = (max_y - 1) // 4 if max_y != None else max(self.chars.keys())
    mincol = min_x // 2 if min_x != None else min(min(x.keys()) for x in self.chars.values())
    maxcol = (max_x - 1) // 2 if max_x != None else max(max(x.keys()) for x in self.chars.values())
    ret = []

    for rownum in range(minrow, maxrow + 1):
        if not rownum in self.chars:
            ret.append('')
            continue

        maxcol = (max_x - 1) // 2 if max_x != None else max(self.chars[rownum].keys())
        row = []

        for x in range(mincol, maxcol + 1):
            char = self.chars[rownum].get(x)

            if not char:
                row.append(' ')
            elif type(char) != int:
                row.append(char)
            else:
                row.append(unichr(braille_char_offset + char))

        ret.append(''.join(row))

    return ret
Returns a list of the current :class:`Canvas` object lines.

:param min_x: (optional) minimum x coordinate of the canvas
:param min_y: (optional) minimum y coordinate of the canvas
:param max_x: (optional) maximum x coordinate of the canvas
:param max_y: (optional) maximum y coordinate of the canvas
https://github.com/severin-lemaignan/vim-minimap/blob/5a415547e7584eba0bebe087fd553e13c76e8842/autoload/drawille/drawille.py#L200-L238
import math import os from sys import version_info from collections import defaultdict from time import sleep IS_PY3 = version_info[0] == 3 if IS_PY3: unichr = chr pixel_map = ((0x01, 0x08), (0x02, 0x10), (0x04, 0x20), (0x40, 0x80)) braille_char_offset = 0x2800 def getTerminalSize(): env = os.environ def ioctl_GWINSZ(fd): try: import fcntl, termios, struct cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')) except: return return cr cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) if not cr: try: fd = os.open(os.ctermid(), os.O_RDONLY) cr = ioctl_GWINSZ(fd) os.close(fd) except: pass if not cr: cr = (env.get('LINES', 25), env.get('COLUMNS', 80)) return int(cr[1]), int(cr[0]) def normalize(coord): coord_type = type(coord) if coord_type == int: return coord elif coord_type == float: return int(round(coord)) else: raise TypeError("Unsupported coordinate type <{0}>".format(type(coord))) def intdefaultdict(): return defaultdict(int) def get_pos(x, y): return normalize(x) // 2, normalize(y) // 4 class Canvas(object): def __init__(self, line_ending=os.linesep): super(Canvas, self).__init__() self.clear() self.line_ending = line_ending def clear(self): self.chars = defaultdict(intdefaultdict) def set(self, x, y): x = normalize(x) y = normalize(y) col, row = get_pos(x, y) if type(self.chars[row][col]) != int: return self.chars[row][col] |= pixel_map[y % 4][x % 2] def unset(self, x, y): x = normalize(x) y = normalize(y) col, row = get_pos(x, y) if type(self.chars[row][col]) == int: self.chars[row][col] &= ~pixel_map[y % 4][x % 2] if type(self.chars[row][col]) != int or self.chars[row][col] == 0: del(self.chars[row][col]) if not self.chars.get(row): del(self.chars[row]) def toggle(self, x, y): x = normalize(x) y = normalize(y) col, row = get_pos(x, y) if type(self.chars[row][col]) != int or self.chars[row][col] & pixel_map[y % 4][x % 2]: self.unset(x, y) else: self.set(x, y) def set_text(self, x, y, text): col, row = get_pos(x, y) for i,c in enumerate(text): self.chars[row][col+i] = c def get(self, x, y): x = normalize(x) y = normalize(y) dot_index = pixel_map[y % 4][x % 2] col, row = get_pos(x, y) char = self.chars.get(row, {}).get(col) if not char: return False if type(char) != int: return True return bool(char & dot_index)
MIT License
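A brief usage sketch for the Canvas.rows row above, assuming the standalone drawille package (the module vendored here) is importable; the plotted coordinates are hypothetical:

from drawille import Canvas

c = Canvas()
for x in range(10):
    c.set(x, x)  # plot a diagonal of braille dots
print("\n".join(c.rows()))  # one braille string per canvas row, top to bottom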
pyturf/pyturf
turf/envelope/_envelope.py
envelope
python
def envelope(features, *args): return bbox_polygon(bbox(features, *args))
Takes any number of features and returns a rectangular Polygon that encompasses all vertices.

:param features: any GeoJSON feature or feature collection
:return: bounding box extent in [minX, minY, maxX, maxY] order
https://github.com/pyturf/pyturf/blob/e865af582fed7d8ebcb77bbffd1d752c295d361a/turf/envelope/_envelope.py#L5-L13
from turf.bbox import bbox from turf.bbox_polygon import bbox_polygon
MIT License
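A minimal usage sketch for the envelope row above, assuming pyturf is installed and re-exports envelope from the top-level turf package; the feature collection and its coordinates are hypothetical:

from turf import envelope

fc = {
    "type": "FeatureCollection",
    "features": [
        {"type": "Feature", "properties": {}, "geometry": {"type": "Point", "coordinates": [0, 0]}},
        {"type": "Feature", "properties": {}, "geometry": {"type": "Point", "coordinates": [10, 5]}},
    ],
}
env = envelope(fc)  # Feature whose geometry is the bounding rectangle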
google/osv
lib/osv/sources.py
parse_vulnerabilities
python
def parse_vulnerabilities(path, key_path=None): return _parse_vulnerabilities(_parse_vulnerability_dict(path), key_path)
Parse vulnerabilities (potentially multiple in a list).
https://github.com/google/osv/blob/1b7dba5b21c7633970d5e7f3ae2061081239da35/lib/osv/sources.py#L79-L81
import json import hashlib import logging import os import pygit2 import time import yaml from google.protobuf import json_format from . import repos from . import vulnerability_pb2 AUTHOR_EMAIL = 'infra@osv.dev' PUSH_RETRIES = 2 PUSH_RETRY_SLEEP_SECONDS = 10 YAML_EXTENSIONS = ('.yaml', '.yml') JSON_EXTENSIONS = ('.json') def parse_source_id(source_id): return source_id.split(':', 1) def repo_path(repo): return os.path.dirname(repo.path.rstrip(os.sep)) def _parse_vulnerability_dict(path): with open(path) as f: ext = os.path.splitext(path)[1] if ext in YAML_EXTENSIONS: return yaml.safe_load(f) if ext in JSON_EXTENSIONS: return json.load(f) raise RuntimeError('Unknown format ' + ext) return None def parse_vulnerability(path, key_path=None): data = _parse_vulnerability_dict(path) return parse_vulnerability_from_dict(data, key_path) def _parse_vulnerabilities(data, key_path): if isinstance(data, list): return [parse_vulnerability_from_dict(v, key_path) for v in data] return [parse_vulnerability_from_dict(data, key_path)]
Apache License 2.0
containerbuildsystem/osbs-client
osbs/build/pod_response.py
PodResponse.get_failure_reason
python
def get_failure_reason(self):
    reason_key = 'reason'
    cid_key = 'containerID'
    exit_key = 'exitCode'

    pod_status = self.json.get('status', {})
    statuses = pod_status.get('containerStatuses', [])
    for status in statuses:
        try:
            terminated = status['state']['terminated']
            exit_code = terminated['exitCode']
            if exit_code != 0:
                reason_dict = {
                    exit_key: exit_code,
                }

                if 'containerID' in terminated:
                    reason_dict[cid_key] = terminated['containerID']

                for key in ['message', 'reason']:
                    try:
                        reason_dict[reason_key] = terminated[key]
                        break
                    except KeyError:
                        continue
                else:
                    reason_dict[reason_key] = 'Exit code {code}'.format(
                        code=exit_code
                    )

                return reason_dict
        except KeyError:
            continue

    for key in ['message', 'reason']:
        try:
            return {reason_key: pod_status[key]}
        except KeyError:
            continue

    return {reason_key: pod_status['phase']}
Find the reason a pod failed

:return: dict, which will always have key 'reason':
    reason: brief reason for state
    containerID (if known): ID of container
    exitCode (if known): numeric exit code
https://github.com/containerbuildsystem/osbs-client/blob/935464af2aafacd3304606099b2e411cdb39db63/osbs/build/pod_response.py#L58-L113
from __future__ import print_function, absolute_import, unicode_literals import logging from osbs.utils import graceful_chain_get logger = logging.getLogger(__name__) class PodResponse(object): def __init__(self, pod): self._json = pod @property def json(self): return self._json def get_container_image_ids(self): statuses = graceful_chain_get(self.json, "status", "containerStatuses") if statuses is None: return {} def remove_prefix(image_id): try: index = image_id.index('://') image_id = image_id[index + 3:] except ValueError: pass return image_id return {status['image']: remove_prefix(status['imageID']) for status in statuses}
BSD 3-Clause New or Revised License
ouranosinc/xclim
xclim/testing/_utils.py
TestFile.__call__
python
def __call__(self):
    if not self.path.exists():
        if self.url is not None:
            self.download()
        else:
            self.generate()

    if not self.path.exists():
        raise FileNotFoundError

    return self.path
Return the path to the file.
https://github.com/ouranosinc/xclim/blob/7d5a2ea03af433bd088ea72d9402e1e090531c39/xclim/testing/_utils.py#L241-L252
import hashlib import json import logging import warnings from pathlib import Path from typing import Optional, Sequence from urllib.error import HTTPError, URLError from urllib.parse import urljoin from urllib.request import urlopen, urlretrieve import pandas as pd from xarray import Dataset from xarray import open_dataset as _open_dataset from yaml import safe_dump, safe_load _default_cache_dir = Path.home() / ".xclim_testing_data" LOGGER = logging.getLogger("xclim") __all__ = [ "open_dataset", "list_datasets", "list_input_variables", "get_all_CMIP6_variables", "update_variable_yaml", ] def file_md5_checksum(fname): hash_md5 = hashlib.md5() with open(fname, "rb") as f: hash_md5.update(f.read()) return hash_md5.hexdigest() def _get( fullname: Path, github_url: str, branch: str, suffix: str, cache_dir: Path, ) -> Path: cache_dir = cache_dir.absolute() local_file = cache_dir / branch / fullname md5name = fullname.with_suffix("{}.md5".format(suffix)) md5file = cache_dir / branch / md5name if local_file.is_file(): localmd5 = file_md5_checksum(local_file) try: url = "/".join((github_url, "raw", branch, md5name.as_posix())) LOGGER.info("Attempting to fetch remote file md5: %s" % md5name.as_posix()) urlretrieve(url, md5file) with open(md5file) as f: remote_md5 = f.read() if localmd5.strip() != remote_md5.strip(): local_file.unlink() msg = ( f"MD5 checksum for {local_file.as_posix()} does not match upstream md5. " "Attempting new download." ) warnings.warn(msg) except (HTTPError, URLError): msg = f"{md5name.as_posix()} not accessible online. Unable to determine validity with upstream repo." warnings.warn(msg) if not local_file.is_file(): local_file.parent.mkdir(parents=True, exist_ok=True) url = "/".join((github_url, "raw", branch, fullname.as_posix())) LOGGER.info("Fetching remote file: %s" % fullname.as_posix()) urlretrieve(url, local_file) try: url = "/".join((github_url, "raw", branch, md5name.as_posix())) LOGGER.info("Fetching remote file md5: %s" % md5name.as_posix()) urlretrieve(url, md5file) except HTTPError as e: msg = f"{md5name.as_posix()} not found. Aborting file retrieval." local_file.unlink() raise FileNotFoundError(msg) from e localmd5 = file_md5_checksum(local_file) try: with open(md5file) as f: remote_md5 = f.read() if localmd5.strip() != remote_md5.strip(): local_file.unlink() msg = ( f"{local_file.as_posix()} and md5 checksum do not match. " "There may be an issue with the upstream origin data." ) raise OSError(msg) except OSError as e: LOGGER.error(e) return local_file def open_dataset( name: str, suffix: Optional[str] = None, dap_url: Optional[str] = None, github_url: str = "https://github.com/Ouranosinc/xclim-testdata", branch: str = "main", cache: bool = True, cache_dir: Path = _default_cache_dir, **kwds, ) -> Dataset: name = Path(name) if suffix is None: suffix = ".nc" fullname = name.with_suffix(suffix) if dap_url is not None: dap_file = urljoin(dap_url, str(name)) try: ds = _open_dataset(dap_file, **kwds) return ds except OSError: msg = "OPeNDAP file not read. Verify that service is available." 
LOGGER.error(msg) raise local_file = _get( fullname=fullname, github_url=github_url, branch=branch, suffix=suffix, cache_dir=cache_dir, ) try: ds = _open_dataset(local_file, **kwds) if not cache: ds = ds.load() local_file.unlink() return ds except OSError: raise def list_datasets(github_repo="Ouranosinc/xclim-testdata", branch="main"): res = urlopen(f"https://api.github.com/repos/{github_repo}/contents?ref={branch}") base = json.loads(res.read().decode()) records = [] for folder in base: if folder["path"].startswith(".") or folder["size"] > 0: continue res = urlopen(folder["url"]) listing = json.loads(res.read().decode()) for file in listing: if file["path"].endswith(".nc"): records.append( { "name": file["path"], "size": file["size"] / 2 ** 10, "url": file["html_url"], } ) df = pd.DataFrame.from_records(records).set_index("name") print(f"Found {len(df)} datasets.") return df def as_tuple(x): if isinstance(x, (list, tuple)): return x return (x,) class TestFile: def __init__(self, name, path=None, url=None): self.name = name self.path = path self.url = url def generate(self): pass def download(self): for u, p in zip(as_tuple(self.url), as_tuple(self.path)): urlretrieve(u, str(p))
Apache License 2.0
baderlab/saber
saber/tests/test_embeddings.py
dummy_embeddings_before_load
python
def dummy_embeddings_before_load():
    return Embeddings(filepath=PATH_TO_DUMMY_EMBEDDINGS,
                      token_map=DUMMY_TOKEN_MAP,
                      totally_arbitrary='arbitrary')
Returns an instance of an Embeddings() object BEFORE the `Embeddings.load()` method is called.
https://github.com/baderlab/saber/blob/876be6bfdb1bc5b18cbcfa848c94b0d20c940f02/saber/tests/test_embeddings.py#L55-L62
import numpy as np import pytest from ..embeddings import Embeddings from .resources import helpers from .resources.dummy_constants import * @pytest.fixture def dummy_embedding_idx(): embeddings = Embeddings(filepath=PATH_TO_DUMMY_EMBEDDINGS, token_map=DUMMY_TOKEN_MAP) embedding_idx = embeddings._prepare_embedding_index(binary=False) return embedding_idx @pytest.fixture def dummy_embedding_matrix_and_type_to_idx(): embeddings = Embeddings(filepath=PATH_TO_DUMMY_EMBEDDINGS, token_map=DUMMY_TOKEN_MAP) embedding_idx = embeddings._prepare_embedding_index(binary=False) embeddings.num_found = len(embedding_idx) embeddings.dimension = len(list(embedding_idx.values())[0]) embedding_matrix, type_to_idx = embeddings._prepare_embedding_matrix(embedding_idx, load_all=False) embeddings.num_embed = embedding_matrix.shape[0] return embedding_matrix, type_to_idx @pytest.fixture def dummy_embedding_matrix_and_type_to_idx_load_all(): test = {"This": 0, "is": 1, "a": 2, "test": 3} embeddings = Embeddings(filepath=PATH_TO_DUMMY_EMBEDDINGS, token_map=test) embedding_idx = embeddings._prepare_embedding_index(binary=False) embeddings.num_found = len(embedding_idx) embeddings.dimension = len(list(embedding_idx.values())[0]) embedding_matrix, type_to_idx = embeddings._prepare_embedding_matrix(embedding_idx, load_all=True) embeddings.num_embed = embedding_matrix.shape[0] return embedding_matrix, type_to_idx @pytest.fixture
MIT License
pytest-dev/pytest-testinfra
testinfra/modules/systeminfo.py
SystemInfo.arch
python
def arch(self): return self.sysinfo["arch"]
Host architecture

>>> host.system_info.arch
'x86_64'
https://github.com/pytest-dev/pytest-testinfra/blob/1974005549cc1b37af778371879b17ea372150d8/testinfra/modules/systeminfo.py#L172-L178
import re from testinfra.modules.base import InstanceModule from testinfra.utils import cached_property class SystemInfo(InstanceModule): @cached_property def sysinfo(self): sysinfo = { "type": None, "distribution": None, "codename": None, "release": None, "arch": None, } uname = self.run_expect([0, 1], "uname -s") if uname.rc == 1 or uname.stdout.lower().startswith("msys"): sysinfo.update(**self._get_windows_sysinfo()) return sysinfo sysinfo["type"] = uname.stdout.rstrip("\r\n").lower() if sysinfo["type"] == "linux": sysinfo.update(**self._get_linux_sysinfo()) elif sysinfo["type"] == "darwin": sysinfo.update(**self._get_darwin_sysinfo()) else: sysinfo["release"] = self.check_output("uname -r") sysinfo["distribution"] = sysinfo["type"] sysinfo["codename"] = None sysinfo["arch"] = self.check_output("uname -m") return sysinfo def _get_linux_sysinfo(self): sysinfo = {} lsb = self.run("lsb_release -a") if lsb.rc == 0: for line in lsb.stdout.splitlines(): key, value = line.split(":", 1) key = key.strip().lower() value = value.strip().lower() if key == "distributor id": sysinfo["distribution"] = value elif key == "release": sysinfo["release"] = value elif key == "codename": sysinfo["codename"] = value return sysinfo os_release = self.run("cat /etc/os-release") if os_release.rc == 0: for line in os_release.stdout.splitlines(): for key, attname in ( ("ID=", "distribution"), ("VERSION_ID=", "release"), ("VERSION_CODENAME=", "codename"), ): if line.startswith(key): sysinfo[attname] = ( line[len(key) :].replace('"', "").replace("'", "").strip() ) if sysinfo["distribution"] == "arch": sysinfo["release"] = "rolling" return sysinfo redhat_release = self.run("cat /etc/redhat-release") if redhat_release.rc == 0: match = re.match( r"^(.+) release ([^ ]+) .*$", redhat_release.stdout.strip() ) if match: sysinfo["distribution"], sysinfo["release"] = match.groups() return sysinfo alpine_release = self.run("cat /etc/alpine-release") if alpine_release.rc == 0: sysinfo["distribution"] = "alpine" sysinfo["release"] = alpine_release.stdout.strip() return sysinfo return sysinfo def _get_darwin_sysinfo(self): sysinfo = {} sw_vers = self.run("sw_vers") if sw_vers.rc == 0: for line in sw_vers.stdout.splitlines(): key, value = line.split(":", 1) key = key.strip().lower() value = value.strip() if key == "productname": sysinfo["distribution"] = value elif key == "productversion": sysinfo["release"] = value return sysinfo def _get_windows_sysinfo(self): sysinfo = {} for line in self.check_output('systeminfo | findstr /B /C:"OS"').splitlines(): key, value = line.split(":", 1) key = key.strip().replace(" ", "_").lower() value = value.strip() if key == "os_name": sysinfo["distribution"] = value sysinfo["type"] = value.split(" ")[1].lower() elif key == "os_version": sysinfo["release"] = value sysinfo["arch"] = self.check_output("echo %PROCESSOR_ARCHITECTURE%") return sysinfo @property def type(self): return self.sysinfo["type"] @property def distribution(self): return self.sysinfo["distribution"] @property def release(self): return self.sysinfo["release"] @property def codename(self): return self.sysinfo["codename"] @property
Apache License 2.0
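A short usage sketch for the SystemInfo.arch row above, written as a standard pytest-testinfra test; the expected values are hypothetical and depend on the target host:

# test_system.py -- run e.g. with: pytest --hosts=ssh://server test_system.py
def test_architecture(host):
    # `host` is the fixture testinfra injects into test functions
    assert host.system_info.type == "linux"
    assert host.system_info.arch == "x86_64"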
complianceascode/auditree-framework
compliance/utils/data_parse.py
deep_merge
python
def deep_merge(a, b, path=None, append=False):
    for key in b:
        if key in a:
            if isinstance(a[key], dict) and isinstance(b[key], dict):
                deep_merge(a[key], b[key], (path or []) + [str(key)], append)
                continue
            is_lists = isinstance(a[key], list) and isinstance(b[key], list)
            if is_lists and append:
                a[key] += b[key]
            else:
                a[key] = b[key]
        else:
            a[key] = b[key]
    return a
Merge two dicts, taking into account any sub (or sub-sub-*) dicts. If ``append`` is ``True`` then list values from ``b`` will be appended to ``a``'s. Modified from: https://stackoverflow.com/a/7205107/566346
https://github.com/complianceascode/auditree-framework/blob/c8a64758e35c656af6c65f5516acaf3f18b6168f/compliance/utils/data_parse.py#L77-L96
import hashlib import json def parse_dot_key(data, key): for key_part in key.split('.'): data = data.get(key_part) if data is None: break return data def get_sha256_hash(key, size=None): partition_hash = hashlib.sha256() for part in key: partition_hash.update(str(part).encode('utf-8')) sha256_hash = partition_hash.hexdigest() if not size or size > len(sha256_hash): size = len(sha256_hash) return sha256_hash[:size] def format_json(data, **addl_kwargs): return json.dumps( data, indent=2, sort_keys=True, separators=(',', ': '), **addl_kwargs )
Apache License 2.0
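A minimal usage sketch for the deep_merge row above; the import path follows the function_path shown in that row and the input dicts are hypothetical:

from compliance.utils.data_parse import deep_merge

a = {"svc": {"regions": ["us"], "replicas": 2}}
b = {"svc": {"regions": ["eu"], "timeout": 30}}

deep_merge(a, b, append=True)
# a == {"svc": {"regions": ["us", "eu"], "replicas": 2, "timeout": 30}}
# lists were appended because append=True; other keys from b overwrite or extend a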
red-hat-storage/ocs-ci
ocs_ci/deployment/ibmcloud.py
IBMCloudOCPDeployment.deploy
python
def deploy(self, log_level=""): ibmcloud.create_cluster(self.cluster_name) kubeconfig_path = os.path.join( config.ENV_DATA["cluster_path"], config.RUN["kubeconfig_location"] ) ibmcloud.get_kubeconfig(self.cluster_name, kubeconfig_path) self.test_cluster()
Deployment specific to OCP cluster on a cloud platform. Args: log_cli_level (str): openshift installer's log level
https://github.com/red-hat-storage/ocs-ci/blob/81bc3dd3c2bccbf875ffa8fa5fa2eb0ac9d52b7e/ocs_ci/deployment/ibmcloud.py#L38-L52
import logging import os from ocs_ci.framework import config from ocs_ci.deployment.cloud import CloudDeploymentBase from ocs_ci.deployment.ocp import OCPDeployment as BaseOCPDeployment from ocs_ci.utility import ibmcloud logger = logging.getLogger(__name__) __all__ = ["IBMCloud"] class IBMCloudOCPDeployment(BaseOCPDeployment): def __init__(self): super(IBMCloudOCPDeployment, self).__init__() def deploy_prereq(self): super(IBMCloudOCPDeployment, self).deploy_prereq()
MIT License
pistony/torch-toolbox
torchtoolbox/transform/functional.py
adjust_gamma
python
def adjust_gamma(img, gamma, gain=1):
    if not _is_numpy_image(img):
        raise TypeError('img should be PIL Image. Got {}'.format(type(img)))

    if gamma < 0:
        raise ValueError('Gamma should be a non-negative real number')

    im = img.astype(np.float32)
    im = 255. * gain * np.power(im / 255., gamma)
    im = im.clip(min=0., max=255.)
    return im.astype(img.dtype)
r"""Perform gamma correction on an image.

Also known as Power Law Transform. Intensities in RGB mode are adjusted
based on the following equation:

.. math::
    I_{\text{out}} = 255 \times \text{gain} \times \left(\frac{I_{\text{in}}}{255}\right)^{\gamma}

See `Gamma Correction`_ for more details.

.. _Gamma Correction: https://en.wikipedia.org/wiki/Gamma_correction

Args:
    img (CV Image): CV Image to be adjusted.
    gamma (float): Non negative real number, same as :math:`\gamma` in the equation.
        gamma larger than 1 make the shadows darker,
        while gamma smaller than 1 make dark regions lighter.
    gain (float): The constant multiplier.
https://github.com/pistony/torch-toolbox/blob/0f21189f598c77166dd87f7e29e98358a07cf166/torchtoolbox/transform/functional.py#L581-L610
from __future__ import division from string import ascii_letters import torch import math import cv2 import numpy as np import random import numbers import collections Sequence = collections.abc.Sequence Iterable = collections.abc.Iterable INTER_MODE = {'NEAREST': cv2.INTER_NEAREST, 'BILINEAR': cv2.INTER_LINEAR, 'BICUBIC': cv2.INTER_CUBIC} PAD_MOD = { 'constant': cv2.BORDER_CONSTANT, 'edge': cv2.BORDER_REPLICATE, 'reflect': cv2.BORDER_DEFAULT, 'symmetric': cv2.BORDER_REFLECT } def _is_tensor_image(img): return torch.is_tensor(img) and img.ndimension() == 3 def _is_numpy(img): return isinstance(img, np.ndarray) def _is_numpy_image(img): return img.ndim in {2, 3} def to_tensor(pic): if _is_numpy_image(pic): if pic.ndim == 2: pic = cv2.cvtColor(pic, cv2.COLOR_GRAY2RGB) img = torch.from_numpy(pic.transpose((2, 0, 1))) if isinstance(img, torch.ByteTensor): return img.float().div(255) else: return img else: raise TypeError('pic should be ndarray. Got {}.'.format(type(pic))) def to_cv_image(pic, mode=None): if not (_is_numpy_image(pic) or _is_tensor_image(pic)): raise TypeError('pic should be Tensor or ndarray. Got {}.'.format(type(pic))) if isinstance(pic, torch.FloatTensor): pic = pic.mul(255).byte() if isinstance(pic, torch.Tensor): pic = pic.numpy().transpose((1, 2, 0)).squeeze() if not isinstance(pic, np.ndarray): raise TypeError('Input pic must be a torch.Tensor or NumPy ndarray, ' + 'not {}'.format(type(pic))) if mode is not None: pic = cv2.cvtColor(pic, mode) return pic def normalize(tensor, mean, std, inplace=False): if not _is_tensor_image(tensor): raise TypeError('tensor is not a torch image.') if not inplace: tensor = tensor.clone() dtype = tensor.dtype mean = torch.as_tensor(mean, dtype=dtype, device=tensor.device) std = torch.as_tensor(std, dtype=dtype, device=tensor.device) tensor.sub_(mean[:, None, None]).div_(std[:, None, None]) return tensor def resize(img, size, interpolation='BILINEAR'): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) if not (isinstance(size, int) or (isinstance(size, Iterable) and len(size) == 2)): raise TypeError('Got inappropriate size arg: {}'.format(size)) interpolation = INTER_MODE[interpolation] if isinstance(size, int): w, h, _ = img.shape if (w <= h and w == size) or (h <= w and h == size): return img if w < h: ow = size oh = int(size * h / w) else: oh = size ow = int(size * w / h) else: oh, ow = map(int, size) return cv2.resize(img, (ow, oh), interpolation=interpolation) def pad(img, padding, fill=0, padding_mode='constant'): if not _is_numpy_image(img): raise TypeError('img should be CV Image. 
Got {}'.format(type(img))) if not isinstance(padding, (numbers.Number, tuple)): raise TypeError('Got inappropriate padding arg') if not isinstance(fill, (numbers.Number, str, tuple)): raise TypeError('Got inappropriate fill arg') if not isinstance(padding_mode, str): raise TypeError('Got inappropriate padding_mode arg') if isinstance(padding, Sequence) and len(padding) not in [2, 4]: raise ValueError("Padding must be an int or a 2, or 4 element tuple, not a " + "{} element tuple".format(len(padding))) assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'], 'Padding mode should be either constant, edge, reflect or symmetric' if isinstance(padding, int): pad_left = pad_right = pad_top = pad_bottom = padding if isinstance(padding, Sequence) and len(padding) == 2: pad_left = pad_right = padding[0] pad_top = pad_bottom = padding[1] if isinstance(padding, Sequence) and len(padding) == 4: pad_left, pad_top, pad_right, pad_bottom = padding if isinstance(fill, numbers.Number): fill = (fill, ) * (2 * len(img.shape) - 3) if padding_mode == 'constant': assert (len(fill) == 3 and len(img.shape) == 3) or (len(fill) == 1 and len( img.shape) == 2), 'channel of image is {} but length of fill is {}'.format(img.shape[-1], len(fill)) img = cv2.copyMakeBorder(img, pad_top, pad_bottom, pad_left, pad_right, PAD_MOD[padding_mode], value=fill) return img def crop(img, i, j, h, w): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) return img[j:j + w, i:i + h, ...].copy() def center_crop(img, output_size): if isinstance(output_size, numbers.Number): output_size = (int(output_size), int(output_size)) w, h, _ = img.shape th, tw = output_size i = int(round((h - th) / 2.)) j = int(round((w - tw) / 2.)) return crop(img, i, j, th, tw) def resized_crop(img, i, j, h, w, size, interpolation='BILINEAR'): assert _is_numpy_image(img), 'img should be CV Image' img = crop(img, i, j, h, w) img = resize(img, size, interpolation) return img def hflip(img): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) return cv2.flip(img, 0) def vflip(img): if not _is_numpy_image(img): raise TypeError('img should be CV Image. 
Got {}'.format(type(img))) return cv2.flip(img, 1) def _get_perspective_coeffs(h, w, shear, anglex, angley, anglez, scale, translate, fov): centery = h * 0.5 centerx = w * 0.5 alpha = math.radians(shear) beta = math.radians(anglez) lambda1 = scale[0] lambda2 = scale[1] tx = translate[0] ty = translate[1] sina = math.sin(alpha) cosa = math.cos(alpha) sinb = math.sin(beta) cosb = math.cos(beta) M00 = cosb * (lambda1 * cosa ** 2 + lambda2 * sina ** 2) - sinb * (lambda2 - lambda1) * sina * cosa M01 = - sinb * (lambda1 * sina ** 2 + lambda2 * cosa ** 2) + cosb * (lambda2 - lambda1) * sina * cosa M10 = sinb * (lambda1 * cosa ** 2 + lambda2 * sina ** 2) + cosb * (lambda2 - lambda1) * sina * cosa M11 = + cosb * (lambda1 * sina ** 2 + lambda2 * cosa ** 2) + sinb * (lambda2 - lambda1) * sina * cosa M02 = centerx - M00 * centerx - M01 * centery + tx M12 = centery - M10 * centerx - M11 * centery + ty affine_matrix = np.array([[M00, M01, M02], [M10, M11, M12], [0, 0, 1]], dtype=np.float32) z = np.sqrt(w**2 + h**2) / 2 / np.tan(math.radians(fov / 2)) radx = math.radians(anglex) rady = math.radians(angley) sinx = math.sin(radx) cosx = math.cos(radx) siny = math.sin(rady) cosy = math.cos(rady) r = np.array([[cosy, 0, -siny, 0], [-siny * sinx, cosx, -sinx * cosy, 0], [cosx * siny, sinx, cosx * cosy, 0], [0, 0, 0, 1]]) pcenter = np.array([centerx, centery, 0, 0], np.float32) p1 = np.array([0, 0, 0, 0], np.float32) - pcenter p2 = np.array([w, 0, 0, 0], np.float32) - pcenter p3 = np.array([0, h, 0, 0], np.float32) - pcenter p4 = np.array([w, h, 0, 0], np.float32) - pcenter dst1 = r.dot(p1) dst2 = r.dot(p2) dst3 = r.dot(p3) dst4 = r.dot(p4) list_dst = [dst1, dst2, dst3, dst4] org = np.array([[0, 0], [w, 0], [0, h], [w, h]], np.float32) dst = np.zeros((4, 2), np.float32) for i in range(4): dst[i, 0] = list_dst[i][0] * z / (z - list_dst[i][2]) + pcenter[0] dst[i, 1] = list_dst[i][1] * z / (z - list_dst[i][2]) + pcenter[1] perspective_matrix = cv2.getPerspectiveTransform(org, dst) matrix = perspective_matrix @ affine_matrix return matrix def perspective(img, fov=45, anglex=0, angley=0, anglez=0, shear=0, translate=(0, 0), scale=(1, 1), resample='BILINEAR', fillcolor=(0, 0, 0)): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) imgtype = img.dtype gray_scale = False if len(img.shape) == 2: gray_scale = True img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) h, w, _ = img.shape matrix = _get_perspective_coeffs(h, w, shear, anglex, angley, anglez, scale, translate, fov) img = cv2.warpPerspective(img, matrix, (w, h), flags=INTER_MODE[resample], borderMode=cv2.BORDER_CONSTANT, borderValue=fillcolor) if gray_scale: img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) return img.astype(imgtype) def five_crop(img, size): if isinstance(size, numbers.Number): size = (int(size), int(size)) else: assert len(size) == 2, "Please provide only two dimensions (h, w) for size." w, h, _ = img.shape crop_h, crop_w = size if crop_w > w or crop_h > h: raise ValueError("Requested crop size {} is bigger than input size {}".format(size, (h, w))) tl = img.crop((0, 0, crop_w, crop_h)) tr = img.crop((w - crop_w, 0, w, crop_h)) bl = img.crop((0, h - crop_h, crop_w, h)) br = img.crop((w - crop_w, h - crop_h, w, h)) center = center_crop(img, (crop_h, crop_w)) return (tl, tr, bl, br, center) def ten_crop(img, size, vertical_flip=False): if isinstance(size, numbers.Number): size = (int(size), int(size)) else: assert len(size) == 2, "Please provide only two dimensions (h, w) for size." 
first_five = five_crop(img, size) if vertical_flip: img = vflip(img) else: img = hflip(img) second_five = five_crop(img, size) return first_five + second_five def adjust_brightness(img, brightness_factor): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) im = img.astype(np.float32) * brightness_factor im = im.clip(min=0, max=255) return im.astype(img.dtype) def adjust_contrast(img, contrast_factor): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) im = img.astype(np.float32) mean = round(cv2.cvtColor(im, cv2.COLOR_RGB2GRAY).mean()) im = (1 - contrast_factor) * mean + contrast_factor * im im = im.clip(min=0, max=255) return im.astype(img.dtype) def adjust_saturation(img, saturation_factor): if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) im = img.astype(np.float32) degenerate = cv2.cvtColor(cv2.cvtColor(im, cv2.COLOR_RGB2GRAY), cv2.COLOR_GRAY2RGB) im = (1 - saturation_factor) * degenerate + saturation_factor * im im = im.clip(min=0, max=255) return im.astype(img.dtype) def adjust_hue(img, hue_factor): if not (-0.5 <= hue_factor <= 0.5): raise ValueError('hue_factor is not in [-0.5, 0.5].'.format(hue_factor)) if not _is_numpy_image(img): raise TypeError('img should be CV Image. Got {}'.format(type(img))) im = img.astype(np.uint8) hsv = cv2.cvtColor(im, cv2.COLOR_RGB2HSV_FULL) hsv[..., 0] += np.uint8(hue_factor * 255) im = cv2.cvtColor(hsv, cv2.COLOR_HSV2RGB_FULL) return im.astype(img.dtype)
BSD 3-Clause New or Revised License
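A minimal usage sketch for adjust_gamma, assuming the function above is in scope; the image values are synthetic and purely illustrative.

import numpy as np

# Synthetic 8-bit RGB image; any HxWx3 (or HxW) uint8 ndarray passes _is_numpy_image.
img = np.random.randint(0, 256, size=(64, 64, 3), dtype=np.uint8)

darker = adjust_gamma(img, gamma=2.0)    # gamma > 1 darkens mid-tones
lighter = adjust_gamma(img, gamma=0.5)   # gamma < 1 brightens dark regions
assert darker.dtype == img.dtype and darker.shape == img.shape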
atmtools/typhon
typhon/physics/units/em.py
SRF.integrate_radiances
python
def integrate_radiances(self, f, L, spectral=True):
    fnc = scipy.interpolate.interp1d(
        self.frequency, self.W,
        bounds_error=False, fill_value=0.0)
    w_on_L_grid = ureg.Quantity(fnc(f), ureg.dimensionless)
    df = ureg.Quantity(numpy.diff(f), f.u)
    w1p = w_on_L_grid[1:]
    L1p = L[:, 1:]
    if L.shape[0] == 0:
        ch_rad = numpy.empty(dtype="f8", shape=L.shape[:-1])
    else:
        ch_rad = numexpr.evaluate("sum(w1p * L1p * df, {:d})".format(
            L.ndim - 1))
    ch_rad = ureg.Quantity(ch_rad, w1p.u * L1p.u * df.u)
    if spectral:
        return ch_rad / (w1p*df).sum()
    else:
        return ch_rad
From a spectrum of radiances and an SRF, calculate the channel (spectral) radiance.

The spectral response function need not be specified on the same grid as the spectrum of radiances; this function interpolates the spectral response function onto the grid of the radiances. This is less bad than the reverse, because a spectral response function tends to be smoother than a spectrum.

**Approximations:**

* Interpolation of the spectral response function onto the frequency grid on which the radiances are specified.

:param ndarray f: Frequencies for spectral radiances [Hz]
:param ndarray L: Spectral radiances [W m^-2 sr^-1 Hz^-1]. Can be in radiance units of various kinds; make sure this is consistent with the spectral response function. The innermost dimension must correspond to frequencies.
:param bool spectral: If True, return spectral radiance [W m^-2 sr^-1 Hz^-1]. If False, return radiance [W m^-2 sr^-1]. Defaults to True.
:returns: Channel (spectral) radiance according to `spectral`
https://github.com/atmtools/typhon/blob/815dcb1d7cb2718ffe81cd08386739438e7782cc/typhon/physics/units/em.py#L304-L353
import warnings import logging import numpy import scipy.interpolate import numexpr import pint import xarray from typhon import config from typhon.constants import (h, k, c) from typhon.physics.units.common import (ureg, radiance_units) from typhon.physics.units.tools import UnitsAwareDataArray as UADA logger = logging.getLogger(__name__) __all__ = [ 'FwmuMixin', 'SRF', 'planck_f', 'specrad_wavenumber2frequency', 'specrad_frequency_to_planck_bt', ] class FwmuMixin: _frequency = None _wavenumber = None _wavelength = None @property def frequency(self): return self._frequency @frequency.setter def frequency(self, value): try: self._frequency = value.to(ureg.Hz, "sp") except AttributeError: value = value * ureg.Hz self._frequency = value self._wavenumber = value.to(1 / ureg.centimeter, "sp") self._wavelength = value.to(ureg.metre, "sp") @property def wavenumber(self): return self._wavenumber @wavenumber.setter def wavenumber(self, value): try: self._wavenumber = value.to(1 / ureg.centimeter, "sp") except AttributeError: value = value * 1 / ureg.centimeter self._wavenumber = value self._frequency = value.to(ureg.Hz, "sp") self._wavelength = value.to(ureg.metre, "sp") @property def wavelength(self): return self._wavelength @wavelength.setter def wavelength(self, value): try: self._wavelength = value.to(ureg.metre, "sp") except AttributeError: value = value * ureg.meter self._wavelength = value self._frequency = value.to(ureg.hertz, "sp") self._wavenumber = value.to(1 / ureg.centimeter, "sp") class SRF(FwmuMixin): T_lookup_table = numpy.arange(0, 500.01, 0.05) * ureg.K lookup_table = None L_to_T = None def __init__(self, f, W): try: self.frequency = f.to("Hz", "sp") except AttributeError: self.frequency = ureg.Quantity(f, "Hz") self.W = W def __repr__(self): cf = self.centroid() if cf.to("Hz", "sp").m > 3e12: s = cf.to(ureg.um, "sp") else: s = cf.to(ureg.GHz, "sp") return "<{:s}: {:.4~}>".format(self.__class__.__name__, s) @classmethod def fromArtsXML(cls, sat, instr, ch): from pyarts import xml cf = config.conf[instr] centres = xml.load( cf["srf_backend_f"].format(sat=sat)) gfs = xml.load( cf["srf_backend_response"].format(sat=sat)) freq = gfs[ch - 1].grids[0] + centres[ch - 1] response = gfs[ch - 1].data return cls(freq, response) @classmethod def fromRTTOV(cls, sat, instr, ch): cf = config.conf[instr] M = numpy.loadtxt(cf["srf_rttov"].format(sat=sat, ch=ch), skiprows=4) wn = ureg.Quantity(M[:, 0], 1/ureg.cm) W = M[:, 1] return cls(wn, W) def centroid(self): return numpy.average( self.frequency, weights=self.W) * self.frequency.units def blackbody_radiance(self, T, spectral=True): try: T = T.to("K") except AttributeError: T = ureg.Quantity(T, "K") T = ureg.Quantity(numpy.atleast_1d(T), T.u) shp = T.shape return self.integrate_radiances( self.frequency, planck_f( self.frequency[numpy.newaxis, :], T.reshape((-1,))[:, numpy.newaxis]), spectral=spectral).reshape(shp) def make_lookup_table(self): self.lookup_table = numpy.zeros( shape=(2, self.T_lookup_table.size), dtype=numpy.float64) self.lookup_table[0, :] = self.T_lookup_table self.lookup_table[1, :] = self.blackbody_radiance(self.T_lookup_table) self.L_to_T = scipy.interpolate.interp1d(self.lookup_table[1, :], self.lookup_table[0, :], kind='linear', bounds_error=False, fill_value=(0, 2000))
MIT License
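A hedged usage sketch for integrate_radiances. The SRF band, frequency grid, and temperatures are illustrative, and it is assumed that typhon's planck_f broadcasts over pint Quantities, as it does in SRF.blackbody_radiance in the context above.

import numpy
from typhon.physics.units.common import ureg
from typhon.physics.units.em import SRF, planck_f

# Toy SRF: a flat response over a narrow band (values are illustrative only).
f_srf = ureg.Quantity(numpy.linspace(182e9, 184e9, 51), "Hz")
srf = SRF(f_srf, numpy.ones(51))

# Spectral radiances for two blackbody scenes on a coarser frequency grid.
f = ureg.Quantity(numpy.linspace(180e9, 186e9, 301), "Hz")
T = ureg.Quantity(numpy.array([250.0, 290.0]), "K")
L = planck_f(f[numpy.newaxis, :], T[:, numpy.newaxis])

ch_rad = srf.integrate_radiances(f, L, spectral=True)   # one spectral radiance per scene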
andremiras/etherollapp
src/etherollapp/etheroll/ui_utils.py
Dialog.create_dialog
python
def create_dialog(cls, title, body):
    dialog = cls.create_dialog_helper(title, body)
    dialog.add_action_button(
        "Dismiss",
        action=lambda *x: dialog.dismiss())
    return dialog
Creates a dialog from the given title and body, adds it to the dialog tracking list, and appends a dismiss action.
https://github.com/andremiras/etherollapp/blob/2ccc30fad736a6fee0cba8b99c521bee6ad13087/src/etherollapp/etheroll/ui_utils.py#L100-L110
import os import threading from typing import List from kivy.app import App from kivy.clock import mainthread from kivy.lang import Builder from kivy.metrics import dp from kivy.uix.boxlayout import BoxLayout from kivy.uix.screenmanager import Screen from kivymd.dialog import MDDialog from kivymd.label import MDLabel from kivymd.snackbar import Snackbar from layoutmargin import AddMargin, MarginLayout def load_kv_from_py(f): filename = os.path.basename(os.path.splitext(f)[0]) Builder.load_file( os.path.join( os.path.dirname(os.path.abspath(f)), filename + '.kv' ) ) class Dialog: dialogs: List[MDDialog] = [] __lock = threading.Lock() @staticmethod @mainthread def snackbar_message(text): Snackbar(text=text).show() @classmethod def show_invalid_form_dialog(cls): title = "Invalid form" body = "Please check form fields." dialog = cls.create_dialog(title, body) dialog.open() @classmethod def on_dialog_dismiss(cls, dialog): with cls.__lock: try: cls.dialogs.remove(dialog) except ValueError: pass @classmethod def dismiss_all_dialogs(cls): dialogs = cls.dialogs[:] for dialog in dialogs: dialog.dismiss() @classmethod def create_dialog_content_helper(cls, title, content): dialog = MDDialog( title=title, content=content, size_hint=(.8, None), height=dp(250), auto_dismiss=False) dialog.bind(on_dismiss=cls.on_dialog_dismiss) with cls.__lock: cls.dialogs.append(dialog) return dialog @classmethod def create_dialog_helper(cls, title, body): content = MDLabel( font_style='Body1', theme_text_color='Secondary', text=body, size_hint_y=None, valign='top') content.bind(texture_size=content.setter('size')) dialog = cls.create_dialog_content_helper(title, content) return dialog @classmethod
MIT License
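A hedged usage sketch; the title and body strings are made up, and a running Kivy application with the KivyMD widgets from the context is assumed.

dialog = Dialog.create_dialog("Network error", "Could not reach the node, please retry.")
dialog.open()

# Dialogs created this way are tracked, so they can all be dismissed at once later.
Dialog.dismiss_all_dialogs()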
ronnyandersson/zignal
zignal/filters/linearfilter.py
Filter.phase_resp
python
def phase_resp(self, frequencies=None, unwrap=False):
    w, h = self.complex_freq_resp(frequencies)
    phase = np.angle(h, deg=False)
    phase = np.unwrap(phase) if unwrap else phase
    phase = np.rad2deg(phase)
    freqs = rad2hz(w, self.fs)
    return freqs, phase
Calculate the real phase response, in degrees.
https://github.com/ronnyandersson/zignal/blob/b519d8a949974d9e78d974fa3c478958e00203a3/zignal/filters/linearfilter.py#L164-L172
import logging import matplotlib.pyplot as plt import numpy as np import scipy.signal from zignal import hz2rad, rad2hz class Filter(object): def __init__(self, B=None, A=None, fs=96000): self._logger = logging.getLogger(__name__) assert fs > 0 self.fs = fs self._B = None self._A = None self.set_coefficients(B=B, A=A) def __str__(self): s = '=======================================\n' s += 'classname : %s\n' % self.__class__.__name__ s += 'sample rate : %.1f [Hz]\n' % self.fs s += 'feedforward (B) : %s\n' % str(self._B) s += 'feedback (A) : %s\n' % str(self._A) s += 'number of zeros : %i\n' % (len(self._B)-1) s += 'number of poles : %i\n' % (len(self._A)-1) s += 'minimum phase? : %s\n' % ("Yes" if self.is_minimum_phase() else "No") s += '-----------------:---------------------\n' return s def __repr__(self): return "Filter(B=%s, A=%s, fs=%s)" % (list(self._B), list(self._A), self.fs) def filter_samples(self, samples): return scipy.signal.lfilter(self._B, self._A, samples, axis=0) def set_coefficients(self, B=None, A=None): self._logger.debug("set coefficients (class Filter)") self._B = np.array((1,)) if B is None else np.array(B) self._A = np.array((1,)) if A is None else np.array(A) assert len(self._B) != 0 assert len(self._A) != 0 def get_coefficients(self): return self._B, self._A def get_feed_forward(self): return self._B def get_feed_back(self): return self._A def normalise(self): assert len(self._A) >= 1 assert len(self._B) >= 1 a0 = self._A[0] self._logger.debug("normalising using a0: %.4f" % a0) self._B = self._B/a0 self._A = self._A/a0 def is_stable(self): isStable = True unused_zeros, poles, unused_gain = scipy.signal.tf2zpk(self._B, self._A) for pole in poles: if not np.abs(pole) < 1.0: isStable = False return isStable def is_minimum_phase(self): isMinPhase = True zeros, poles, unused_gain = scipy.signal.tf2zpk(self._B, self._A) for pole in poles: if not np.abs(pole) < 1.0: isMinPhase = False for zero in zeros: if not np.abs(zero) < 1.0: isMinPhase = False return isMinPhase def complex_freq_resp(self, frequencies=None): if frequencies is None: w, h = scipy.signal.freqz(self._B, self._A, worN=None) elif isinstance(frequencies, int): w, h = scipy.signal.freqz(self._B, self._A, worN=frequencies) else: w, h = scipy.signal.freqz(self._B, self._A, worN=hz2rad(frequencies, self.fs)) return w, h def magnitude_resp(self, frequencies=None): w, h = self.complex_freq_resp(frequencies) mag = 20*np.log10(np.absolute(h)) freqs = rad2hz(w, self.fs) return freqs, mag
MIT License
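A short usage sketch for phase_resp; the filter coefficients are illustrative, not taken from the source.

import numpy as np
from zignal.filters.linearfilter import Filter

filt = Filter(B=(0.5, 0.5), A=(1.0,), fs=48000)   # simple two-tap FIR, illustrative

freqs, phase_deg = filt.phase_resp()              # default dense frequency grid
freqs_sel, phase_sel = filt.phase_resp(frequencies=np.array([100.0, 1000.0, 10000.0]),
                                       unwrap=True)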
rhming/unicomdailytask
Crypto/Hash/SHA1.py
SHA1Hash.new
python
def new(self, data=None):
    return SHA1Hash(data)
Create a fresh SHA-1 hash object.
https://github.com/rhming/unicomdailytask/blob/542cfe86fe010748fc15b0eff45f16673c0f2b62/Crypto/Hash/SHA1.py#L142-L145
from Crypto.Util.py3compat import * from Crypto.Util._raw_api import (load_pycryptodome_raw_lib, VoidPointer, SmartPointer, create_string_buffer, get_raw_buffer, c_size_t, c_uint8_ptr) _raw_sha1_lib = load_pycryptodome_raw_lib("Crypto.Hash._SHA1", """ #define SHA1_DIGEST_SIZE 20 int SHA1_init(void **shaState); int SHA1_destroy(void *shaState); int SHA1_update(void *hs, const uint8_t *buf, size_t len); int SHA1_digest(const void *shaState, uint8_t digest[SHA1_DIGEST_SIZE]); int SHA1_copy(const void *src, void *dst); int SHA1_pbkdf2_hmac_assist(const void *inner, const void *outer, const uint8_t first_digest[SHA1_DIGEST_SIZE], uint8_t final_digest[SHA1_DIGEST_SIZE], size_t iterations); """) class SHA1Hash(object): digest_size = 20 block_size = 64 oid = "1.3.14.3.2.26" def __init__(self, data=None): state = VoidPointer() result = _raw_sha1_lib.SHA1_init(state.address_of()) if result: raise ValueError("Error %d while instantiating SHA1" % result) self._state = SmartPointer(state.get(), _raw_sha1_lib.SHA1_destroy) if data: self.update(data) def update(self, data): result = _raw_sha1_lib.SHA1_update(self._state.get(), c_uint8_ptr(data), c_size_t(len(data))) if result: raise ValueError("Error %d while instantiating SHA1" % result) def digest(self): bfr = create_string_buffer(self.digest_size) result = _raw_sha1_lib.SHA1_digest(self._state.get(), bfr) if result: raise ValueError("Error %d while instantiating SHA1" % result) return get_raw_buffer(bfr) def hexdigest(self): return "".join(["%02x" % bord(x) for x in self.digest()]) def copy(self): clone = SHA1Hash() result = _raw_sha1_lib.SHA1_copy(self._state.get(), clone._state.get()) if result: raise ValueError("Error %d while copying SHA1" % result) return clone
MIT License
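A small sketch using only what the surrounding context defines; new() simply returns a fresh, independent SHA1Hash rather than a copy of the caller's internal state.

h1 = SHA1Hash(b"hello")
h2 = h1.new(b"hello")        # fresh object, independent of h1
h1.update(b" world")

print(h1.hexdigest())        # digest of b"hello world"
print(h2.hexdigest())        # digest of b"hello"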
tanghaibao/jcvi
jcvi/graphics/glyph.py
get_asymmetry
python
def get_asymmetry(ax, radius):
    x0, y0 = ax.transAxes.transform((0, 0))
    x1, y1 = ax.transAxes.transform((1, 1))
    dx = x1 - x0
    dy = y1 - y0
    maxd = max(dx, dy)
    width = radius * maxd / dx
    height = radius * maxd / dy
    return width, height
Calculates the asymmetry of the x and y axes, for axes that do not keep an equal aspect ratio.

Args:
    ax (Axes): matplotlib axes
    radius (float):
https://github.com/tanghaibao/jcvi/blob/3b161796234670ce1c4894974eaeb590d35cf2a2/jcvi/graphics/glyph.py#L140-L154
import os.path as op import sys import numpy as np from random import choice, shuffle, random, randint from jcvi.apps.base import OptionParser, ActionDispatcher from jcvi.graphics.base import ( plt, Rectangle, CirclePolygon, Ellipse, FancyArrowPatch, Polygon, savefig, set3, get_map, ) from jcvi.utils.grouper import Grouper tstep = 0.05 Timing = np.arange(0, 1 + tstep, tstep) arrowprops = dict( arrowstyle="fancy", fc="lightslategray", ec="lightslategray", connectionstyle="arc3,rad=-0.05", ) class Bezier(object): def __init__(self, ax, p0, p1, p2, p3, color="m", alpha=0.2): pts = (p0, p1, p2, p3) px, py = zip(*pts) xt = self.get_array(px) yt = self.get_array(py) ax.plot(xt, yt, "-", color=color, alpha=alpha) def get_array(self, pts, t=Timing): p0, p1, p2, p3 = pts c = 3 * (p1 - p0) b = 3 * (p2 - p1) - c a = p3 - p0 - c - b tsquared = t ** 2 tcubic = tsquared * t return a * tcubic + b * tsquared + c * t + p0 class RoundLabel(object): def __init__(self, ax, x1, x2, t, lw=0, fill=False, fc="lavender", **kwargs): ax.text( x1, x2, t, ha="center", bbox=dict(boxstyle="round", fill=fill, fc=fc, lw=lw), **kwargs ) class RoundRect(object): def __init__(self, ax, xy, width, height, shrink=0.1, label=None, **kwargs): shrink *= height x, y = xy pts = [] pts += plot_cap( (x + width - shrink, y + height - shrink), np.radians(range(0, 90)), shrink ) pts += [[x + width - shrink, y + height], [x + shrink, y + height]] pts += plot_cap( (x + shrink, y + height - shrink), np.radians(range(90, 180)), shrink ) pts += [[x, y + height - shrink], [x, y + shrink]] pts += plot_cap((x + shrink, y + shrink), np.radians(range(180, 270)), shrink) pts += [[x + shrink, y], [x + width - shrink, y]] pts += plot_cap( (x + width - shrink, y + shrink), np.radians(range(270, 360)), shrink ) pts += [[x + width, y + shrink], [x + width, y + height - shrink]] p1 = Polygon(pts, **kwargs) ax.add_patch(p1) if label: ax.text( x + width / 2, y + height / 2, label, size=10, ha="center", va="center", color="w", ) class DoubleSquare(object): def __init__(self, ax, x, y, radius=0.01, **kwargs): d = radius * 1.5 ax.add_patch(Rectangle((x - d, y - d), 2 * d, 2 * d, fc="w", ec="k", zorder=10)) d = radius ax.add_patch(Rectangle((x - d, y - d), 2 * d, 2 * d, zorder=10, **kwargs)) class DoubleCircle(object): def __init__(self, ax, x, y, radius=0.01, **kwargs): ax.add_patch(CirclePolygon((x, y), radius * 1.4, resolution=50, fc="w", ec="k")) ax.add_patch(CirclePolygon((x, y), radius, resolution=50, **kwargs))
BSD 2-Clause Simplified License
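An illustrative sketch: get_asymmetry compensates for unequal axes scaling so a "circle" drawn in axes coordinates still renders round. The figure size, radius, and output file name are arbitrary.

import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse

fig, ax = plt.subplots(figsize=(8, 4))        # deliberately non-square axes
width, height = get_asymmetry(ax, 0.05)
ax.add_patch(Ellipse((0.5, 0.5), width, height, fc="lightslategray"))
plt.savefig("circle.png")                      # hypothetical output file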
powerapi-ng/powerapi
powerapi/dispatcher/dispatcher_actor.py
FormulaNameService.remove_formula
python
def remove_formula(self, formula_name_to_remove: str):
    formula_id_to_remove = None
    for formula_id, formula_name in self.formula_name.items():
        if formula_name_to_remove == formula_name:
            formula_id_to_remove = formula_id
    if formula_id_to_remove is not None:
        del self.formula_name[formula_id_to_remove]
    else:
        raise AttributeError
Remove from the pool the formula with the given name.

:param formula_name_to_remove: name of the formula to remove
:raise AttributeError: if the pool doesn't contain any formula with this name
https://github.com/powerapi-ng/powerapi/blob/68b12f46a9da0805dd9be7f17f8f336ef5a6ce9c/powerapi/dispatcher/dispatcher_actor.py#L396-L409
from typing import Type, Tuple, List from thespian.actors import ActorAddress, ActorExitRequest, ChildActorExited, PoisonMessage from powerapi.actor import Actor, InitializationException from powerapi.formula import FormulaActor, FormulaValues from powerapi.dispatch_rule import DispatchRule from powerapi.utils import Tree from powerapi.report import Report from powerapi.message import StartMessage, DispatcherStartMessage, FormulaStartMessage, EndMessage, ErrorMessage, OKMessage from powerapi.dispatcher.blocking_detector import BlockingDetector from powerapi.dispatcher.route_table import RouteTable def _clean_list(id_list): id_list.sort() r_list = [] last_element = None for x in id_list: if x != last_element: r_list.append(x) last_element = x return r_list def _extract_formula_id(report: Report, dispatch_rule: DispatchRule, primary_dispatch_rule: DispatchRule) -> List[Tuple]: id_list = dispatch_rule.get_formula_id(report) if dispatch_rule.is_primary: return id_list def f(formula_id): return _match_report_id(formula_id, dispatch_rule, primary_dispatch_rule) return _clean_list(list(map(f, id_list))) def _match_report_id(report_id: Tuple, dispatch_rule: DispatchRule, primary_rule: DispatchRule) -> Tuple: new_report_id = () for i in range(len(report_id)): if i >= len(primary_rule.fields): return new_report_id if dispatch_rule.fields[i] == primary_rule.fields[i]: new_report_id += (report_id[i], ) else: return new_report_id return new_report_id class DispatcherActor(Actor): def __init__(self): Actor.__init__(self, DispatcherStartMessage) self.formula_class: Type[FormulaActor] = None self.formula_values: FormulaValues = None self.route_table: RouteTable = None self.device_id = None self._exit_mode = False self.formula_name_service = None self.formula_waiting_service = FormulaWaitingService() self.formula_pool = {} self.formula_number_id = 0 def _initialization(self, message: StartMessage): Actor._initialization(self, message) self.formula_class = message.formula_class self.formula_values = message.formula_values self.route_table = message.route_table self.device_id = message.device_id self.formula_name_service = FormulaNameService() if self.route_table.primary_dispatch_rule is None: raise InitializationException('Dispatcher initialized without primary dispatch rule') def receiveMsg_PoisonMessage(self, message: PoisonMessage, sender: ActorAddress): poison_message = message.poisonMessage for formula_name, (formula, blocking_detector) in self.formula_pool.items(): if sender == formula: log_line = 'received poison messsage from formula ' + formula_name + ' for message ' + str(poison_message) log_line += 'with this error stack : ' + message.details self.log_debug(log_line) blocking_detector.notify_poison_received(poison_message) if blocking_detector.is_blocked(): self.log_debug('formula ' + formula_name + ' is blocked : ' + str(blocking_detector.is_blocked())) self.log_debug('restart formula ' + formula_name) self.log_error('formula ' + formula_name + ' is blocked after this error : ' + message.details) self._restart_formula(formula_name) return def receiveMsg_ActorExitRequest(self, message: ActorExitRequest, sender: ActorAddress): Actor.receiveMsg_ActorExitRequest(self, message, sender) for _, (formula, __) in self.formula_pool.items(): self.send(formula, ActorExitRequest()) for _, formula in self.formula_waiting_service.get_all_formula(): self.send(formula, ActorExitRequest()) def _gen_formula_name(self, formula_id): name = 'formula' + str(self.formula_number_id) self.formula_number_id += 1 for field 
in formula_id: name += '__' + str(field) return name def _send_message(self, formula_name, message): try: formula, blocking_detector = self.formula_pool[formula_name] message.dispatcher_report_id = blocking_detector.get_message_id() self.log_debug('send ' + str(message) + ' to ' + formula_name) self.send(formula, message) except KeyError: self.formula_waiting_service.add_message(formula_name, message) def receiveMsg_Report(self, message: Report, _: ActorAddress): self.log_debug('received ' + str(message)) dispatch_rule = self.route_table.get_dispatch_rule(message) primary_dispatch_rule = self.route_table.primary_dispatch_rule if dispatch_rule is None: self.log_warning('no dispatch rule for report ' + str(message)) return formula_ids = _extract_formula_id(message, dispatch_rule, primary_dispatch_rule) for formula_id in formula_ids: primary_rule_fields = primary_dispatch_rule.fields if len(formula_id) == len(primary_rule_fields): try: formula_name = self.formula_name_service.get_direct_formula_name(formula_id) self._send_message(formula_name, message) except KeyError: formula_name = self._gen_formula_name(formula_id) self.log_info('create formula ' + formula_name) formula = self._create_formula(formula_id, formula_name) self.formula_name_service.add(formula_id, formula_name) self.formula_waiting_service.add(formula_name, formula) self.formula_waiting_service.add_message(formula_name, message) else: for formula_name in self.formula_name_service.get_corresponding_formula(list(formula_id)): self._send_message(formula_name, message) def _get_formula_name_from_address(self, formula_address: ActorAddress): for name, (address, _) in self.formula_pool.items(): if formula_address == address: return name return self.formula_waiting_service.get_formula_by_address(formula_address) def receiveMsg_ChildActorExited(self, message: ChildActorExited, _: ActorAddress): try: formula_name = self._get_formula_name_from_address(message.childAddress) except AttributeError: return self.formula_name_service.remove_formula(formula_name) del self.formula_pool[formula_name] if self._exit_mode and self.formula_pool == {}: for _, pusher in self.formula_values.pushers.items(): self.send(pusher, EndMessage(self.name)) self.send(self.myAddress, ActorExitRequest()) def receiveMsg_ErrorMessage(self, message: ErrorMessage, _: ActorAddress): self.log_info('error while trying to start ' + message.sender_name + ' : ' + message.error_message) self.formula_waiting_service.remove_formula(message.sender_name) def receiveMsg_OKMessage(self, message: OKMessage, sender: ActorAddress): formula_name = message.sender_name waiting_messages = self.formula_waiting_service.get_waiting_messages(formula_name) self.formula_waiting_service.remove_formula(formula_name) self.formula_pool[formula_name] = (sender, BlockingDetector()) for waiting_msg in waiting_messages: self._send_message(formula_name, waiting_msg) self.log_info('formula ' + formula_name + 'started') def receiveMsg_EndMessage(self, message: EndMessage, _: ActorAddress): self.log_debug('received message ' + str(message)) self._exit_mode = True for _, (formula, __) in self.formula_pool.items(): self.send(formula, EndMessage(self.name)) for formula_name, _ in self.formula_waiting_service.get_all_formula(): self.formula_waiting_service.add_message(formula_name, message) def _restart_formula(self, formula_name: str): formula, _ = self.formula_pool[formula_name] formula_id = self.formula_name_service.get_formula_id(formula_name) self.formula_name_service.remove_formula(formula_name) del 
self.formula_pool[formula_name] self.send(formula, ActorExitRequest()) new_name = self._gen_formula_name(formula_id) formula = self._create_formula(formula_id, new_name) self.formula_name_service.add(formula_id, new_name) self.formula_waiting_service.add(new_name, formula) self.log_debug('restart formula' + formula_name + ' with new name : ' + new_name) def _create_formula(self, formula_id: Tuple, formula_name: str) -> ActorAddress: formula = self.createActor(self.formula_class) domain_values = self.formula_class.gen_domain_values(self.device_id, formula_id) start_message = FormulaStartMessage(self.name, formula_name, self.formula_values, domain_values) self.send(formula, start_message) return formula class FormulaWaitingService: def __init__(self): self.formulas = {} self.waiting_messages = {} def get_all_formula(self) -> List[Tuple[str, ActorAddress]]: return self.formulas.items() def add(self, formula_name: str, formula_address: ActorAddress): self.formulas[formula_name] = formula_address self.waiting_messages[formula_name] = [] def add_message(self, formula_name: str, message: Report): self.waiting_messages[formula_name].append(message) def get_waiting_messages(self, formula_name: str) -> List[Report]: if formula_name in self.formulas: return self.waiting_messages[formula_name] raise AttributeError('unknow formula ' + str(formula_name)) def get_formula_by_address(self, formula_address: ActorAddress) -> str: for name, address in self.formulas.items(): if formula_address == address: return name raise AttributeError('no such formula with address ' + str(formula_address)) def remove_formula(self, formula_name: str): if formula_name in self.formulas: del self.formulas[formula_name] del self.waiting_messages[formula_name] else: raise AttributeError('unknow formula ' + str(formula_name)) class FormulaNameService: def __init__(self): self.formula_name = {} self.formula_tree = Tree() def add(self, formula_id, formula_name: str): self.formula_name[formula_id] = formula_name self.formula_tree.add(list(formula_id), formula_name) def get_direct_formula_name(self, formula_id) -> str: return self.formula_name[formula_id] def get_formula_id(self, formula_name_to_find: str) -> Tuple: for formula_id, formula_name in self.formula_name.items(): if formula_name == formula_name_to_find: return formula_id return None def get_corresponding_formula(self, formula_id): return self.formula_tree.get(formula_id)
BSD 3-Clause New or Revised License
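A hedged sketch of the FormulaNameService pool; the formula id tuple and formula name are hypothetical.

names = FormulaNameService()
names.add(("sensor", "cpu_0"), "formula0")     # hypothetical id and name

names.remove_formula("formula0")               # drops the matching entry
try:
    names.remove_formula("formula0")           # nothing left with that name
except AttributeError:
    print("formula already removed")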
artyompal/tpu_models
models/official/mask_rcnn/anchors.py
AnchorLabeler.__init__
python
def __init__(self, anchors, num_classes, match_threshold=0.7,
             unmatched_threshold=0.3, rpn_batch_size_per_im=256,
             rpn_fg_fraction=0.5):
    similarity_calc = region_similarity_calculator.IouSimilarity()
    matcher = argmax_matcher.ArgMaxMatcher(
        match_threshold,
        unmatched_threshold=unmatched_threshold,
        negatives_lower_than_unmatched=True,
        force_match_for_each_row=True)
    box_coder = faster_rcnn_box_coder.FasterRcnnBoxCoder()
    self._target_assigner = target_assigner.TargetAssigner(
        similarity_calc, matcher, box_coder)
    self._anchors = anchors
    self._match_threshold = match_threshold
    self._unmatched_threshold = unmatched_threshold
    self._rpn_batch_size_per_im = rpn_batch_size_per_im
    self._rpn_fg_fraction = rpn_fg_fraction
    self._num_classes = num_classes
Constructs anchor labeler to assign labels to anchors.

Args:
    anchors: an instance of class Anchors.
    num_classes: integer number representing number of classes in the dataset.
    match_threshold: a float number between 0 and 1 representing the lower-bound threshold to assign positive labels for anchors. An anchor with a score over the threshold is labeled positive.
    unmatched_threshold: a float number between 0 and 1 representing the upper-bound threshold to assign negative labels for anchors. An anchor with a score below the threshold is labeled negative.
    rpn_batch_size_per_im: an integer number that represents the number of sampled anchors per image in the first stage (region proposal network).
    rpn_fg_fraction: a float number between 0 and 1 representing the fraction of positive anchors (foreground) in the first stage.
https://github.com/artyompal/tpu_models/blob/639306f30e085bb1cdb5b1118a4c96a2dbe14e3e/models/official/mask_rcnn/anchors.py#L173-L207
from __future__ import absolute_import from __future__ import division from __future__ import print_function from collections import OrderedDict import numpy as np import tensorflow as tf from object_detection import argmax_matcher from object_detection import balanced_positive_negative_sampler from object_detection import box_list from object_detection import faster_rcnn_box_coder from object_detection import region_similarity_calculator from object_detection import target_assigner def _generate_anchor_configs(min_level, max_level, num_scales, aspect_ratios): anchor_configs = {} for level in range(min_level, max_level + 1): anchor_configs[level] = [] for scale_octave in range(num_scales): for aspect in aspect_ratios: anchor_configs[level].append( (2**level, scale_octave / float(num_scales), aspect)) return anchor_configs def _generate_anchor_boxes(image_size, anchor_scale, anchor_configs): boxes_all = [] for _, configs in anchor_configs.items(): boxes_level = [] for config in configs: stride, octave_scale, aspect = config if image_size[0] % stride != 0 or image_size[1] % stride != 0: raise ValueError('input size must be divided by the stride.') base_anchor_size = anchor_scale * stride * 2**octave_scale anchor_size_x_2 = base_anchor_size * aspect[0] / 2.0 anchor_size_y_2 = base_anchor_size * aspect[1] / 2.0 x = np.arange(stride / 2, image_size[1], stride) y = np.arange(stride / 2, image_size[0], stride) xv, yv = np.meshgrid(x, y) xv = xv.reshape(-1) yv = yv.reshape(-1) boxes = np.vstack((yv - anchor_size_y_2, xv - anchor_size_x_2, yv + anchor_size_y_2, xv + anchor_size_x_2)) boxes = np.swapaxes(boxes, 0, 1) boxes_level.append(np.expand_dims(boxes, axis=1)) boxes_level = np.concatenate(boxes_level, axis=1) boxes_all.append(boxes_level.reshape([-1, 4])) anchor_boxes = np.vstack(boxes_all) return anchor_boxes class Anchors(object): def __init__(self, min_level, max_level, num_scales, aspect_ratios, anchor_scale, image_size): self.min_level = min_level self.max_level = max_level self.num_scales = num_scales self.aspect_ratios = aspect_ratios self.anchor_scale = anchor_scale self.image_size = image_size self.config = self._generate_configs() self.boxes = self._generate_boxes() def _generate_configs(self): return _generate_anchor_configs(self.min_level, self.max_level, self.num_scales, self.aspect_ratios) def _generate_boxes(self): boxes = _generate_anchor_boxes(self.image_size, self.anchor_scale, self.config) boxes = tf.convert_to_tensor(boxes, dtype=tf.float32) return boxes def get_anchors_per_location(self): return self.num_scales * len(self.aspect_ratios) def get_unpacked_boxes(self): return self.unpack_labels(self.boxes) def unpack_labels(self, labels): labels_unpacked = OrderedDict() count = 0 for level in range(self.min_level, self.max_level + 1): feat_size0 = int(self.image_size[0] / 2**level) feat_size1 = int(self.image_size[1] / 2**level) steps = feat_size0 * feat_size1 * self.get_anchors_per_location() indices = tf.range(count, count + steps) count += steps labels_unpacked[level] = tf.reshape( tf.gather(labels, indices), [feat_size0, feat_size1, -1]) return labels_unpacked class AnchorLabeler(object):
Apache License 2.0
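A construction sketch with illustrative RetinaNet / Mask R-CNN-style settings; the exact values are not taken from the source.

anchors = Anchors(min_level=3, max_level=7, num_scales=3,
                  aspect_ratios=[(1.0, 1.0), (1.4, 0.7), (0.7, 1.4)],
                  anchor_scale=8.0, image_size=(512, 512))   # 512 is divisible by every stride
labeler = AnchorLabeler(anchors, num_classes=91)             # COCO-style class count, other args default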
rjt1990/pyflux
pyflux/gas/gas.py
GAS._sim_prediction_bayes
python
def _sim_prediction_bayes(self, h, simulations):
    sim_vector = np.zeros([simulations, h])
    for n in range(0, simulations):
        t_z = self.draw_latent_variables(nsims=1).T[0]
        theta, Y, scores = self._model(t_z)
        t_z = np.array([self.latent_variables.z_list[k].prior.transform(t_z[k])
                        for k in range(t_z.shape[0])])
        model_scale, model_shape, model_skewness = self._get_scale_and_shape(t_z)
        Y_exp = Y.copy()
        theta_exp = theta.copy()
        scores_exp = scores.copy()
        for t in range(0, h):
            new_value = t_z[0]
            if self.ar != 0:
                for j in range(1, self.ar+1):
                    new_value += t_z[j]*theta_exp[-j]
            if self.sc != 0:
                for k in range(1, self.sc+1):
                    new_value += t_z[k+self.ar]*scores_exp[-k]
            if self.model_name2 == "Exponential":
                rnd_value = self.family.draw_variable(1.0/self.link(new_value),
                                                      model_scale, model_shape, model_skewness, 1)[0]
            else:
                rnd_value = self.family.draw_variable(self.link(new_value),
                                                      model_scale, model_shape, model_skewness, 1)[0]
            Y_exp = np.append(Y_exp, [rnd_value])
            theta_exp = np.append(theta_exp, [new_value])
            scores_exp = np.append(scores_exp, scores[np.random.randint(scores.shape[0])])
        sim_vector[n] = Y_exp[-h:]
    return np.transpose(sim_vector)
Simulates an h-step-ahead mean prediction.

Parameters
----------
h : int
    How many steps ahead for the prediction
simulations : int
    How many simulations to perform

Returns
----------
Matrix of simulations
https://github.com/rjt1990/pyflux/blob/297f2afc2095acd97c12e827dd500e8ea5da0c0f/pyflux/gas/gas.py#L477-L530
import sys if sys.version_info < (3,): range = xrange import numpy as np import pandas as pd import scipy.stats as ss import scipy.special as sp from .. import families as fam from .. import tsm as tsm from .. import data_check as dc from .gas_core_recursions import gas_recursion class GAS(tsm.TSM): def __init__(self, data, ar, sc, family, integ=0, target=None): super(GAS,self).__init__('GAS') self.ar = ar self.sc = sc self.integ = integ self.z_no = self.ar + self.sc + 1 self.max_lag = max(self.ar,self.sc) self._z_hide = 0 self.supported_methods = ["MLE","PML","Laplace","M-H","BBVI"] self.default_method = "MLE" self.multivariate_model = False self.data, self.data_name, self.is_pandas, self.index = dc.data_check(data,target) self.data = self.data.astype(np.float) self.data_original = self.data.copy() for order in range(0,self.integ): self.data = np.diff(self.data) self.data_name = "Differenced " + self.data_name self.data_length = self.data.shape[0] self._create_model_matrices() self._create_latent_variables() self.family = family self.model_name2, self.link, self.scale, self.shape, self.skewness, self.mean_transform, self.cythonized = self.family.setup() if self.cythonized is True: self._model = self._cythonized_model self._mb_model = self._cythonized_mb_model if self.family.gradient_only is True: self.recursion = self.family.gradient_recursion() else: self.recursion = self.family.newton_recursion() else: self._model = self._uncythonized_model self._mb_model = self._uncythonized_mb_model self.model_name = self.model_name2 + "GAS (" + str(self.ar) + "," + str(self.integ) + "," + str(self.sc) + ")" for no, i in enumerate(self.family.build_latent_variables()): self.latent_variables.add_z(i[0], i[1], i[2]) self.latent_variables.z_list[1+no+self.ar+self.sc].start = i[3] self.family_z_no = len(self.family.build_latent_variables()) self.latent_variables.z_list[0].start = self.mean_transform(np.mean(self.data)) self.z_no = len(self.latent_variables.z_list) def _create_model_matrices(self): self.model_Y = np.array(self.data[self.max_lag:self.data.shape[0]]) self.model_scores = np.zeros(self.model_Y.shape[0]) def _create_latent_variables(self): self.latent_variables.add_z('Constant', fam.Normal(0, 3, transform=None), fam.Normal(0, 3)) for ar_term in range(self.ar): self.latent_variables.add_z('AR(' + str(ar_term+1) + ')', fam.Normal(0, 0.5, transform=None), fam.Normal(0, 3)) for sc_term in range(self.sc): self.latent_variables.add_z('SC(' + str(sc_term+1) + ')', fam.Normal(0, 0.5, transform=None), fam.Normal(0, 3)) def _get_scale_and_shape(self,parm): if self.scale is True: if self.shape is True: model_shape = parm[-1] model_scale = parm[-2] else: model_shape = 0 model_scale = parm[-1] else: model_scale = 0 model_shape = 0 if self.skewness is True: model_skewness = parm[-3] else: model_skewness = 0 return model_scale, model_shape, model_skewness def _get_scale_and_shape_sim(self, transformed_lvs): if self.scale is True: if self.shape is True: model_shape = self.latent_variables.z_list[-1].prior.transform(transformed_lvs[-1, :]) model_scale = self.latent_variables.z_list[-2].prior.transform(transformed_lvs[-2, :]) else: model_shape = np.zeros(transformed_lvs.shape[1]) model_scale = self.latent_variables.z_list[-1].prior.transform(transformed_lvs[-1, :]) else: model_scale = np.zeros(transformed_lvs.shape[1]) model_shape = np.zeros(transformed_lvs.shape[1]) if self.skewness is True: model_skewness = self.latent_variables.z_list[-3].prior.transform(transformed_lvs[-3, :]) else: model_skewness = 
np.zeros(transformed_lvs.shape[1]) return model_scale, model_shape, model_skewness def _cythonized_model(self, beta): parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) theta = np.ones(self.model_Y.shape[0])*parm[0] model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm) theta, self.model_scores = self.recursion(parm, theta, self.model_scores, self.model_Y, self.ar, self.sc, self.model_Y.shape[0], model_scale, model_shape, model_skewness, self.max_lag) return np.array(theta), self.model_Y, np.array(self.model_scores) def _cythonized_mb_model(self, beta, mini_batch): rand_int = np.random.randint(low=0, high=self.data.shape[0]-mini_batch-self.max_lag+1) sample = np.arange(start=rand_int, stop=rand_int+mini_batch) Y = self.model_Y[sample] parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) theta = np.ones(Y.shape[0])*parm[0] model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm) theta, self.model_scores = self.recursion(parm, theta, self.model_scores, Y, self.ar, self.sc, Y.shape[0], model_scale, model_shape, model_skewness, self.max_lag) return np.array(theta), Y, np.array(self.model_scores) def _uncythonized_model(self, beta): parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) theta = np.ones(self.model_Y.shape[0])*parm[0] model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm) theta, self.model_scores = gas_recursion(parm, theta, self.model_scores, self.model_Y, self.ar, self.sc, self.model_Y.shape[0], self.family.score_function, self.link, model_scale, model_shape, model_skewness, self.max_lag) return theta, self.model_Y, self.model_scores def _uncythonized_mb_model(self, beta, mini_batch): rand_int = np.random.randint(low=0, high=self.data.shape[0]-mini_batch-self.max_lag+1) sample = np.arange(start=rand_int, stop=rand_int+mini_batch) Y = self.model_Y[sample] parm = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) theta = np.ones(Y.shape[0])*parm[0] model_scale, model_shape, model_skewness = self._get_scale_and_shape(parm) theta, self.model_scores = gas_recursion(parm, theta, self.model_scores, Y, self.ar, self.sc, Y.shape[0], self.family.score_function, self.link, model_scale, model_shape, model_skewness, self.max_lag) return theta, Y, self.model_scores def _mean_prediction(self, theta, Y, scores, h, t_params): Y_exp = Y.copy() theta_exp = theta.copy() scores_exp = scores.copy() for t in range(0,h): new_value = t_params[0] if self.ar != 0: for j in range(1,self.ar+1): new_value += t_params[j]*theta_exp[-j] if self.sc != 0: for k in range(1,self.sc+1): new_value += t_params[k+self.ar]*scores_exp[-k] if self.model_name2 == "Exponential": Y_exp = np.append(Y_exp, [1.0/self.link(new_value)]) else: Y_exp = np.append(Y_exp, [self.link(new_value)]) theta_exp = np.append(theta_exp, [new_value]) scores_exp = np.append(scores_exp, [0]) return Y_exp def _preoptimize_model(self, initials, method): if not (self.ar==0 and self.sc == 0): toy_model = GAS(ar=0, sc=0, integ=self.integ, family=self.family, data=self.data_original) toy_model.fit(method) self.latent_variables.z_list[0].start = toy_model.latent_variables.get_z_values(transformed=False)[0] for extra_z in range(len(self.family.build_latent_variables())): self.latent_variables.z_list[1+self.ar+self.sc+extra_z].start = 
toy_model.latent_variables.get_z_values(transformed=False)[1+extra_z] random_starts = np.random.normal(0.3, 0.3, [self.ar+self.sc, 1000]) best_start = self.latent_variables.get_z_starting_values() best_lik = self.neg_loglik(self.latent_variables.get_z_starting_values()) proposal_start = best_start.copy() for start in range(random_starts.shape[1]): proposal_start[1:1+self.ar+self.sc] = random_starts[:,start] proposal_start[0] = proposal_start[0]*(1.0-np.sum(random_starts[:self.ar,start])) proposal_likelihood = self.neg_loglik(proposal_start) if proposal_likelihood < best_lik: best_lik = proposal_likelihood best_start = proposal_start.copy() return best_start else: return initials def _sim_prediction(self, theta, Y,scores, h, t_params, simulations): model_scale, model_shape, model_skewness = self._get_scale_and_shape(t_params) sim_vector = np.zeros([simulations,h]) for n in range(0,simulations): Y_exp = Y.copy() theta_exp = theta.copy() scores_exp = scores.copy() for t in range(0, h): new_value = t_params[0] if self.ar != 0: for j in range(1, self.ar+1): new_value += t_params[j]*theta_exp[-j] if self.sc != 0: for k in range(1, self.sc+1): new_value += t_params[k+self.ar]*scores_exp[-k] if self.model_name2 == "Exponential": rnd_value = self.family.draw_variable(1.0/self.link(new_value), model_scale, model_shape, model_skewness, 1)[0] else: rnd_value = self.family.draw_variable(self.link(new_value), model_scale, model_shape, model_skewness, 1)[0] Y_exp = np.append(Y_exp, [rnd_value]) theta_exp = np.append(theta_exp, [new_value]) scores_exp = np.append(scores_exp, scores[np.random.randint(scores.shape[0])]) sim_vector[n] = Y_exp[-h:] return np.transpose(sim_vector)
BSD 3-Clause New or Revised License
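A hedged end-to-end sketch; it assumes pyflux's public Normal family and an M-H (Metropolis-Hastings) fit, so that draw_latent_variables has posterior samples to draw from. The data is synthetic and the method is private, so this is only illustrative.

import numpy as np
import pyflux as pf

data = np.random.normal(0, 1, 300)                 # synthetic series
model = pf.GAS(data=data, ar=1, sc=1, family=pf.Normal())
model.fit('M-H')                                   # Bayesian fit

sims = model._sim_prediction_bayes(h=5, simulations=100)   # array of shape (5, 100)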
doc-doc/next-qa
networks/q_v_transformer.py
padding_mask_q
python
def padding_mask_q(seq_q, seq_k):
    fake_k = torch.ones_like(seq_k)
    pad_mask = torch.bmm(seq_q, fake_k.transpose(1, 2))
    pad_mask = pad_mask.eq(0)
    return pad_mask
seq_k of shape (batch, k_len, k_feat) and seq_q of shape (batch, q_len, q_feat).
q and k are padded with 0. pad_mask is (batch, q_len, k_len).
In batch 0:

    [[x x x x]       [[0 0 0 0]
     [x x x x]   ->   [0 0 0 0]
     [0 0 0 0]]       [1 1 1 1]]

uint8
https://github.com/doc-doc/next-qa/blob/f54f850a91e64dca4452598154838924548f3b2f/networks/q_v_transformer.py#L48-L59
import torch import torch.nn as nn import torch.nn.functional as F import numpy as np import torchnlp_nn as nlpnn def padding_mask(seq_q, seq_k): seq_q = torch.unsqueeze(seq_q, 2) seq_k = torch.unsqueeze(seq_k, 2) pad_mask = torch.bmm(seq_q, seq_k.transpose(1, 2)) pad_mask = pad_mask.eq(0) return pad_mask def padding_mask_transformer(seq_q, seq_k): len_q = seq_q.size(1) pad_mask = seq_k.eq(0) pad_mask = pad_mask.unsqueeze(1).expand( -1, len_q, -1) return pad_mask def padding_mask_embedded(seq_q, seq_k): pad_mask = torch.bmm(seq_q, seq_k.transpose(1, 2)) pad_mask = pad_mask.eq(0) return pad_mask def padding_mask_k(seq_q, seq_k): fake_q = torch.ones_like(seq_q) pad_mask = torch.bmm(fake_q, seq_k.transpose(1, 2)) pad_mask = pad_mask.eq(0) return pad_mask
MIT License
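A tiny check of the mask semantics described in the docstring; the shapes are arbitrary.

import torch

seq_q = torch.ones(1, 3, 8)      # (batch, q_len, q_feat)
seq_k = torch.ones(1, 4, 8)      # (batch, k_len, k_feat)
seq_q[:, 2, :] = 0               # last query position is padding

mask = padding_mask_q(seq_q, seq_k)
print(mask.shape)                # torch.Size([1, 3, 4])
print(mask[0])                   # last row is all True: the padded query attends nowhere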
rsmusllp/king-phisher
king_phisher/client/widget/completion_providers.py
JinjaCompletionProvider.populate
python
def populate(self, context, match):
    proposal_terms = []
    if match.group('is_filter'):
        jinja_filter = match.group('filter') or ''
        proposal_terms = [term for term in self.jinja_filters if term.startswith(jinja_filter)]
    elif match.group('is_test'):
        jinja_test = match.group('test') or ''
        proposal_terms = [term for term in self.jinja_tests if term.startswith(jinja_test)]
    elif match.group('var'):
        tokens = match.group('var')
        tokens = tokens.split('.')
        proposal_terms = get_proposal_terms(self.jinja_tokens, tokens)
    proposal_terms = [(term.split('(', 1)[0], term) for term in proposal_terms]
    return proposal_terms
Utilizes the match from the regular expression check to check for possible matches of :py:attr:`.jinja_vars`.

:param context: The context for the completion.
:type context: :py:class:`GtkSource.CompletionContext`
:param match: The matching object.
:type match: `re.MatchObject`
:return: List of strings to be used for creation of proposals.
:rtype: list
https://github.com/rsmusllp/king-phisher/blob/6acbbd856f849d407cc904c075441e0cf13c25cf/king_phisher/client/widget/completion_providers.py#L289-L313
import logging import os import re from king_phisher import find from king_phisher import its from king_phisher import serializers from gi.repository import GObject from gi.repository import Gtk from gi.repository import GtkSource if its.mocked: _GObject_GObject = type('GObject.GObject', (object,), {'__module__': ''}) _GtkSource_CompletionProvider = type('GtkSource.CompletionProvider', (object,), {'__module__': ''}) else: _GObject_GObject = GObject.GObject _GtkSource_CompletionProvider = GtkSource.CompletionProvider def get_proposal_terms(search, tokens): if isinstance(tokens, str): tokens = [tokens] found = search.get(tokens[0], {}) if found: if tokens[1:]: found = get_proposal_terms(found, tokens[1:]) else: found = [] else: token_0 = tokens[0] found = [term for term in search.keys() if term.startswith(token_0) and term != token_0] return found class CustomCompletionProviderBase(GObject.GObject, GtkSource.CompletionProvider): data_file = None extraction_regex = r'' left_delimiter = None left_delimiter_adjustment = 0 left_limit = 512 name = 'Undefined' def __init__(self): super(CustomCompletionProviderBase, self).__init__() self.logger = logging.getLogger('KingPhisher.Client.' + self.__class__.__name__) if self.data_file is not None: completion_data = find.data_file(os.path.join('completion', self.data_file)) if completion_data is None: raise RuntimeError("failed to find completion data file '{0}'".format(self.data_file)) self.logger.debug("loading {0} completion data from: {1}".format(self.name, completion_data)) with open(completion_data, 'r') as file_h: completion_data = serializers.JSON.load(file_h) self.load_data(completion_data) def do_get_name(self): return self.name def load_data(self, completion_data): pass def populate(self, context, match): raise NotImplementedError() def extract(self, context): end_iter = context.get_iter() if not isinstance(end_iter, Gtk.TextIter): _, end_iter = context.get_iter() if not end_iter: return buf = end_iter.get_buffer() mov_iter = end_iter.copy() limit_iter = end_iter.copy() if self.left_limit: limit_iter.backward_chars(self.left_limit) mov_iter = mov_iter.backward_search(self.left_delimiter, Gtk.TextSearchFlags.VISIBLE_ONLY, limit=limit_iter) if not mov_iter: return mov_iter, _ = mov_iter if self.left_delimiter_adjustment > 0: mov_iter.forward_chars(self.left_delimiter_adjustment) elif self.left_delimiter_adjustment < 0: mov_iter.backward_chars(abs(self.left_delimiter_adjustment)) left_text = buf.get_text(mov_iter, end_iter, True) return self.extraction_regex.match(left_text) def do_match(self, context): return self.extract(context) is not None def do_populate(self, context): match = self.extract(context) if match is None: return proposals = [] try: matching_suggestions = self.populate(context, match) except Exception: self.logger.warning('encountered an exception in the completion populate routine', exc_info=True) return matching_suggestions.sort() for suggestion in matching_suggestions: if not suggestion: continue if isinstance(suggestion, str): item = GtkSource.CompletionItem(label=suggestion, text=suggestion) else: item = GtkSource.CompletionItem(label=suggestion[0], text=suggestion[1]) proposals.append(item) context.add_proposals(self, proposals, True) class HTMLCompletionProvider(CustomCompletionProviderBase): data_file = 'html.json' left_delimiter = '<' extraction_regex = re.compile( r'<(?P<tag>[a-z]+)' r'(?P<is_attr>\s+(?:[a-z_]+(=(?P<quote>["\'])(?:(\\.|[^\4])*)\4)?\s+)*(?P<attr>[a-z_]*))?' 
r'$' ) name = 'HTML' def load_data(self, completion_data): self.html_tags = completion_data def populate(self, context, match): proposal_terms = [] tag = match.group('tag') if match.group('is_attr'): if tag in self.html_tags: comp_attr = match.group('attr') or '' attrs = (self.html_tags[tag] or []) + ['class', 'id', 'style', 'title'] proposal_terms = [term for term in attrs if term.startswith(comp_attr)] proposal_terms = [(term[:-1], term[:-1] + ' ') if term[-1] == '!' else (term, term + '="') for term in proposal_terms] else: proposal_terms = [(term, term + ' ') for term in self.html_tags.keys() if term.startswith(tag)] return proposal_terms class JinjaCompletionProvider(CustomCompletionProviderBase): data_file = 'jinja.json' left_delimiter = '{' left_delimiter_adjustment = -1 extraction_regex = re.compile( r'.*(?:{{\s*|{%\s+(?:if|elif|for\s+[a-z_]+\s+in)\s+)(?P<var>[a-z_.]+)' r'(' r'(?P<is_test>\s+is\s+(?P<test>[a-z_]+))' r'|' r'(?P<is_filter>\s*\|\s*(?:[a-z_]+\s*\|\s*)*(?P<filter>[a-z_]*(?!\|)))' r')?' r'$' ) name = 'Jinja' var_context = None def load_data(self, completion_data): self.jinja_filters = completion_data['global']['filters'] self.jinja_tests = completion_data['global']['tests'] self.jinja_tokens = completion_data['global']['tokens'] if self.var_context is not None: context = completion_data['context'] if not self.var_context in context: raise RuntimeError('the specified context is not defined') context = context[self.var_context] if 'filters' in context: self.jinja_filters.extend(context['filters']) if 'tests' in context: self.jinja_tests.extend(context['tests']) if 'tokens' in context: self.jinja_tokens.update(context['tokens'])
BSD 3-Clause New or Revised License
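A hedged sketch that exercises populate() directly, outside of GTK; it assumes King Phisher's completion data file and GTK bindings are importable, and the template snippet is made up.

provider = JinjaCompletionProvider()
match = JinjaCompletionProvider.extraction_regex.match('{{ message | upp')
if match is not None:
    # Proposals completing the filter "upp", e.g. ("upper", "upper") if present in jinja.json.
    print(provider.populate(None, match))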
volfpeter/markyp-bootstrap4
markyp_bootstrap4/modals.py
modal_dialog_base
python
def modal_dialog_base(*args: ElementType,
                      centered: bool = False,
                      class_: Optional[str] = None,
                      **kwargs: PropertyValue) -> div:
    return div(
        *args,
        class_=join("modal-dialog", "modal-dialog-centered" if centered else None, class_),
        role="document",
        **kwargs
    )
Creates a `modal-dialog` `div` that is the element right inside `modal_element`.

Positional arguments will become the children elements of the created `div`.

Keyword arguments not listed in the arguments section are turned into element attributes on the created `div`.

Arguments:
    centered: Whether to center the content of the modal.
    class_: Additional CSS class names to set on the created `div`.
https://github.com/volfpeter/markyp-bootstrap4/blob/1af5a1f9dc861a14323706ace28882ef6555739a/markyp_bootstrap4/modals.py#L181-L202
from typing import List, Optional, Type from markyp import ElementType, PropertyDict, PropertyValue, elements from markyp_html import join from markyp_html.block import div from markyp_html.forms import button from markyp_html.text import StyledTextFactory from markyp_bootstrap4.buttons import ElementButtonFactory __all__ = ( "title", "CloseButtonFactory", "close_button", "ModalToggleButtonFactory", "toggle_button", "modal", "modal_element", "modal_dialog_base", "modal_content", "modal_header", "modal_body", "modal_footer" ) title: StyledTextFactory = StyledTextFactory("modal-title") class CloseButtonFactory(ElementButtonFactory): __slots__ = () def __init__(self, generator: Optional[Type[elements.Element]] = None) -> None: super().__init__(generator or button) def update_attributes(self, attributes: PropertyDict, *, disabled: bool = False, active: bool = False) -> PropertyDict: attributes = super().update_attributes(attributes, disabled=disabled, active=active) attributes["data-dismiss"] = "modal" return attributes class ModalToggleButtonFactory(ElementButtonFactory): __slots__ = () def __init__(self, generator: Optional[Type[elements.Element]] = None) -> None: super().__init__(generator or button) def update_attributes(self, attributes: PropertyDict, *, disabled: bool = False, active: bool = False) -> PropertyDict: attributes = super().update_attributes(attributes, disabled=disabled, active=active) attributes["data-toggle"] = "modal" if "modal_id" in attributes: attributes["data-target"] = f"#{attributes['modal_id']}" del attributes["modal_id"] return attributes close_button: CloseButtonFactory = CloseButtonFactory() toggle_button: ModalToggleButtonFactory = ModalToggleButtonFactory() def modal(*args: ElementType, id: str, title: Optional[ElementType] = None, footer: Optional[ElementType] = None, add_close_button: bool = True, centered: bool = False, fade: bool = True, class_: Optional[str] = None, dialog_class: Optional[str] = None, content_class: Optional[str] = None, header_class: Optional[str] = None, body_class: Optional[str] = None, footer_class: Optional[str] = None) -> div: header: List[ElementType] = [] if title: header.append(title) if add_close_button: from markyp_html.entities import times from markyp_html.inline import span header.append(button( span(times, **{"aria-hidden": True}), type="button", class_="close", **{"data-dismiss": "modal", "aria-label": "Close"} )) return modal_element( modal_dialog_base( modal_content( modal_header(*header, class_=header_class) if len(header) > 0 else None, modal_body(*args, class_=body_class), modal_footer(footer, class_=footer_class) if footer is not None else None, class_=content_class ), centered=centered, class_=dialog_class ), class_=class_, fade=fade, id=id ) def modal_element(*args: ElementType, class_: Optional[str] = None, fade: bool = True, **kwargs: PropertyValue) -> div: return div( *args, class_=join("modal", "fade" if fade else None, class_), role="dialog", tabindex=-1, **kwargs )
MIT License
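Usage sketch for the markyp-bootstrap4 record above (not part of the original repository). It assumes markyp-bootstrap4 and its markyp/markyp-html dependencies are installed, that markyp elements accept plain strings as children, and that converting an element to str() yields its markup.

from markyp_bootstrap4.modals import modal_dialog_base

# Wrap plain-text content in a centered modal dialog with an extra CSS class.
dialog = modal_dialog_base(
    "Are you sure you want to continue?",
    centered=True,
    class_="my-dialog",
)

# Expected class list on the produced div: "modal-dialog modal-dialog-centered my-dialog".
print(str(dialog))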
clusterhq/flocker
admin/test/test_packaging.py
parse_colon_dict
python
def parse_colon_dict(data):
    result = {}
    key = None
    for line in data.splitlines():
        parts = [value.strip() for value in line.split(':', 1)]
        if len(parts) == 2:
            key, val = parts
            result[key] = val
        else:
            result.setdefault(key, '')
            result[key] += parts[0]
    return result
Parse colon-separated values into a dictionary, treating lines lacking a colon as continuation lines.

Any leading lines without a colon will be associated with the key ``None``.

This is the format output by ``rpm --query`` and ``dpkg --info``.

:param bytes data: Data to parse
:return: A ``dict`` containing the parsed data.
https://github.com/clusterhq/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/admin/test/test_packaging.py#L122-L145
from glob import glob import platform from subprocess import check_output from textwrap import dedent from unittest import skipIf from StringIO import StringIO from twisted.python.filepath import FilePath from twisted.python.procutils import which from twisted.python.usage import UsageError from virtualenv import REQUIRED_MODULES as VIRTUALENV_REQUIRED_MODULES from flocker.testtools import TestCase, FakeSysModule from .. import packaging from ..packaging import ( omnibus_package_builder, InstallVirtualEnv, InstallApplication, BuildPackage, BuildSequence, BuildOptions, BuildScript, DockerBuildOptions, DockerBuildScript, GetPackageVersion, DelayedRpmVersion, CreateLinks, PythonPackage, create_virtualenv, VirtualEnv, PackageTypes, Distribution, Dependency, build_in_docker, PACKAGE, PACKAGE_PYTHON, PACKAGE_CLI, PACKAGE_NODE, PACKAGE_DOCKER_PLUGIN, make_dependencies, available_distributions, LintPackage, ) from flocker.common.version import RPMVersion FLOCKER_PATH = FilePath(__file__).parent().parent().parent() require_fpm = skipIf(not which('fpm'), "Tests require the ``fpm`` command.") require_rpm = skipIf(not which('rpm'), "Tests require the ``rpm`` command.") require_rpmlint = skipIf(not which('rpmlint'), "Tests require the ``rpmlint`` command.") require_dpkg = skipIf(not which('dpkg'), "Tests require the ``dpkg`` command.") require_lintian = skipIf(not which('lintian'), "Tests require the ``lintian`` command.") require_not_ubuntu = skipIf( platform.linux_distribution()[0] == 'Ubuntu', "rpmlint returns spurious results on Ubuntu: FLOC-3564.") DOCKER_SOCK = '/var/run/docker.sock' def assert_equal_steps(test_case, expected, actual): expected_steps = getattr(expected, 'steps') actual_steps = getattr(actual, 'steps') if None in (expected_steps, actual_steps): test_case.assertEqual(expected, actual) else: mismatch_steps = [] missing_steps = [] index = 0 for index, expected_step in enumerate(expected_steps): try: actual_step = actual_steps[index] except IndexError: missing_steps = expected_steps[index:] break if expected_step != actual_step: mismatch_steps.append( '* expected: {} !=\n' ' actual: {}'.format( expected_step, actual_step)) extra_steps = actual_steps[index+1:] if mismatch_steps or missing_steps or extra_steps: test_case.fail( 'Step Mismatch\n' 'Mismatch:\n{}\n' 'Missing:\n{}\n' 'Extra:\n{}'.format( '\n'.join(mismatch_steps), missing_steps, extra_steps) ) def assert_dict_contains(test_case, expected, actual, message=''): missing_items = [] mismatch_items = [] no_value = object() for key, expected_value in expected.items(): actual_value = actual.get(key, no_value) if actual_value is no_value: missing_items.append(key) elif actual_value != expected_value: mismatch_items.append( '{}: {} != {}'.format(key, expected_value, actual_value) ) if missing_items or mismatch_items: test_case.fail( '{}\n' 'Missing items: {}\n' 'Mismatch items: {}\n' 'Actual items: {}'.format( message, missing_items, mismatch_items, actual) )
Apache License 2.0
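A minimal usage sketch for parse_colon_dict from the record above (the function is assumed to be in scope, e.g. copied from the record); the sample input mimics ``dpkg --info`` output with a continuation line.

data = (
    "Package: clusterhq-flocker-cli\n"
    "Architecture: all\n"
    "Description: A short synopsis\n"
    " followed by a continuation line"
)

info = parse_colon_dict(data)

# Keyed lines become entries; the line lacking a colon is appended to the
# value of the most recently seen key ("Description").
print(info["Package"])       # clusterhq-flocker-cli
print(info["Description"])   # the synopsis plus the appended continuation text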
vlsida/openram
compiler/modules/port_data.py
port_data.place_write_driver_array
python
def place_write_driver_array(self, offset):
    self.write_driver_array_inst.place(offset=offset, mirror="MX")
Placing Write Driver
https://github.com/vlsida/openram/blob/f66aac3264598eeae31225c62b6a4af52412d407/compiler/modules/port_data.py#L415-L417
from tech import drc import debug import design import math from sram_factory import factory from collections import namedtuple from vector import vector from globals import OPTS from tech import cell_properties from tech import layer_properties as layer_props class port_data(design.design): def __init__(self, sram_config, port, num_spare_cols=None, bit_offsets=None, name="",): sram_config.set_local_config(self) self.port = port if self.write_size is not None: self.num_wmasks = int(math.ceil(self.word_size / self.write_size)) else: self.num_wmasks = 0 if num_spare_cols is not None: self.num_spare_cols = num_spare_cols + self.num_spare_cols if self.num_spare_cols is None: self.num_spare_cols = 0 if not bit_offsets: bitcell = factory.create(module_type=OPTS.bitcell) if(cell_properties.use_strap == True and OPTS.num_ports == 1): strap = factory.create(module_type=cell_properties.strap_module, version=cell_properties.strap_version) precharge_width = bitcell.width + strap.width else: precharge_width = bitcell.width self.bit_offsets = [] for i in range(self.num_cols + self.num_spare_cols): self.bit_offsets.append(i * precharge_width) else: self.bit_offsets = bit_offsets if name == "": name = "port_data_{0}".format(self.port) super().__init__(name) debug.info(2, "create data port of size {0} with {1} words per row".format(self.word_size, self.words_per_row)) self.create_netlist() if not OPTS.netlist_only: debug.check(len(self.all_ports)<=2, "Bank layout cannot handle more than two ports.") self.create_layout() self.add_boundary() def get_bl_names(self): return self.precharge.get_bl_names() def get_br_names(self): return self.precharge.get_br_names() def get_bl_name(self, port=0): return "bl_{}".format(port) def get_br_name(self, port=0): return "br_{}".format(port) def create_netlist(self): self.precompute_constants() self.add_pins() self.add_modules() self.create_instances() def create_instances(self): if self.precharge_array: self.create_precharge_array() else: self.precharge_array_inst = None if self.sense_amp_array: self.create_sense_amp_array() else: self.sense_amp_array_inst = None if self.write_driver_array: self.create_write_driver_array() if self.write_size is not None: self.create_write_mask_and_array() else: self.write_mask_and_array_inst = None else: self.write_driver_array_inst = None self.write_mask_and_array_inst = None if self.column_mux_array: self.create_column_mux_array() else: self.column_mux_array_inst = None def create_layout(self): self.compute_instance_offsets() self.place_instances() self.route_layout() self.DRC_LVS() def add_pins(self): self.add_pin("rbl_bl", "INOUT") self.add_pin("rbl_br", "INOUT") for bit in range(self.num_cols): self.add_pin("bl_{0}".format(bit), "INOUT") self.add_pin("br_{0}".format(bit), "INOUT") for bit in range(self.num_spare_cols): self.add_pin("sparebl_{0}".format(bit), "INOUT") self.add_pin("sparebr_{0}".format(bit), "INOUT") if self.port in self.read_ports: for bit in range(self.word_size + self.num_spare_cols): self.add_pin("dout_{}".format(bit), "OUTPUT") if self.port in self.write_ports: for bit in range(self.word_size + self.num_spare_cols): self.add_pin("din_{}".format(bit), "INPUT") sel_names = ["sel_{}".format(x) for x in range(self.num_col_addr_lines)] for pin_name in sel_names: self.add_pin(pin_name, "INPUT") if self.port in self.read_ports: self.add_pin("s_en", "INPUT") self.add_pin("p_en_bar", "INPUT") if self.port in self.write_ports: self.add_pin("w_en", "INPUT") for bit in range(self.num_wmasks): 
self.add_pin("bank_wmask_{}".format(bit), "INPUT") for bit in range(self.num_spare_cols): self.add_pin("bank_spare_wen{}".format(bit), "INPUT") self.add_pin("vdd", "POWER") self.add_pin("gnd", "GROUND") def route_layout(self): self.route_data_lines() self.route_layout_pins() self.route_supplies() def route_layout_pins(self): self.route_bitline_pins() self.route_control_pins() def route_data_lines(self): if self.port in self.readwrite_ports: self.route_write_mask_and_array_in(self.port) self.route_write_mask_and_array_to_write_driver(self.port) self.route_write_driver_in(self.port) self.route_sense_amp_out(self.port) self.route_write_driver_to_sense_amp(self.port) self.route_sense_amp_to_column_mux_or_precharge_array(self.port) self.route_column_mux_to_precharge_array(self.port) elif self.port in self.read_ports: self.route_sense_amp_out(self.port) self.route_sense_amp_to_column_mux_or_precharge_array(self.port) self.route_column_mux_to_precharge_array(self.port) else: self.route_write_mask_and_array_in(self.port) self.route_write_mask_and_array_to_write_driver(self.port) self.route_write_driver_in(self.port) self.route_write_driver_to_column_mux_or_precharge_array(self.port) self.route_column_mux_to_precharge_array(self.port) def route_supplies(self): for inst in self.insts: self.copy_power_pins(inst, "vdd") self.copy_power_pins(inst, "gnd") def add_modules(self): cell = factory.create(module_type=OPTS.bitcell) if(cell_properties.use_strap == True and OPTS.num_ports == 1): strap = factory.create(module_type=cell_properties.strap_module, version=cell_properties.strap_version) precharge_width = cell.width + strap.width else: precharge_width = cell.width if self.port == 0: precharge_bit_offsets = [self.bit_offsets[0] - precharge_width] + self.bit_offsets else: precharge_bit_offsets = self.bit_offsets + [self.bit_offsets[-1] + precharge_width] self.precharge_array = factory.create(module_type="precharge_array", columns=self.num_cols + self.num_spare_cols + 1, offsets=precharge_bit_offsets, bitcell_bl=self.bl_names[self.port], bitcell_br=self.br_names[self.port], column_offset=self.port - 1) self.add_mod(self.precharge_array) if self.port in self.read_ports: self.sense_amp_array = factory.create(module_type="sense_amp_array", word_size=self.word_size, offsets=self.bit_offsets, words_per_row=self.words_per_row, num_spare_cols=self.num_spare_cols) self.add_mod(self.sense_amp_array) else: self.sense_amp_array = None if self.col_addr_size > 0: self.column_mux_array = factory.create(module_type="column_mux_array", columns=self.num_cols, word_size=self.word_size, offsets=self.bit_offsets, bitcell_bl=self.bl_names[self.port], bitcell_br=self.br_names[self.port]) self.add_mod(self.column_mux_array) else: self.column_mux_array = None if self.port in self.write_ports: self.write_driver_array = factory.create(module_type="write_driver_array", columns=self.num_cols, word_size=self.word_size, offsets=self.bit_offsets, write_size=self.write_size, num_spare_cols=self.num_spare_cols) self.add_mod(self.write_driver_array) if self.write_size is not None: self.write_mask_and_array = factory.create(module_type="write_mask_and_array", columns=self.num_cols, offsets=self.bit_offsets, word_size=self.word_size, write_size=self.write_size) self.add_mod(self.write_mask_and_array) else: self.write_mask_and_array = None else: self.write_driver_array = None self.write_mask_and_array = None def precompute_constants(self): if self.col_addr_size>0: self.num_col_addr_lines = 2**self.col_addr_size else: self.num_col_addr_lines 
= 0 self.m2_gap = max(2 * drc("pwell_to_nwell") + drc("nwell_enclose_active"), 3 * self.m2_pitch) self.bitcell = factory.create(module_type=OPTS.bitcell) self.bl_names = self.bitcell.get_all_bl_names() self.br_names = self.bitcell.get_all_br_names() self.wl_names = self.bitcell.get_all_wl_names() self.precharge = factory.create(module_type=OPTS.precharge, bitcell_bl=self.bl_names[0], bitcell_br=self.br_names[0]) def create_precharge_array(self): if not self.precharge_array: self.precharge_array_inst = None return self.precharge_array_inst = self.add_inst(name="precharge_array{}".format(self.port), mod=self.precharge_array) temp = [] if self.port==0: temp.append("rbl_bl") temp.append("rbl_br") for bit in range(self.num_cols): temp.append("bl_{0}".format(bit)) temp.append("br_{0}".format(bit)) for bit in range(self.num_spare_cols): temp.append("sparebl_{0}".format(bit)) temp.append("sparebr_{0}".format(bit)) if self.port==1: temp.append("rbl_bl") temp.append("rbl_br") temp.extend(["p_en_bar", "vdd"]) self.connect_inst(temp) def place_precharge_array(self, offset): self.precharge_array_inst.place(offset=offset, mirror="MX") def create_column_mux_array(self): self.column_mux_array_inst = self.add_inst(name="column_mux_array{}".format(self.port), mod=self.column_mux_array) temp = [] for col in range(self.num_cols): temp.append("bl_{0}".format(col)) temp.append("br_{0}".format(col)) for word in range(self.words_per_row): temp.append("sel_{}".format(word)) for bit in range(self.word_size): temp.append("bl_out_{0}".format(bit)) temp.append("br_out_{0}".format(bit)) temp.append("gnd") self.connect_inst(temp) def place_column_mux_array(self, offset): if self.col_addr_size == 0: return self.column_mux_array_inst.place(offset=offset, mirror="MX") def create_sense_amp_array(self): self.sense_amp_array_inst = self.add_inst(name="sense_amp_array{}".format(self.port), mod=self.sense_amp_array) temp = [] for bit in range(self.word_size): temp.append("dout_{}".format(bit)) if self.words_per_row == 1: temp.append("bl_{0}".format(bit)) temp.append("br_{0}".format(bit)) else: temp.append("bl_out_{0}".format(bit)) temp.append("br_out_{0}".format(bit)) for bit in range(self.num_spare_cols): temp.append("dout_{}".format(self.word_size + bit)) temp.append("sparebl_{0}".format(bit)) temp.append("sparebr_{0}".format(bit)) temp.append("s_en") temp.extend(["vdd", "gnd"]) self.connect_inst(temp) def place_sense_amp_array(self, offset): self.sense_amp_array_inst.place(offset=offset, mirror="MX") def create_write_driver_array(self): self.write_driver_array_inst = self.add_inst(name="write_driver_array{}".format(self.port), mod=self.write_driver_array) temp = [] for bit in range(self.word_size + self.num_spare_cols): temp.append("din_{}".format(bit)) for bit in range(self.word_size): if (self.words_per_row == 1): temp.append("bl_{0}".format(bit)) temp.append("br_{0}".format(bit)) else: temp.append("bl_out_{0}".format(bit)) temp.append("br_out_{0}".format(bit)) for bit in range(self.num_spare_cols): temp.append("sparebl_{0}".format(bit)) temp.append("sparebr_{0}".format(bit)) if self.write_size is not None: for i in range(self.num_wmasks): temp.append("wdriver_sel_{}".format(i)) for i in range(self.num_spare_cols): temp.append("bank_spare_wen{}".format(i)) elif self.num_spare_cols and not self.write_size: temp.append("w_en") for i in range(self.num_spare_cols): temp.append("bank_spare_wen{}".format(i)) else: temp.append("w_en") temp.extend(["vdd", "gnd"]) self.connect_inst(temp)
BSD 3-Clause New or Revised License
hpe-container-platform-community/hpecp-python-library
hpecp/gateway.py
Gateway.purpose
python
def purpose(self):
    return self.json["purpose"]
@Field: from json['purpose']
https://github.com/hpe-container-platform-community/hpecp-python-library/blob/625fb25c99698a2203b394ef39a253e2b4f0d7c9/hpecp/gateway.py#L159-L161
from __future__ import absolute_import from enum import Enum from requests.structures import CaseInsensitiveDict from hpecp.base_resource import ResourceList from .base_resource import AbstractResource, AbstractWaitableResourceController from .exceptions import APIItemNotFoundException try: basestring except NameError: basestring = str class GatewayStatus(Enum): bundle = 1 installing = 2 installed = 3 ready = 4 unlicensed = 5 configuring = 6 configured = 7 error = 8 sysinfo = 9 unconfiguring = 10 deleting = 11 storage_pending = 12 storage_configuring = 13 storage_error = 14 decommission_in_progress = 15 delete_in_progress = 16 class Gateway(AbstractResource): all_fields = [ "id", "hacapable", "propinfo", "approved_worker_pubkey", "schedule", "ip", "proxy_nodes_hostname", "hostname", "state", "status_info", "purpose", "sysinfo", "tags", ] default_display_fields = [ "id", "ip", "proxy_nodes_hostname", "hostname", "state", "status_info", "purpose", "tags", ] @property def state(self): return self.json["state"] @property def hacapable(self): return self.json["hacapable"] @property def propinfo(self): return self.json["propinfo"] @property def approved_worker_pubkey(self): return self.json["approved_worker_pubkey"] @property def schedule(self): return self.json["schedule"] @property def ip(self): return self.json["ip"] @property def proxy_nodes_hostname(self): try: return self.json["proxy_nodes_hostname"] except KeyError: return "" @property def hostname(self): return self.json["hostname"] @property
MIT License
ukplab/iwcs2017-answer-selection
experiment/qa/train/training.py
InsuranceQATrainingSimple.prepare_next_epoch
python
def prepare_next_epoch(self, model, data, sess, epoch):
    super(InsuranceQATrainingSimple, self).prepare_next_epoch(model, data, sess, epoch)
    self.batch_i = 0

    if len(self._train_questions) == 0:
        self.logger.debug('Preparing training examples')
        self._train_questions, self._train_answers_good, self._train_answers_bad = data.get_items(
            data.archive.train.qa, self.negative_answers
        )

    self.epoch_random_indices = np.random.permutation(len(self._train_questions))
Prepares the next epoch, especially the batches
https://github.com/ukplab/iwcs2017-answer-selection/blob/a5d24c746e53e9d8dcd9cffff0a9c9a1e5d1321a/experiment/qa/train/training.py#L17-L31
import math import numpy as np from experiment.qa.train import QABatchedTraining class InsuranceQATrainingSimple(QABatchedTraining): def __init__(self, config, config_global, logger): super(InsuranceQATrainingSimple, self).__init__(config, config_global, logger) self._train_questions, self._train_answers_good, self._train_answers_bad = [], [], [] self.batch_i = 0 self.epoch_random_indices = []
Apache License 2.0
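The trainer above draws a fresh permutation of example indices at the start of each epoch; below is a self-contained sketch of that shuffling pattern, using synthetic data standing in for the InsuranceQA questions.

import numpy as np

questions = ["question-%d" % i for i in range(10)]
batch_size = 4

# One permutation per epoch, mirroring epoch_random_indices above.
epoch_random_indices = np.random.permutation(len(questions))

# Consume the permuted indices in fixed-size batches.
for start in range(0, len(questions), batch_size):
    batch_indices = epoch_random_indices[start:start + batch_size]
    print([questions[i] for i in batch_indices])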
city-bureau/city-scrapers
city_scrapers/spiders/il_commerce.py
IlCommerceSpider._parse_start
python
def _parse_start(self, response):
    start_str = " ".join(response.css("h3.mt-4 *::text").extract())
    dt_str = re.search(
        r"[A-Z][a-z]{2,8} \d{1,2}, \d{4} \d{1,2}:\d{2} [APM]{2}", start_str
    ).group()
    return datetime.strptime(dt_str, "%B %d, %Y %I:%M %p")
Parse start datetime as a naive datetime object.
https://github.com/city-bureau/city-scrapers/blob/b295d0aa612e3979a9fccab7c5f55ecea9ed074c/city_scrapers/spiders/il_commerce.py#L83-L89
import re from datetime import datetime from city_scrapers_core.constants import ADVISORY_COMMITTEE, COMMISSION, COMMITTEE from city_scrapers_core.items import Meeting from city_scrapers_core.spiders import CityScrapersSpider class IlCommerceSpider(CityScrapersSpider): name = "il_commerce" agency = "Illinois Commerce Commission" timezone = "America/Chicago" start_urls = [ "https://www.icc.illinois.gov/meetings/default.aspx?dts=32&et=1&et=5&et=3" ] def parse(self, response): for nav_link in response.css(".col-sm-7 a.btn"): if "?bd=" in nav_link.attrib["href"]: yield response.follow( nav_link.attrib["href"], callback=self._parse_events_page ) yield from self._parse_events_page(response) def _parse_events_page(self, response): for item in response.css(".panel-body a"): yield response.follow(item.attrib["href"], callback=self._parse_detail) def _parse_detail(self, response): title = self._parse_title(response) meeting = Meeting( title=title, description=self._parse_description(response), classification=self._parse_classification(title), start=self._parse_start(response), end=None, all_day=False, time_notes="", location=self._parse_location(response), links=self._parse_links(response), source=response.url, ) meeting["status"] = self._get_status( meeting, text=" ".join(response.css(".col-sm-12 *::text").extract()) ) meeting["id"] = self._get_id(meeting) yield meeting def _parse_title(self, response): title_str = re.sub( r"\s+", " ", " ".join(response.css(".soi-container h2 *::text").extract()) ).strip() return re.sub( r"(Illinois Commerce Commission|(?=Committee )Committee Meeting$)", "", title_str, ).strip() def _parse_description(self, response): return re.sub( r"\s+", " ", " ".join(response.css(".col-sm-12 > p *::text").extract()) ).strip() def _parse_classification(self, title): if "advisory" in title.lower(): return ADVISORY_COMMITTEE if "committee" in title.lower(): return COMMITTEE return COMMISSION
MIT License
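A self-contained sketch of the date extraction performed by _parse_start above, using only the standard library; the heading text is invented sample data.

import re
from datetime import datetime

start_str = "Regular Open Meeting January 15, 2020 1:30 PM"
dt_str = re.search(
    r"[A-Z][a-z]{2,8} \d{1,2}, \d{4} \d{1,2}:\d{2} [APM]{2}", start_str
).group()

start = datetime.strptime(dt_str, "%B %d, %Y %I:%M %p")
print(start)  # 2020-01-15 13:30:00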
lun-4/jose
ext/coins+.py
CoinsExt.taxes
python
async def taxes(self, ctx):
    await self.coins.ensure_taxbank(ctx)
    acc = await self.coins.get_account(ctx.guild.id)
    await ctx.send(f'`{self.coins.get_name(ctx.guild)}: {acc["amount"]}`')
Show your taxbank's wallet.
https://github.com/lun-4/jose/blob/fdafb121e0c5d7a731b52b3503f6b12e3538948a/ext/coins+.py#L163-L167
import logging import datetime import decimal from random import uniform import discord from discord.ext import commands from .common import Cog, CoinConverter from .utils import Table from .coins import AccountType log = logging.getLogger(__name__) BASE_CHANCE = decimal.Decimal('1') STEAL_CONSTANT = decimal.Decimal('0.42') DEFAULT_ARREST = 6 DEFAULT_REGEN = 9 GRACE_PERIOD = 5 class CooldownTypes: prison = 'prison' points = 'points' class CooldownError(Exception): pass def fmt_tdelta(delta): return datetime.timedelta(days=delta.days, seconds=delta.seconds) class CoinsExt(Cog, requires=['coins']): @property def coins2(self): return self.bot.get_cog('Coins2') async def show(self, ctx, accounts, *, field='amount', limit=10): filtered = [] for idx, account in enumerate(accounts): name = self.jcoin.get_name(account['account_id'], account=account) account['_name'] = name filtered.append(account) if len(filtered) == limit: break table = Table('pos', 'name', 'account id', field) for idx, account in enumerate(filtered): table.add_row( str(idx + 1), account['_name'], str(account['account_id']), str(account[field])) rendered = await table.render(loop=self.loop) if len(rendered) > 1993: await ctx.send(f'very big cant show: {len(rendered)}') else: await ctx.send(f'```\n{rendered}```') @commands.command() async def top(self, ctx, mode: str = 'g', limit: int = 10): if limit > 30 or limit < 1: raise self.SayException('invalid limit') if mode == 'g': accounts = await self.coins.jc_get( '/wallets', { 'key': 'global', 'reverse': True, 'type': self.coins.AccountType.USER, 'limit': limit, }) elif mode == 'l': accounts = await self.coins.jc_get( '/wallets', { 'key': 'local', 'guild_id': ctx.guild.id, 'reverse': True, 'limit': limit }) elif mode == 't': accounts = await self.coins.jc_get('/wallets', { 'key': 'taxpaid', 'reverse': True, 'limit': limit, }) return await self.show(ctx, accounts, field='taxpaid', limit=limit) elif mode == 'b': accounts = await self.coins.jc_get('/wallets', { 'key': 'taxbanks', 'reverse': True, 'limit': limit, }) elif mode == 'p': accounts = await self.coins.jc_get('/wallets', { 'key': 'global', 'type': AccountType.USER, 'limit': limit, }) elif mode == 'lp': accounts = await self.coins.jc_get('/wallets', { 'key': 'local', 'guild_id': ctx.guild.id, 'limit': limit, }) else: raise self.SayException('mode not found') await self.show(ctx, accounts, limit=limit) @commands.command(name='prices') async def _prices(self, ctx): em = discord.Embed(title='Pricing', color=discord.Color(0x2192bc)) descriptions = { 'OPR': ( 'Operational tax for high-load commands', ('yt', 'datamosh'), ), 'API': ('API tax (includes the NSFW commands)', ('xkcd', 'wolframalpha', 'weather', 'money', 'urban')), 'TRN': ('Translation tax', ('translate', )), } for category in self.prices: price = self.prices[category] em.add_field( name=f'Category: {category}, Price: {price}', value=f'{descriptions[category][0]}: ' f'{", ".join(descriptions[category][1])}', inline=False) await ctx.send(embed=em) @commands.command(name='taxes') @commands.guild_only()
MIT License
twopirllc/pandas-ta
pandas_ta/momentum/kst.py
kst
python
def kst(close, roc1=None, roc2=None, roc3=None, roc4=None, sma1=None, sma2=None, sma3=None, sma4=None, signal=None, drift=None, offset=None, **kwargs):
    roc1 = int(roc1) if roc1 and roc1 > 0 else 10
    roc2 = int(roc2) if roc2 and roc2 > 0 else 15
    roc3 = int(roc3) if roc3 and roc3 > 0 else 20
    roc4 = int(roc4) if roc4 and roc4 > 0 else 30
    sma1 = int(sma1) if sma1 and sma1 > 0 else 10
    sma2 = int(sma2) if sma2 and sma2 > 0 else 10
    sma3 = int(sma3) if sma3 and sma3 > 0 else 10
    sma4 = int(sma4) if sma4 and sma4 > 0 else 15
    signal = int(signal) if signal and signal > 0 else 9
    _length = max(roc1, roc2, roc3, roc4, sma1, sma2, sma3, sma4, signal)
    close = verify_series(close, _length)
    drift = get_drift(drift)
    offset = get_offset(offset)

    if close is None:
        return

    rocma1 = roc(close, roc1).rolling(sma1).mean()
    rocma2 = roc(close, roc2).rolling(sma2).mean()
    rocma3 = roc(close, roc3).rolling(sma3).mean()
    rocma4 = roc(close, roc4).rolling(sma4).mean()

    kst = 100 * (rocma1 + 2 * rocma2 + 3 * rocma3 + 4 * rocma4)
    kst_signal = kst.rolling(signal).mean()

    if offset != 0:
        kst = kst.shift(offset)
        kst_signal = kst_signal.shift(offset)

    if "fillna" in kwargs:
        kst.fillna(kwargs["fillna"], inplace=True)
        kst_signal.fillna(kwargs["fillna"], inplace=True)
    if "fill_method" in kwargs:
        kst.fillna(method=kwargs["fill_method"], inplace=True)
        kst_signal.fillna(method=kwargs["fill_method"], inplace=True)

    kst.name = f"KST_{roc1}_{roc2}_{roc3}_{roc4}_{sma1}_{sma2}_{sma3}_{sma4}"
    kst_signal.name = f"KSTs_{signal}"
    kst.category = kst_signal.category = "momentum"

    data = {kst.name: kst, kst_signal.name: kst_signal}
    kstdf = DataFrame(data)
    kstdf.name = f"KST_{roc1}_{roc2}_{roc3}_{roc4}_{sma1}_{sma2}_{sma3}_{sma4}_{signal}"
    kstdf.category = "momentum"

    return kstdf
Indicator: 'Know Sure Thing' (KST)
https://github.com/twopirllc/pandas-ta/blob/bc3b292bf1cc1d5f2aba50bb750a75209d655b37/pandas_ta/momentum/kst.py#L7-L61
from pandas import DataFrame
from .roc import roc
from pandas_ta.utils import get_drift, get_offset, verify_series
MIT License
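A usage sketch for the kst indicator above; it assumes pandas-ta is installed and that the function is importable from pandas_ta.momentum (the exact import path is an assumption based on the file location in the record).

import numpy as np
import pandas as pd

from pandas_ta.momentum import kst

# Synthetic close prices, long enough for the default windows (max ROC period 30
# plus the SMA and signal smoothing).
close = pd.Series(100 + np.cumsum(np.random.normal(0, 1, 200)))

kstdf = kst(close)
# Column names follow the naming in the function above, e.g.
# KST_10_15_20_30_10_10_10_15 and KSTs_9.
print(kstdf.tail())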
loudr/pale
pale/endpoint.py
Endpoint._execute
python
def _execute(self, request, **kwargs):
    try:
        self._create_context(request)
        self._authenticate()

        context = get_current_context()

        self._parse_args()

        if hasattr(self, '_before_handlers') and isinstance(self._before_handlers, (list, tuple)):
            for handler in self._before_handlers:
                handler(context)

        context.handler_result = self._handle(context)

        if hasattr(self, '_after_handlers') and isinstance(self._after_handlers, (list, tuple)):
            for handler in self._after_handlers:
                handler(context)

        self._render()
        response = context.response
    except AuthenticationError as e:
        if hasattr(e, 'message') and e.message is not None:
            message = e.message
        else:
            message = "You don't have permission to do that."
        err = APIError.Forbidden(message)
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'
    except ArgumentError as e:
        err = APIError.UnprocessableEntity(e.message)
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'
    except APIError as e:
        response = self._response_class(*e.response)
        response.headers["Content-Type"] = 'application/json'
    except PaleRaisedResponse as r:
        response = self._response_class(*r.response)
        response.headers["Content-Type"] = 'application/json'
    except Exception as e:
        logging.exception("Failed to handle Pale Endpoint %s: %r",
                          self.__class__.__name__, e)
        err = APIError.Exception(repr(e))
        response = self._response_class(*err.response)
        response.headers["Content-Type"] = 'application/json'

    allow_cors = getattr(self, "_allow_cors", None)
    if allow_cors is True:
        response.headers['Access-Control-Allow-Origin'] = '*'
    elif isinstance(allow_cors, basestring):
        response.headers['Access-Control-Allow-Origin'] = allow_cors

    context.response = response

    try:
        if hasattr(self, '_after_response_handlers') and isinstance(self._after_response_handlers, (list, tuple)):
            for handler in self._after_response_handlers:
                handler(context, response)
    except Exception as e:
        logging.exception(
            "Failed to process _after_response_handlers for Endpoint %s",
            self.__class__.__name__)
        raise

    return response
The top-level execute function for the endpoint. This method is intended to remain as-is, and not be overridden. It gets called by your HTTP framework's route handler, and performs the following actions to process the request: ``authenticate_request`` Validate the Bearer token, populate the ``current_user``, and make sure that the token covers the scope needed to call the requested method. * * ``parse arguments`` The argument parser is responsible for: - First, coercing and patching any parameters that might require it due to versioning (i.e. the caller is using an old API version that supports `index` as a parameter for pagination, but the current version uses the name `offset`) - Second, iterating through the endpoint's supported arguments and validating that the params passed in comply with the endpoint's requirements - Third, populating the `context.args` array with the validated arguments If any of the arguments are invalid, then the Argument parser will raise an ArgumentError that bubbles up to the `try/catch` block of the execute method. * * ``before handler`` The before_handlers are specified by the Endpoint definition, and are intended to supporty DRY-ing up your codebase. Have a set of Endpoints that all need to grab an object from the ORM based on the same parameter? Make them inherit from an Endpoint subclass that performs that task in a before_handler! * * ``handle`` The core logic of your API endpoint, as implemented by you in your Endpoint subclass. The API Framework expects ``handle`` to return a dictionary specifying the response object and the JSON key that it should hang off of, or a tuple of a dictionary and an HTTP status code. * * ``after_handler`` Like the before_handlers, the ``after_handlers`` happen after the handle method, and allow the endpoint developer to re-use code for post-processing data from an endpoint. * * ``render response`` Like the argument parser, the response renderer is responsible for a few things: - First, it converts the ORM objects into JSON-serializable Python dictionaries using the Resource objects defined by the API implementation, - Second, it does any version parameter coersion, renaming and reformatting the edge version of the response to match the version requested by the API caller, - and Third, it serializes the Python dictionary into the response format requested by the API caller (right now, we only support JSON responses, but it'd be reasonble to support something like HTML or XML or whatever in the future). The rendered JSON text is then returned as the response that should be sent by your HTTP framework's routing handler. * * ``_after_response_handler`` The `_after_response_handlers` are specified by the Endpoint definition, and enable manipulation of the response object before it is returned to the client, but after the response is rendered. Because these are instancemethods, they may share instance data from `self` specified in the endpoint's `_handle` method. ``_finalize_content`` The `_finalize_content` method is overridden by the Endpoint and is called after the response is rendered into a serializable result. This method is called with two arguments, the context and the rendered content, and expected to return updated rendered content. For in-place modification of dicts, this method will still be expected to return the given argument. ``_allow_cors`` This value is set to enable CORs for a given endpoint. When set to a string it supplies an explicit value to 'Access-Control-Allow-Origin'. 
Set to True, this will allow access from *all* domains; Access-Control-Allow-Origin = "*"
https://github.com/loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/endpoint.py#L122-L286
import datetime import json import logging import sys import threading import arrow from pale import config as pale_config from pale.arguments import BaseArgument from pale.fields import ResourceField, ListField, ResourceListField from pale.errors import APIError, ArgumentError, AuthenticationError from pale.meta import MetaHasFields from pale.resource import NoContentResource, Resource, DebugResource from pale.response import PaleRaisedResponse _tls = threading.local() def get_current_context(): return _tls.current_context def set_current_context(context): _tls.current_context = context class PaleDefaultJSONEncoder(json.JSONEncoder): def default(self, obj): try: if isinstance(obj, datetime.datetime): encoded = arrow.get(obj).isoformat() else: encoded = json.JSONEncoder.default(self, obj) except TypeError as e: if hasattr(obj, 'to_dict') and callable(obj.to_dict): encoded = obj.to_dict() else: raise e return encoded class Endpoint(object): __metaclass__ = MetaHasFields _response_class = None _json_serializer = PaleDefaultJSONEncoder() _default_cache = 'no-cache' @classmethod def _fix_up_fields(cls): cls._arguments = dict() if cls.__module__ == __name__: return for name in set(dir(cls)): attr = getattr(cls, name, None) if isinstance(attr, BaseArgument): if name.startswith('_'): raise TypeError("Endpoint argument %s cannot begin with " "an underscore, as these attributes are reserved " "for instance variables of the endpoint object, " "rather than for arguments to your HTTP Endpoint." % name) attr._fix_up(cls, name) cls._arguments[attr.name] = attr def _set_response_class(self, response_class): self._response_class = response_class @classmethod def _set_json_serializer(cls, serializer): cls._json_serializer = serializer @classmethod def _metadata(cls, *args, **kwargs): return dict(**kwargs) def _handle(self, context): pass def _finally(self): pass
MIT License
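An illustrative sketch of an Endpoint subclass wired for the _execute flow described above. Only _handle and context.args are taken from the record; the routing attributes (_uri, _http_method) are hypothetical names used here for illustration and may not match pale's actual API.

from pale.endpoint import Endpoint

class EchoEndpoint(Endpoint):
    _uri = "/echo"          # hypothetical routing attribute
    _http_method = "GET"    # hypothetical routing attribute

    def _handle(self, context):
        # _execute stores this return value as context.handler_result,
        # then renders it into the response.
        return {"echo": context.args}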
arxiv/arxiv-submission-core
agent/agent/runner/async_runner.py
execute_async_save
python
def execute_async_save(*events: Event, submission_id: int = -1):
    if submission_id < 0:
        raise RuntimeError('Invalid submission ID')
    kwargs = {'submission_id': submission_id}
    get_or_create_worker_app().send_task('save', (*events,), kwargs)
Save events asynchronously, using :func:`.async_save`.

Parameters
----------
events
    Each item is an :class:`.Event`.
submission_id : int
    Identifier of the submission to which the commands/events apply.
https://github.com/arxiv/arxiv-submission-core/blob/6077ce4e0685d67ce7010800083a898857158112/agent/agent/runner/async_runner.py#L145-L160
from typing import Callable, Optional, Iterable, Tuple, Any, Union, Dict, List from functools import wraps, partial import math import random from flask import Flask from celery import shared_task, Celery, Task, chain from celery.result import AsyncResult from retry import retry from arxiv.base.globals import get_application_config, get_application_global from arxiv.base import logging from arxiv.submission.domain.submission import Submission from arxiv.submission.domain.event import Event from arxiv.submission.domain.agent import Agent from arxiv.submission import save from arxiv.submission.exceptions import NothingToDo from arxiv.submission.services import classic from .. import config from .base import ProcessRunner from ..process import ProcessType, Process, Failed, Recoverable from ..domain import ProcessData, Trigger logger = logging.getLogger(__name__) logger.propagate = False class AsyncProcessRunner(ProcessRunner): processes = {} @classmethod def prepare(cls, ProcessImpl: ProcessType) -> None: cls.processes[ProcessImpl.__name__] = register_process(ProcessImpl) def run(self, trigger: Trigger) -> None: _run = self.processes[self.process.name] _run(self.process.submission_id, self.process.process_id, trigger) def create_worker_app() -> Celery: result_backend = config.RESULT_BACKEND broker = config.BROKER_URL celery_app = Celery('submission', results=result_backend, backend=result_backend, result_backend=result_backend, broker=broker) celery_app.conf.queue_name_prefix = config.QUEUE_NAME_PREFIX celery_app.conf.task_default_queue = config.TASK_DEFAULT_QUEUE celery_app.conf.prefetch_multiplier = config.PREFETCH_MULTIPLIER celery_app.conf.task_acks_late = config.TASK_ACKS_LATE celery_app.conf.accept_content = config.CELERY_ACCEPT_CONTENT celery_app.conf.task_serializer = config.CELERY_TASK_SERIALIZER celery_app.conf.result_serializer = config.CELERY_RESULT_SERIALIZER celery_app.conf.backend = result_backend register_save = celery_app.task( name='save', bind=True, max_retries=config.MAX_SAVE_RETRIES, default_retry_delay=config.DEFAULT_SAVE_RETRY_DELAY ) register_save(async_save) return celery_app def get_or_create_worker_app() -> Celery: g = get_application_global() if not g: return create_worker_app() if 'worker' not in g: g.worker = create_worker_app() return g.worker @retry(Recoverable, backoff=2) def async_save(self, *events: Event, submission_id: int = -1) -> None: if submission_id < 0: raise RuntimeError('Invalid submission ID') try: save(*events, submission_id=submission_id) except NothingToDo as e: logger.debug('No events to save, move along: %s', e) except classic.Unavailable as e: raise Recoverable('Database is not available; try again') from e except classic.ConsistencyError as e: logger.error('Encountered a ConsistencyError; could not save: %s', e) raise Failed('Encountered a consistency error') from e except Exception as e: raise Failed('Unhandled exception: %s' % e) from e
MIT License
wangkenpu/conv-tasnet-pytorch
utils/sigproc/mask.py
iam
python
def iam(target, mixture, use_log, use_power):
    target = convert_to_linear(target, use_log, use_power)
    mixture = convert_to_linear(mixture, use_log, use_power)
    mask = np.abs(target) / np.abs(mixture)
    return mask
Compute ideal amplitude mask
https://github.com/wangkenpu/conv-tasnet-pytorch/blob/64188ffa48971218fdd68b66906970f215d7eca2/utils/sigproc/mask.py#L66-L71
from __future__ import absolute_import from __future__ import division from __future__ import absolute_import import numpy as np import torch EPSILON = np.finfo(np.float32).eps MAX_FLOAT = np.finfo(np.float32).max def convert_to_linear(feat, use_log, use_power): if use_log: feat = np.exp(feat) feat = np.clip(feat, a_min=EPSILON, a_max=MAX_FLOAT) if use_power: feat = np.clip(feat, a_min=EPSILON, a_max=MAX_FLOAT) feat = np.sqrt(feat) return feat def compute_mask(target, reference, use_log, use_power, mask_type): mask = { 'ibm': ibm(target, reference, use_log, use_power), 'irm': irm(target, reference, use_log, use_power), 'iam': iam(target, reference, use_log, use_power), 'ipsm': ipsm(target, reference, use_log, use_power), }[mask_type] return mask def ibm(target, interference, use_log, use_power): target = convert_to_linear(target, use_log, use_power) interference = convert_to_linear(interference, use_log, use_power) mask = np.zeros(np.shape(target), dtype=np.float32) mask[np.abs(target) >= np.abs(interference)] = 1.0 return mask
MIT License
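A self-contained numpy sketch of the ideal amplitude mask computed by iam() above, for the plain magnitude case (use_log=False, use_power=False): the mask is |target| / |mixture| per time-frequency bin.

import numpy as np

target_mag = np.array([[0.5, 1.0],
                       [2.0, 0.1]])   # |STFT| of the target source
mixture_mag = np.array([[1.0, 2.0],
                        [2.5, 0.4]])  # |STFT| of the mixture

mask = np.abs(target_mag) / np.abs(mixture_mag)
print(mask)  # [[0.5  0.5 ], [0.8  0.25]]

# Applying the mask to the mixture magnitude gives back the target magnitude estimate.
print(mask * mixture_mag)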
openstack/ironic
ironic/drivers/modules/agent_power.py
AgentPower.get_power_state
python
def get_power_state(self, task):
    if cond_utils.agent_is_alive(task.node):
        return states.POWER_ON
    else:
        LOG.error('Node %s is not fast-track-able, cannot determine '
                  'its power state via the "agent" power interface',
                  task.node.uuid)
        return None
Return the power state of the task's node.

Essentially, the only known state is POWER ON, everything else is an error
(or more precisely ``None``).

:param task: A TaskManager instance containing the node to act on.
:returns: A power state. One of :mod:`ironic.common.states`.
https://github.com/openstack/ironic/blob/a4a6f26333be31b84a9b1a874dde506e61d407d3/ironic/drivers/modules/agent_power.py#L97-L113
import time from oslo_config import cfg from oslo_log import log import tenacity from ironic.common import exception from ironic.common.i18n import _ from ironic.common import states from ironic.conductor import utils as cond_utils from ironic.drivers import base from ironic.drivers.modules import agent_client CONF = cfg.CONF LOG = log.getLogger(__name__) _POWER_WAIT = 30 class AgentPower(base.PowerInterface): def __init__(self): super(AgentPower, self).__init__() if not CONF.deploy.fast_track: raise exception.InvalidParameterValue( _('[deploy]fast_track must be True to enable the agent ' 'power interface')) self._client = agent_client.AgentClient() def get_properties(self): return {} def validate(self, task): if not CONF.deploy.fast_track: raise exception.InvalidParameterValue( _('[deploy]fast_track must be True to enable the agent ' 'power interface')) if not cond_utils.agent_is_alive(task.node): raise exception.InvalidParameterValue( _('Agent seems offline for node %s, the agent power interface ' 'cannot be used') % task.node.uuid) def supports_power_sync(self, task): return False def get_supported_power_states(self, task): return [states.REBOOT, states.SOFT_REBOOT]
Apache License 2.0
fuyukai/asyncqlio
asyncqlio/backends/sqlite3/aiosqlite3.py
Sqlite3Transaction.begin
python
async def begin(self):
    self.connection = await self.connector.pool.acquire()
Begins the current transaction.
https://github.com/fuyukai/asyncqlio/blob/9bdb49076dea14730ec39e6d033061d6bccc016c/asyncqlio/backends/sqlite3/aiosqlite3.py#L132-L136
import asyncio import logging import sqlite3 import typing from asyncio_extras import threadpool from asyncqlio.backends.base import BaseConnector, BaseResultSet, BaseTransaction, DictRow from asyncqlio.exc import DatabaseException, IntegrityError from asyncqlio.utils import separate_statements logger = logging.getLogger(__name__) class _SqlitePool: def __init__(self, max_size: int = 12, *, loop=None, **kwargs): self.queue = asyncio.Queue(maxsize=max_size, loop=loop) self.connection_args = kwargs def _new_connection(self) -> sqlite3.Connection: conn = sqlite3.connect(**self.connection_args, check_same_thread=False) conn.row_factory = sqlite3.Row return conn async def connect(self, *args, **kwargs): async with threadpool(): for x in range(0, self.queue.maxsize): conn = self._new_connection() self.queue.put_nowait(conn) return self async def acquire(self) -> sqlite3.Connection: return await self.queue.get() async def release(self, conn: sqlite3.Connection): async with threadpool(): if conn.in_transaction: conn.close() conn = self._new_connection() self.queue.put_nowait(conn) async def close(self): while True: try: conn = self.queue.get_nowait() except asyncio.QueueEmpty: return conn.close() class Sqlite3Connector(BaseConnector): def __init__(self, parsed, *, max_size: int = 12): super().__init__(parsed) self.loop = None self.max_size = max_size self.pool = None async def connect(self, *, loop: asyncio.AbstractEventLoop = None) -> 'BaseConnector': self.loop = loop or asyncio.get_event_loop() self.pool = _SqlitePool(max_size=self.max_size, database=self.db, loop=self.loop, **self.params) await self.pool.connect() return self async def close(self): await self.pool.close() def get_transaction(self) -> 'BaseTransaction': return Sqlite3Transaction(self) def emit_param(self, name: str) -> str: return ":{}".format(name) async def get_db_server_version(self): return sqlite3.sqlite_version class Sqlite3Transaction(BaseTransaction): def __init__(self, connector: 'Sqlite3Connector'): super().__init__(connector) self.connection = None self._lock = asyncio.Lock(loop=self.connector.loop)
MIT License
thudm/proteinlm
pretrain/megatron/initialize.py
initialize_megatron
python
def initialize_megatron(extra_args_provider=None, args_defaults={},
                        ignore_unknown_args=False, allow_no_cuda=False):
    if not allow_no_cuda:
        assert torch.cuda.is_available(), 'Megatron requires CUDA.'

    set_global_variables(extra_args_provider=extra_args_provider,
                         args_defaults=args_defaults,
                         ignore_unknown_args=ignore_unknown_args)

    def finish_mpu_init():
        args = get_args()
        _initialize_distributed()
        if args.rank == 0:
            print('> setting random seeds to {} ...'.format(args.seed))
        _set_random_seed(args.seed)

    args = get_args()
    if args.lazy_mpu_init:
        args.use_cpu_initialization = True
        set_tensor_model_parallel_world_size(args.tensor_model_parallel_size)
        set_tensor_model_parallel_rank(args.rank)
        return finish_mpu_init
    else:
        finish_mpu_init()

        _initialize_mem_buffs()

        _init_autoresume()

        try:
            from megatron.data import helpers
        except:
            if torch.distributed.get_rank() == 0:
                from megatron.data.dataset_utils import compile_helper
                compile_helper()
            torch.distributed.barrier()

        return None
Set global variables, initialize distributed, and set autoresume and random seeds.

`allow_no_cuda` should not be set unless using Megatron for CPU-only data processing.
In general this arg should not be set unless you know what you are doing.

Returns a function to finalize distributed env initialization (optionally, only when
args.lazy_mpu_init == True).
https://github.com/thudm/proteinlm/blob/fda4f381b4b974721b187cece968dd7bc96a81f4/pretrain/megatron/initialize.py#L31-L93
import random import os import numpy as np import torch from megatron import get_adlr_autoresume from megatron import get_args from megatron import get_tensorboard_writer from megatron import mpu from megatron.global_vars import set_global_variables from megatron.mpu import set_tensor_model_parallel_rank, set_tensor_model_parallel_world_size
Apache License 2.0
skylothar/certbot-dns-dnspod
certbot_dns_dnspod/client.py
get_base_domain
python
def get_base_domain(record):
    fragments = record.rsplit(".", 2)
    if len(fragments) == 3:
        sub_domain, domain, tld = fragments
        base_domain = u"{0}.{1}".format(domain, tld)
    elif len(fragments) == 2:
        sub_domain = u"@"
        base_domain = record
    else:
        raise errors.PluginError(
            u"Unable to determine sub_domain for {0}.".format(record)
        )
    logger.debug(u"%s => %s + %s", record, sub_domain, base_domain)
    return sub_domain, base_domain
Extract the "sub_domain" and "base_domain" for DNSPOD from the given record.

:param str record: The record name (typically beginning with "_acme-challenge.").
:returns: The sub_domain and domain, if found.
:rtype: (str, str)
:raises certbot.errors.PluginError: if no sub_domain is found.
https://github.com/skylothar/certbot-dns-dnspod/blob/b081094dd4aef2d2509109f64cf6acbf5bf185ba/certbot_dns_dnspod/client.py#L15-L39
import logging

import requests

from certbot import errors

__version__ = "0.0.1"

logger = logging.getLogger(__name__)
Apache License 2.0
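A self-contained sketch of the splitting logic used by get_base_domain above; the certbot PluginError is replaced with a plain ValueError so the snippet runs without certbot installed.

def split_record(record):
    fragments = record.rsplit(".", 2)
    if len(fragments) == 3:
        sub_domain, domain, tld = fragments
        return sub_domain, "{0}.{1}".format(domain, tld)
    if len(fragments) == 2:
        return "@", record
    raise ValueError("Unable to determine sub_domain for {0}.".format(record))

print(split_record("_acme-challenge.www.example.com"))  # ('_acme-challenge.www', 'example.com')
print(split_record("example.com"))                      # ('@', 'example.com')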
bashfuscator/bashfuscator
bashfuscator/core/engine/obfuscation_handler.py
ObfuscationHandler.getPrefMutators
python
def getPrefMutators(self, mutators, sizePref, timePref, binaryPref=None, filePref=None, prevCmdOb=None):
    goodMutators = self.getPrefItems(mutators, sizePref, timePref)

    if binaryPref:
        binList = binaryPref[0]
        includeBinary = binaryPref[1]

    prefMutators = []
    for mutator in goodMutators:
        if mutator.mutatorType == "command":
            if prevCmdOb and prevCmdOb.reversible and prevCmdOb.name == mutator.name:
                continue

            prefStubs = self.getPrefStubs(mutator.stubs, sizePref, timePref, binaryPref, filePref)

            if prefStubs:
                mutator.prefStubs = prefStubs
            else:
                continue

        elif filePref is False and mutator.mutatorType != "command" and mutator.fileWrite != filePref:
            continue

        elif binaryPref:
            badBinary = False

            for binary in mutator.binariesUsed:
                if (binary in binList) != includeBinary:
                    if includeBinary:
                        if mutator.binariesUsed:
                            badBinary = True
                            break
                        else:
                            continue
                    else:
                        badBinary = True
                        break

            if badBinary:
                continue

        prefMutators.append(mutator)

    return prefMutators
Get Mutators from a sequence which are suitable to use based on the user's preferences.

:param mutators: list of Mutators or Stubs
:type mutators: list
:param sizePref: payload size user preference
:type sizePref: int
:param timePref: execution time user preference
:type timePref: int
:param binaryPref: list of binaries that the chosen Mutator should or should not use
:type binaryPref: tuple containing a list of strs, and a bool
:param filePref: file write user preference
:type filePref: bool
:param prevCmdOb: the previous CommandObfuscator used. Should only be passed if a
    CommandObfuscator was used to generate the most recent obfuscation layer
:type prevCmdOb: :class:`bashfuscator.lib.command_mutators.CommandObfuscator`
:returns: list of :class:`bashfuscator.common.objects.Mutator` objects, or None if
    there are no preferable Mutators in the 'mutators' argument
https://github.com/bashfuscator/bashfuscator/blob/7487348da2d0112213f8540ae28bf12b652f924a/bashfuscator/core/engine/obfuscation_handler.py#L484-L554
from bashfuscator.common.messages import printError, printWarning from bashfuscator.core.engine.mangler import Mangler from bashfuscator.core.engine.random import RandomGen from bashfuscator.core.utils import import_mutators class ObfuscationHandler(object): def __init__(self, cmdObfuscators=None, strObfuscators=None, tokObfuscators=None, encoders=None, compressors=None, args=None): if cmdObfuscators and strObfuscators and tokObfuscators and encoders and compressors: self.cmdObfuscators = cmdObfuscators self.strObfuscators = strObfuscators self.tokObfuscators = tokObfuscators self.encoders = encoders self.compressors = compressors else: self.cmdObfuscators, self.strObfuscators, self.tokObfuscators, self.encoders, self.compressors = import_mutators() if args: self.layers = args.layers self.sizePref = args.payload_size self.timePref = args.execution_time self.binaryPref = args.binaryPref self.filePref = args.no_file_write self.writeDir = args.write_dir self.full_ascii_strings = args.full_ascii_strings self.debug = args.debug self.clip = args.clip self.originalCmd = args.command if args.choose_mutators: self.userMutators = args.choose_mutators elif args.choose_all: self.userMutators = args.choose_all else: self.userMutators = None if args.no_mangling is not None: self.enableMangling = args.no_mangling else: self.enableMangling = None if args.no_binary_mangling is not None: self.mangleBinaries = args.no_binary_mangling else: self.mangleBinaries = None if args.binary_mangle_percent: self.binaryManglePercent = args.binary_mangle_percent else: self.binaryManglePercent = None if args.no_random_whitespace is not None: self.randWhitespace = args.no_random_whitespace else: self.randWhitespace = None if args.random_whitespace_range: self.randWhitespaceRange = args.random_whitespace_range else: self.randWhitespaceRange = None if args.no_insert_chars is not None: self.insertChars = args.no_insert_chars else: self.insertChars = None if args.insert_chars_range: self.insertCharsRange = args.insert_chars_range else: self.insertCharsRange = None if args.no_misleading_commands is not None: self.misleadingCmds = args.no_misleading_commands else: self.misleadingCmds = None if args.misleading_commands_range: self.misleadingCmdsRange = args.misleading_commands_range else: self.misleadingCmdsRange = None if args.no_integer_mangling is not None: self.mangleIntegers = args.no_integer_mangling else: self.mangleIntegers = None if args.no_integer_expansion is not None: self.expandIntegers = args.no_integer_expansion else: self.expandIntegers = None if args.no_integer_base_randomization is not None: self.randomizeIntegerBases = args.no_integer_base_randomization else: self.randomizeIntegerBases = None if args.integer_expansion_depth: self.integerExpansionDepth = args.integer_expansion_depth else: self.integerExpansionDepth = None if args.no_terminator_randomization is not None: self.randomizeTerminators = args.no_terminator_randomization else: self.randomizeTerminators = None else: self.sizePref = 2 self.timePref = 2 self.binaryPref = None self.filePref = True self.writeDir = "/tmp/" self.full_ascii_strings = False self.debug = False self.clip = False self.userMutators = None self.enableMangling = None self.mangleBinaries = None self.binaryManglePercent = None self.randWhitespace = None self.randWhitespaceRange = None self.insertChars = None self.insertCharsRange = None self.misleadingCmds = None self.misleadingCmdsRange = None self.mangleIntegers = None self.expandIntegers = None self.randomizeIntegerBases = None 
self.integerExpansionDepth = None self.randomizeTerminators = None self.prevCmdOb = None self.mutatorList = [] self.randGen = RandomGen() if args and args.full_ascii_strings: self.randGen.setFullAsciiStrings() def generatePayload(self): payload = self.originalCmd for i in range(self.layers): if self.userMutators: for userMutator in self.userMutators: userStub = None if userMutator.count("/") == 2: if userMutator[-1] == "/": userMutator = userMutator[:-1] else: userStub = userMutator.split("/")[2] userMutator = userMutator[:-int(len(userStub) + 1)] self.mutatorList.append(self.getMutator(userMutator, userStub, self.sizePref, self.timePref, self.binaryPref, self.filePref)) else: self.mutatorList.append(self.getMutator(sizePref=self.sizePref, timePref=self.timePref, binaryPref=self.binaryPref, filePref=self.filePref)) self.checkMutatorList() for mutator in self.mutatorList: mutator.writeDir = self.writeDir mutator.mangler._initialize(self.sizePref, self.enableMangling, self.mangleBinaries, self.binaryManglePercent, self.randWhitespace, self.randWhitespaceRange, self.insertChars, self.insertCharsRange, self.misleadingCmds, self.misleadingCmdsRange, self.mangleIntegers, self.expandIntegers, self.randomizeIntegerBases, self.integerExpansionDepth, self.randomizeTerminators, self.debug) payload = mutator.mutate(payload) mutator._obfuscatedCmd = payload self.randGen.forgetUniqueStrs() payload = self.evalWrap(payload, mutator) return payload def checkMutatorList(self): reverseableMutator = "" nonReadableWarning = False for i, mutator in enumerate(self.mutatorList): if self.clip and ((mutator.unreadableOutput and not nonReadableWarning) or self.full_ascii_strings): printWarning("Output may consist of unreadable ASCII characters and probably won't execute from your clipboard correctly. 
Saving output with '-o' is recommended") nonReadableWarning = True if mutator.mutatorType == "command" and mutator.reversible: if reverseableMutator == mutator.longName: printWarning(f"{mutator.longName} used twice in a row, part of the output may be in the clear") reverseableMutator = "" else: reverseableMutator = mutator.longName else: reverseableMutator = "" def getMutator(self, userMutator=None, userStub=None, sizePref=None, timePref=None, binaryPref=None, filePref=None): selMutator = None if userMutator: mutatorType = userMutator.split("/")[0] if mutatorType == "command": selMutator = self.choosePrefMutator(self.cmdObfuscators, sizePref, timePref, binaryPref, filePref, self.prevCmdOb, userMutator, userStub) self.prevCmdOb = selMutator elif mutatorType == "string": selMutator = self.choosePrefMutator(self.strObfuscators, binaryPref=binaryPref, filePref=filePref, userMutator=userMutator) elif mutatorType == "token": selMutator = self.choosePrefMutator(self.tokObfuscators, binaryPref=binaryPref, filePref=filePref, userMutator=userMutator) elif mutatorType == "encode": selMutator = self.choosePrefMutator(self.encoders, binaryPref=binaryPref, filePref=filePref, userMutator=userMutator) elif mutatorType == "compress": selMutator = self.choosePrefMutator(self.compressors, binaryPref=binaryPref, filePref=filePref, userMutator=userMutator) else: printError(f"{mutatorType} isn't a valid mutator type") else: obChoice = self.randGen.randChoice(3) if obChoice == 0: selMutator = self.choosePrefMutator(self.cmdObfuscators, sizePref, timePref, binaryPref, filePref, self.prevCmdOb) self.prevCmdOb = selMutator elif obChoice == 1: selMutator = self.choosePrefMutator(self.strObfuscators, sizePref, timePref, binaryPref, filePref) else: selMutator = self.choosePrefMutator(self.tokObfuscators, sizePref, timePref) selMutator.sizePref = sizePref selMutator.timePref = timePref return selMutator def genObfuscationLayer(self, payload, userMutator=None, userStub=None, sizePref=None, timePref=None, binaryPref=None, filePref=None, writeDir=None, enableMangling=None, mangleBinaries=None, binaryManglePercent=None, randWhitespace=None, randWhitespaceRange=None, insertChars=None, insertCharsRange=None, misleadingCmds=None, misleadingCmdsRange=None, mangleIntegers=None, expandIntegers=None, randomizeIntegerBases=None, integerExpansionDepth=None, randomizeTerminators=None, debug=None): if sizePref is None: sizePref = self.sizePref if timePref is None: timePref = self.timePref if binaryPref is None: binaryPref = self.binaryPref if filePref is None: filePref = self.filePref if writeDir is None: writeDir = self.writeDir if enableMangling is None: enableMangling = self.enableMangling if mangleBinaries is None: mangleBinaries = self.mangleBinaries if binaryManglePercent is None: binaryManglePercent = self.binaryManglePercent if randWhitespace is None: randWhitespace = self.randWhitespace if randWhitespaceRange is None: randWhitespaceRange = self.randWhitespaceRange if insertChars is None: insertChars = self.insertChars if insertCharsRange is None: insertCharsRange = self.insertCharsRange if misleadingCmds is None: misleadingCmds = self.misleadingCmds if misleadingCmdsRange is None: misleadingCmdsRange = self.misleadingCmdsRange if mangleIntegers is None: mangleIntegers = self.mangleIntegers if expandIntegers is None: expandIntegers = self.expandIntegers if randomizeIntegerBases is None: randomizeIntegerBases = self.randomizeIntegerBases if integerExpansionDepth is None: integerExpansionDepth = self.integerExpansionDepth if 
randomizeTerminators is None: randomizeTerminators = self.randomizeTerminators if debug is None: debug = self.debug selMutator = self.getMutator(userMutator, userStub, sizePref, timePref, binaryPref, filePref) selMutator.writeDir = writeDir selMutator.mangler._initialize(sizePref, enableMangling, mangleBinaries, binaryManglePercent, randWhitespace, randWhitespaceRange, insertChars, insertCharsRange, misleadingCmds, misleadingCmdsRange, mangleIntegers, expandIntegers, randomizeIntegerBases, integerExpansionDepth, randomizeTerminators, debug) payload = selMutator.mutate(payload) selMutator._obfuscatedCmd = payload self.randGen.forgetUniqueStrs() payload = self.evalWrap(payload, selMutator) return payload def evalWrap(self, payload, selMutator): if selMutator.evalWrap: evalMethodChoice = self.randGen.randChoice(3) if evalMethodChoice == 1: wrappedPayload = selMutator.mangler._mangleLine('* *:eval:^ ^"$(? ?DATA? ?)"* *', payload) else: shellChoice = self.randGen.randChoice(3) if shellChoice == 0: bashShell = ":bash:" elif shellChoice == 1: bashShell = "$BASH" else: bashShell = "${!#}" if evalMethodChoice == 2: wrappedPayload = selMutator.mangler._mangleLine(f'* *:printf:^ ^%s^ ^"$(? ?DATA? ?)"* *|* *{bashShell}* *', payload) else: wrappedPayload = selMutator.mangler._mangleLine(f'* *{bashShell}% %<<<^ ^"$(? ?DATA? ?)"* *', payload) else: wrappedPayload = selMutator.mangler._mangleLine(f"? ?(? ?DATA? ?)", payload) return wrappedPayload def choosePrefMutator(self, mutators, sizePref=None, timePref=None, binaryPref=None, filePref=None, prevCmdOb=None, userMutator=None, userStub=None): selMutator = None if userMutator: if binaryPref: binList = binaryPref[0] includeBinary = binaryPref[1] for mutator in mutators: if mutator.longName == userMutator: if filePref is False and mutator.mutatorType != "command" and mutator.fileWrite != filePref: printWarning(f"'{userMutator}' mutator preforms file writes") elif binaryPref and mutator.mutatorType != "command": for binary in mutator.binariesUsed: if (binary in binList) != includeBinary: printWarning(f"'{userMutator}' mutator contains an unwanted binary") selMutator = mutator if selMutator.mutatorType == "command": selMutator.prefStubs = self.getPrefStubs(selMutator.stubs, sizePref, timePref, binaryPref, filePref) break if selMutator is None: printError(f"Selected mutator '{userMutator}' not found") else: prefMutators = self.getPrefMutators(mutators, sizePref, timePref, binaryPref, filePref, prevCmdOb) selMutator = self.randGen.randSelect(prefMutators) if selMutator is not None and selMutator.mutatorType == "command": selMutator.deobStub = self.choosePrefStub(selMutator.prefStubs, sizePref, timePref, binaryPref, filePref, userStub) if selMutator.deobStub: selMutator.deobStub.mangler = selMutator.mangler selMutator.deobStub.randGen = selMutator.mangler.randGen else: printError(f"All of '{selMutator.longName}'s Stubs do not fulfil your requirements") return selMutator
MIT License
olitheolix/aiokubernetes
aiokubernetes/models/policy_v1beta1_pod_security_policy_spec.py
PolicyV1beta1PodSecurityPolicySpec.fs_group
python
def fs_group(self, fs_group):
    if fs_group is None:
        raise ValueError("Invalid value for `fs_group`, must not be `None`")

    self._fs_group = fs_group
Sets the fs_group of this PolicyV1beta1PodSecurityPolicySpec.

FSGroup is the strategy that will dictate what fs group is used by the SecurityContext. # noqa: E501

:param fs_group: The fs_group of this PolicyV1beta1PodSecurityPolicySpec. # noqa: E501
:type: PolicyV1beta1FSGroupStrategyOptions
https://github.com/olitheolix/aiokubernetes/blob/266718b210dff2a9b2212183261ea89adf89115e/aiokubernetes/models/policy_v1beta1_pod_security_policy_spec.py#L288-L299
import pprint import re from aiokubernetes.models.policy_v1beta1_allowed_flex_volume import PolicyV1beta1AllowedFlexVolume from aiokubernetes.models.policy_v1beta1_allowed_host_path import PolicyV1beta1AllowedHostPath from aiokubernetes.models.policy_v1beta1_fs_group_strategy_options import PolicyV1beta1FSGroupStrategyOptions from aiokubernetes.models.policy_v1beta1_host_port_range import PolicyV1beta1HostPortRange from aiokubernetes.models.policy_v1beta1_run_as_user_strategy_options import PolicyV1beta1RunAsUserStrategyOptions from aiokubernetes.models.policy_v1beta1_se_linux_strategy_options import PolicyV1beta1SELinuxStrategyOptions from aiokubernetes.models.policy_v1beta1_supplemental_groups_strategy_options import PolicyV1beta1SupplementalGroupsStrategyOptions class PolicyV1beta1PodSecurityPolicySpec(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'allow_privilege_escalation': 'bool', 'allowed_capabilities': 'list[str]', 'allowed_flex_volumes': 'list[PolicyV1beta1AllowedFlexVolume]', 'allowed_host_paths': 'list[PolicyV1beta1AllowedHostPath]', 'default_add_capabilities': 'list[str]', 'default_allow_privilege_escalation': 'bool', 'fs_group': 'PolicyV1beta1FSGroupStrategyOptions', 'host_ipc': 'bool', 'host_network': 'bool', 'host_pid': 'bool', 'host_ports': 'list[PolicyV1beta1HostPortRange]', 'privileged': 'bool', 'read_only_root_filesystem': 'bool', 'required_drop_capabilities': 'list[str]', 'run_as_user': 'PolicyV1beta1RunAsUserStrategyOptions', 'se_linux': 'PolicyV1beta1SELinuxStrategyOptions', 'supplemental_groups': 'PolicyV1beta1SupplementalGroupsStrategyOptions', 'volumes': 'list[str]' } attribute_map = { 'allow_privilege_escalation': 'allowPrivilegeEscalation', 'allowed_capabilities': 'allowedCapabilities', 'allowed_flex_volumes': 'allowedFlexVolumes', 'allowed_host_paths': 'allowedHostPaths', 'default_add_capabilities': 'defaultAddCapabilities', 'default_allow_privilege_escalation': 'defaultAllowPrivilegeEscalation', 'fs_group': 'fsGroup', 'host_ipc': 'hostIPC', 'host_network': 'hostNetwork', 'host_pid': 'hostPID', 'host_ports': 'hostPorts', 'privileged': 'privileged', 'read_only_root_filesystem': 'readOnlyRootFilesystem', 'required_drop_capabilities': 'requiredDropCapabilities', 'run_as_user': 'runAsUser', 'se_linux': 'seLinux', 'supplemental_groups': 'supplementalGroups', 'volumes': 'volumes' } def __init__(self, allow_privilege_escalation=None, allowed_capabilities=None, allowed_flex_volumes=None, allowed_host_paths=None, default_add_capabilities=None, default_allow_privilege_escalation=None, fs_group=None, host_ipc=None, host_network=None, host_pid=None, host_ports=None, privileged=None, read_only_root_filesystem=None, required_drop_capabilities=None, run_as_user=None, se_linux=None, supplemental_groups=None, volumes=None): self._allow_privilege_escalation = None self._allowed_capabilities = None self._allowed_flex_volumes = None self._allowed_host_paths = None self._default_add_capabilities = None self._default_allow_privilege_escalation = None self._fs_group = None self._host_ipc = None self._host_network = None self._host_pid = None self._host_ports = None self._privileged = None self._read_only_root_filesystem = None self._required_drop_capabilities = None self._run_as_user = None self._se_linux = None self._supplemental_groups = None self._volumes = None self.discriminator = None if 
allow_privilege_escalation is not None: self.allow_privilege_escalation = allow_privilege_escalation if allowed_capabilities is not None: self.allowed_capabilities = allowed_capabilities if allowed_flex_volumes is not None: self.allowed_flex_volumes = allowed_flex_volumes if allowed_host_paths is not None: self.allowed_host_paths = allowed_host_paths if default_add_capabilities is not None: self.default_add_capabilities = default_add_capabilities if default_allow_privilege_escalation is not None: self.default_allow_privilege_escalation = default_allow_privilege_escalation self.fs_group = fs_group if host_ipc is not None: self.host_ipc = host_ipc if host_network is not None: self.host_network = host_network if host_pid is not None: self.host_pid = host_pid if host_ports is not None: self.host_ports = host_ports if privileged is not None: self.privileged = privileged if read_only_root_filesystem is not None: self.read_only_root_filesystem = read_only_root_filesystem if required_drop_capabilities is not None: self.required_drop_capabilities = required_drop_capabilities self.run_as_user = run_as_user self.se_linux = se_linux self.supplemental_groups = supplemental_groups if volumes is not None: self.volumes = volumes @property def allow_privilege_escalation(self): return self._allow_privilege_escalation @allow_privilege_escalation.setter def allow_privilege_escalation(self, allow_privilege_escalation): self._allow_privilege_escalation = allow_privilege_escalation @property def allowed_capabilities(self): return self._allowed_capabilities @allowed_capabilities.setter def allowed_capabilities(self, allowed_capabilities): self._allowed_capabilities = allowed_capabilities @property def allowed_flex_volumes(self): return self._allowed_flex_volumes @allowed_flex_volumes.setter def allowed_flex_volumes(self, allowed_flex_volumes): self._allowed_flex_volumes = allowed_flex_volumes @property def allowed_host_paths(self): return self._allowed_host_paths @allowed_host_paths.setter def allowed_host_paths(self, allowed_host_paths): self._allowed_host_paths = allowed_host_paths @property def default_add_capabilities(self): return self._default_add_capabilities @default_add_capabilities.setter def default_add_capabilities(self, default_add_capabilities): self._default_add_capabilities = default_add_capabilities @property def default_allow_privilege_escalation(self): return self._default_allow_privilege_escalation @default_allow_privilege_escalation.setter def default_allow_privilege_escalation(self, default_allow_privilege_escalation): self._default_allow_privilege_escalation = default_allow_privilege_escalation @property def fs_group(self): return self._fs_group @fs_group.setter
Apache License 2.0
kiminewt/pyshark
src/pyshark/capture/live_capture.py
LiveCapture.sniff_continuously
python
def sniff_continuously(self, packet_count=None):
    return self._packets_from_tshark_sync(packet_count=packet_count)
Captures from the set interface, returning a generator which returns packets continuously.

Can be used as follows:
for packet in capture.sniff_continuously():
    print('Woo, another packet:', packet)

Note: you can also call capture.apply_on_packets(packet_callback) which should have a slight performance boost.

:param packet_count: an amount of packets to capture, then stop.
https://github.com/kiminewt/pyshark/blob/336eca58ef8567f18186f7a563b8202e8b6ea943/src/pyshark/capture/live_capture.py#L100-L112
import os import asyncio import sys from distutils.version import LooseVersion from pyshark.capture.capture import Capture from pyshark.tshark.tshark import get_tshark_interfaces, get_process_path class LiveCapture(Capture): def __init__(self, interface=None, bpf_filter=None, display_filter=None, only_summaries=False, decryption_key=None, encryption_type='wpa-pwk', output_file=None, decode_as=None, disable_protocol=None, tshark_path=None, override_prefs=None, capture_filter=None, monitor_mode=False, use_json=False, include_raw=False, eventloop=None, custom_parameters=None, debug=False): super(LiveCapture, self).__init__(display_filter=display_filter, only_summaries=only_summaries, decryption_key=decryption_key, encryption_type=encryption_type, output_file=output_file, decode_as=decode_as, disable_protocol=disable_protocol, tshark_path=tshark_path, override_prefs=override_prefs, capture_filter=capture_filter, use_json=use_json, include_raw=include_raw, eventloop=eventloop, custom_parameters=custom_parameters, debug=debug) self.bpf_filter = bpf_filter self.monitor_mode = monitor_mode if sys.platform == "win32" and monitor_mode: raise WindowsError("Monitor mode is not supported by the Windows platform") if interface is None: self.interfaces = get_tshark_interfaces(tshark_path) elif isinstance(interface, str): self.interfaces = [interface] else: self.interfaces = interface def get_parameters(self, packet_count=None): params = super(LiveCapture, self).get_parameters(packet_count=packet_count) params += ["-r", "-"] return params def _get_dumpcap_parameters(self): params = ["-q"] if self._get_tshark_version() < LooseVersion("2.5.0"): params += ["-P"] if self.bpf_filter: params += ["-f", self.bpf_filter] if self.monitor_mode: params += ["-I"] for interface in self.interfaces: params += ["-i", interface] params += ["-w", "-"] return params async def _get_tshark_process(self, packet_count=None, stdin=None): read, write = os.pipe() dumpcap_params = [get_process_path(process_name="dumpcap", tshark_path=self.tshark_path)] + self._get_dumpcap_parameters() self._log.debug("Creating Dumpcap subprocess with parameters: %s" % " ".join(dumpcap_params)) dumpcap_process = await asyncio.create_subprocess_exec(*dumpcap_params, stdout=write, stderr=self._stderr_output()) self._created_new_process(dumpcap_params, dumpcap_process, process_name="Dumpcap") tshark = await super(LiveCapture, self)._get_tshark_process(packet_count=packet_count, stdin=read) return tshark sniff = Capture.load_packets
MIT License
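A short usage sketch for LiveCapture.sniff_continuously in the record above; the interface name 'eth0' and the packet count are illustrative assumptions, not part of the original record:

import pyshark

# Open a live capture on a (hypothetical) interface and read five packets.
capture = pyshark.LiveCapture(interface='eth0')
for packet in capture.sniff_continuously(packet_count=5):
    # Each item yielded by the generator is a parsed pyshark packet object.
    print('Captured:', packet.highest_layer)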
wavefronthq/python-client
wavefront_api_client/models/stats_model_internal_use.py
StatsModelInternalUse.compacted_points
python
def compacted_points(self):
    return self._compacted_points
Gets the compacted_points of this StatsModelInternalUse. # noqa: E501

:return: The compacted_points of this StatsModelInternalUse. # noqa: E501
:rtype: int
https://github.com/wavefronthq/python-client/blob/e410ce0dd8a2334e995456f4f3d44e0f04664a3a/wavefront_api_client/models/stats_model_internal_use.py#L209-L216
import pprint import re import six from wavefront_api_client.configuration import Configuration class StatsModelInternalUse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'buffer_keys': 'int', 'cached_compacted_keys': 'int', 'compacted_keys': 'int', 'compacted_points': 'int', 'cpu_ns': 'int', 'distributions': 'int', 'edges': 'int', 'hosts_used': 'int', 'keys': 'int', 'latency': 'int', 'metrics': 'int', 'metrics_used': 'int', 'points': 'int', 'queries': 'int', 'query_tasks': 'int', 's3_keys': 'int', 'skipped_compacted_keys': 'int', 'spans': 'int', 'summaries': 'int' } attribute_map = { 'buffer_keys': 'buffer_keys', 'cached_compacted_keys': 'cached_compacted_keys', 'compacted_keys': 'compacted_keys', 'compacted_points': 'compacted_points', 'cpu_ns': 'cpu_ns', 'distributions': 'distributions', 'edges': 'edges', 'hosts_used': 'hosts_used', 'keys': 'keys', 'latency': 'latency', 'metrics': 'metrics', 'metrics_used': 'metrics_used', 'points': 'points', 'queries': 'queries', 'query_tasks': 'query_tasks', 's3_keys': 's3_keys', 'skipped_compacted_keys': 'skipped_compacted_keys', 'spans': 'spans', 'summaries': 'summaries' } def __init__(self, buffer_keys=None, cached_compacted_keys=None, compacted_keys=None, compacted_points=None, cpu_ns=None, distributions=None, edges=None, hosts_used=None, keys=None, latency=None, metrics=None, metrics_used=None, points=None, queries=None, query_tasks=None, s3_keys=None, skipped_compacted_keys=None, spans=None, summaries=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._buffer_keys = None self._cached_compacted_keys = None self._compacted_keys = None self._compacted_points = None self._cpu_ns = None self._distributions = None self._edges = None self._hosts_used = None self._keys = None self._latency = None self._metrics = None self._metrics_used = None self._points = None self._queries = None self._query_tasks = None self._s3_keys = None self._skipped_compacted_keys = None self._spans = None self._summaries = None self.discriminator = None if buffer_keys is not None: self.buffer_keys = buffer_keys if cached_compacted_keys is not None: self.cached_compacted_keys = cached_compacted_keys if compacted_keys is not None: self.compacted_keys = compacted_keys if compacted_points is not None: self.compacted_points = compacted_points if cpu_ns is not None: self.cpu_ns = cpu_ns if distributions is not None: self.distributions = distributions if edges is not None: self.edges = edges if hosts_used is not None: self.hosts_used = hosts_used if keys is not None: self.keys = keys if latency is not None: self.latency = latency if metrics is not None: self.metrics = metrics if metrics_used is not None: self.metrics_used = metrics_used if points is not None: self.points = points if queries is not None: self.queries = queries if query_tasks is not None: self.query_tasks = query_tasks if s3_keys is not None: self.s3_keys = s3_keys if skipped_compacted_keys is not None: self.skipped_compacted_keys = skipped_compacted_keys if spans is not None: self.spans = spans if summaries is not None: self.summaries = summaries @property def buffer_keys(self): return self._buffer_keys @buffer_keys.setter def buffer_keys(self, buffer_keys): self._buffer_keys = buffer_keys @property def cached_compacted_keys(self): return self._cached_compacted_keys 
@cached_compacted_keys.setter def cached_compacted_keys(self, cached_compacted_keys): self._cached_compacted_keys = cached_compacted_keys @property def compacted_keys(self): return self._compacted_keys @compacted_keys.setter def compacted_keys(self, compacted_keys): self._compacted_keys = compacted_keys @property
Apache License 2.0
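A minimal sketch of reading the compacted_points property from the record above, assuming the generated wavefront_api_client package is installed; the value 42 is arbitrary:

from wavefront_api_client.models.stats_model_internal_use import StatsModelInternalUse

# The generated model accepts every field as an optional keyword argument.
stats = StatsModelInternalUse(compacted_points=42)
print(stats.compacted_points)  # -> 42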
czbiohub/microdl
tests/networks/image3D_to_vector_net_tests.py
TestImage3DToVectorNet.setUp
python
def setUp(self):
    self.network_config = {'num_input_channels': 1,
                           'data_format': 'channels_first',
                           'height': 64,
                           'width': 64,
                           'depth': 64,
                           'batch_norm': True,
                           'dropout': 0.2,
                           'pooling_type': 'average',
                           'block_sequence': 'conv-bn-activation',
                           'num_initial_filters': 16,
                           'num_convs_per_block': 2,
                           'residual': True,
                           'dense': {
                               'type': 'dense',
                               'dropout': 0.5,
                               'regression_length': 7,
                               'kernel_regularizer': {
                                   'type': 'l2',
                                   'lambda': 0.0001
                               },
                           },
                           'final_activation': 'linear'}
    self.net = Image3DToVectorNet(self.network_config)
    inputs, outputs = self.net.build_net()
    self.model = Model(inputs, outputs)
    self.model_layers = self.model.layers
Set up network_config, model and model layers
https://github.com/czbiohub/microdl/blob/d321807137f986a88f75258e44415206c7f6e481/tests/networks/image3D_to_vector_net_tests.py#L12-L41
from keras import Model
import nose.tools
import numpy as np
import unittest

from micro_dl.networks import Image3DToVectorNet


class TestImage3DToVectorNet(unittest.TestCase):
MIT License
googlecloudplatform/ml-on-gcp
example_zoo/tensorflow/probability/deep_exponential_family/trainer/deep_exponential_family.py
trainable_gamma
python
def trainable_gamma(shape, min_concentration=1e-3, min_scale=1e-5, name=None):
    with tf.compat.v1.variable_scope(None, default_name="trainable_gamma"):
        unconstrained_concentration = tf.compat.v1.get_variable(
            "unconstrained_concentration", shape,
            initializer=tf.compat.v1.initializers.random_normal(
                mean=0.5, stddev=0.1))
        unconstrained_scale = tf.compat.v1.get_variable(
            "unconstrained_scale", shape,
            initializer=tf.compat.v1.initializers.random_normal(stddev=0.1))
        concentration = tf.maximum(tf.nn.softplus(unconstrained_concentration),
                                   min_concentration)
        rate = tf.maximum(1. / tf.nn.softplus(unconstrained_scale),
                          1. / min_scale)
        rv = ed.Gamma(concentration=concentration, rate=rate, name=name)
        return rv
Learnable Gamma via concentration and scale parameterization.
https://github.com/googlecloudplatform/ml-on-gcp/blob/ffd88931674e08ef6b0b20de27700ed1da61772c/example_zoo/tensorflow/probability/deep_exponential_family/trainer/deep_exponential_family.py#L129-L145
from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import os import time from absl import flags flags.DEFINE_string(name="job-dir", default="/tmp", help="AI Platform Training passes this to the training script.") import numpy as np from six.moves import urllib import tensorflow as tf from tensorflow_probability import edward2 as ed flags.DEFINE_float("learning_rate", default=1e-4, help="Initial learning rate.") flags.DEFINE_integer("max_steps", default=200000, help="Number of training steps to run.") flags.DEFINE_list("layer_sizes", default=["100", "30", "15"], help="Comma-separated list denoting number of latent " "variables (stochastic units) per layer.") flags.DEFINE_float("shape", default=0.1, help="Shape hyperparameter for Gamma priors on latents.") flags.DEFINE_string("data_dir", default=os.path.join(os.getenv("TEST_TMPDIR", "/tmp"), "deep_exponential_family/data"), help="Directory where data is stored (if using real data).") flags.DEFINE_string("model_dir", default=os.path.join(os.getenv("TEST_TMPDIR", "/tmp"), "deep_exponential_family/"), help="Directory to put the model's fit.") flags.DEFINE_bool("fake_data", default=None, help="If true, uses fake data. Defaults to real data.") FLAGS = flags.FLAGS def deep_exponential_family(data_size, feature_size, units, shape): w2 = ed.Gamma(0.1, 0.3, sample_shape=[units[2], units[1]], name="w2") w1 = ed.Gamma(0.1, 0.3, sample_shape=[units[1], units[0]], name="w1") w0 = ed.Gamma(0.1, 0.3, sample_shape=[units[0], feature_size], name="w0") z2 = ed.Gamma(0.1, 0.1, sample_shape=[data_size, units[2]], name="z2") z1 = ed.Gamma(shape, shape / tf.matmul(z2, w2), name="z1") z0 = ed.Gamma(shape, shape / tf.matmul(z1, w1), name="z0") x = ed.Poisson(tf.matmul(z0, w0), name="x") return x def trainable_positive_deterministic(shape, min_loc=1e-3, name=None): with tf.compat.v1.variable_scope( None, default_name="trainable_positive_deterministic"): unconstrained_loc = tf.compat.v1.get_variable("unconstrained_loc", shape) loc = tf.maximum(tf.nn.softplus(unconstrained_loc), min_loc) rv = ed.Deterministic(loc=loc, name=name) return rv
Apache License 2.0
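A hedged sketch of how trainable_gamma from the record above might be used to build a variational factor; it assumes the function is already defined in the session, that TensorFlow Probability's edward2 is available, and that the program runs in TF1-style graph mode as in the record. The shape and name are illustrative:

import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # the record's code assumes graph mode

# One learnable Gamma factor per entry of a (100, 30) latent matrix.
qw1 = trainable_gamma(shape=[100, 30], name="qw1")
print(qw1)  # an edward2 Gamma RandomVariable backed by two trainable variables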
jrieke/year-on-github
app/templates.py
fineprint
python
def fineprint(runtime: Union[float, None] = None):
    limits = github_reader.rate_limit_info()
    s = """
    <p align="right" id="fineprint">
        Core: {core_remaining} (reset in {core_reset})<br>
        GraphQL: {graphql_remaining} (reset in {graphql_reset})<br>
    """.format(
        **limits
    )
    if runtime is not None:
        s += f"Runtime: {runtime:.2f} s"
    s += "</p>"
    return s
Show rate limits and runtime of the last action as fineprint.
https://github.com/jrieke/year-on-github/blob/a0aab8a465269fb17b88418fca3f67f33e4bd18d/app/templates.py#L36-L49
from typing import Union, Dict import re import urllib import github_reader USER_TEMPLATE = """ <p id="tweet"> My year on <a href="https://twitter.com/search?q=%23Github2020">#Github2020</a> 🐙 {username} <br><br> 📬 Commits/Issues/PRs: {contributions}<br> 🏝️ Repos contributed to: {repos_contributed_to}<br> ⭐ New stars: {new_stars}<br> 🔥 Hottest: {hottest} <br><br> Share yours: <a href="https://gh2020.jrieke.com">gh2020.jrieke.com</a> | Built by <a href="https://twitter.com/jrieke">@jrieke</a> w/ <a href="https://twitter.com/streamlit">@streamlit</a> </p> """ ORG_TEMPLATE = """ <p id="tweet"> Our year on <a href="https://twitter.com/search?q=%23Github2020">#Github2020</a> 🐙 {username} <br><br> 👷 Contributors: {repos_contributed_to}<br> ⭐ New stars: {new_stars}<br> 🔥 Hottest: {hottest} <br><br> Share yours: <a href="https://gh2020.jrieke.com">gh2020.jrieke.com</a> | Built by <a href="https://twitter.com/jrieke">@jrieke</a> w/ <a href="https://twitter.com/streamlit">@streamlit</a> </p> """
MIT License
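A hedged usage sketch for fineprint from the record above. Rendering the returned HTML with Streamlit's markdown call is an assumption based on the app shown in the record's context; the import path assumes the app/ directory is on sys.path:

import streamlit as st

from templates import fineprint  # module path per the record above

# fineprint() returns an HTML snippet, so it must be rendered with HTML enabled.
st.markdown(fineprint(runtime=1.23), unsafe_allow_html=True)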
rlworkgroup/garage
src/garage/tf/q_functions/discrete_cnn_q_function.py
DiscreteCNNQFunction.build
python
def build(self, state_input, name):
    augmented_state_input = state_input
    if isinstance(self._env_spec.observation_space, akro.Image):
        augmented_state_input = tf.cast(state_input, tf.float32) / 255.0
    return super().build(augmented_state_input, name=name).outputs
Build the symbolic graph for q-network.

Args:
    state_input (tf.Tensor): The state input tf.Tensor to the network.
    name (str): Network variable scope.

Return:
    tf.Tensor: The tf.Tensor output of Discrete CNN QFunction.
https://github.com/rlworkgroup/garage/blob/3a578852c392cecde5b7c9786aa182d74f6df1d4/src/garage/tf/q_functions/discrete_cnn_q_function.py#L202-L216
import akro import tensorflow as tf from garage.experiment import deterministic from garage.tf.models import (CNNModel, CNNModelWithMaxPooling, MLPDuelingModel, MLPModel, Sequential) class DiscreteCNNQFunction(Sequential): def __init__(self, env_spec, filters, strides, hidden_sizes=(256, ), name=None, padding='SAME', max_pooling=False, pool_strides=(2, 2), pool_shapes=(2, 2), cnn_hidden_nonlinearity=tf.nn.relu, hidden_nonlinearity=tf.nn.relu, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), dueling=False, layer_normalization=False): if not isinstance(env_spec.observation_space, akro.Box) or not len(env_spec.observation_space.shape) in (2, 3): raise ValueError( '{} can only process 2D, 3D akro.Image or' ' akro.Box observations, but received an env_spec with ' 'observation_space of type {} and shape {}'.format( type(self).__name__, type(env_spec.observation_space).__name__, env_spec.observation_space.shape)) self._env_spec = env_spec self._action_dim = env_spec.action_space.n self._filters = filters self._strides = strides self._hidden_sizes = hidden_sizes self._padding = padding self._max_pooling = max_pooling self._pool_strides = pool_strides self._pool_shapes = pool_shapes self._cnn_hidden_nonlinearity = cnn_hidden_nonlinearity self._hidden_nonlinearity = hidden_nonlinearity self._hidden_w_init = hidden_w_init self._hidden_b_init = hidden_b_init self._output_nonlinearity = output_nonlinearity self._output_w_init = output_w_init self._output_b_init = output_b_init self._layer_normalization = layer_normalization self._dueling = dueling self.obs_dim = self._env_spec.observation_space.shape action_dim = self._env_spec.action_space.flat_dim if not max_pooling: cnn_model = CNNModel(input_dim=self.obs_dim, filters=filters, strides=strides, padding=padding, hidden_nonlinearity=cnn_hidden_nonlinearity) else: cnn_model = CNNModelWithMaxPooling( input_dim=self.obs_dim, filters=filters, strides=strides, padding=padding, pool_strides=pool_strides, pool_shapes=pool_shapes, hidden_nonlinearity=cnn_hidden_nonlinearity) if not dueling: output_model = MLPModel(output_dim=action_dim, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, layer_normalization=layer_normalization) else: output_model = MLPDuelingModel( output_dim=action_dim, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, layer_normalization=layer_normalization) super().__init__(cnn_model, output_model, name=name) self._network = None self._initialize() def _initialize(self): if isinstance(self._env_spec.observation_space, akro.Image): obs_ph = tf.compat.v1.placeholder(tf.uint8, (None, ) + self.obs_dim, name='obs') augmented_obs_ph = tf.cast(obs_ph, tf.float32) / 255.0 else: obs_ph = tf.compat.v1.placeholder(tf.float32, (None, ) + self.obs_dim, name='obs') augmented_obs_ph = obs_ph self._network = super().build(augmented_obs_ph) self._obs_input = obs_ph @property def q_vals(self): return self._network.outputs @property def input(self): return self._obs_input
MIT License
neoacheron/midea-ac-py
midea.py
MideaClimateACDevice.async_set_fan_mode
python
async def async_set_fan_mode(self, fan_mode):
    from midea.device import air_conditioning_device as ac
    self._device.fan_speed = ac.fan_speed_enum[fan_mode]
    self._changed = True
    self.async_schedule_update_ha_state()
Set new target fan mode.
https://github.com/neoacheron/midea-ac-py/blob/d8cac1fed8e32abefbf7a486b4edf359a31bc516/midea.py#L196-L201
import logging import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.climate import ClimateDevice, PLATFORM_SCHEMA from homeassistant.components.climate.const import ( SUPPORT_TARGET_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE_HIGH, SUPPORT_TARGET_TEMPERATURE_LOW, SUPPORT_AWAY_MODE, SUPPORT_FAN_MODE, SUPPORT_OPERATION_MODE, SUPPORT_SWING_MODE, SUPPORT_ON_OFF) from homeassistant.const import CONF_USERNAME, CONF_PASSWORD, TEMP_CELSIUS, TEMP_FAHRENHEIT, ATTR_TEMPERATURE REQUIREMENTS = ['midea==0.1.7', 'pycryptodome==3.7.0'] VERSION = '0.1.7' _LOGGER = logging.getLogger(__name__) CONF_APP_KEY = 'app_key' CONF_TEMP_STEP = 'temp_step' CONF_INCLUDE_OFF_AS_STATE = 'include_off_as_state' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_APP_KEY): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_TEMP_STEP, default=1.0): vol.Coerce(float), vol.Optional(CONF_INCLUDE_OFF_AS_STATE, default=True): vol.Coerce(bool) }) SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE | SUPPORT_FAN_MODE | SUPPORT_OPERATION_MODE | SUPPORT_SWING_MODE | SUPPORT_TARGET_TEMPERATURE_HIGH | SUPPORT_TARGET_TEMPERATURE_LOW async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): from midea.client import client as midea_client app_key = config.get(CONF_APP_KEY) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) temp_step = config.get(CONF_TEMP_STEP) include_off_as_state = config.get(CONF_INCLUDE_OFF_AS_STATE) client = midea_client(app_key, username, password) devices = client.devices() entities = [] for device in devices: if(device.type == 0xAC): entities.append(MideaClimateACDevice( device, temp_step, include_off_as_state)) else: _LOGGER.error( "Unsupported device type: 0x{:02x}".format(device.type)) async_add_entities(entities) class MideaClimateACDevice(ClimateDevice): def __init__(self, device, temp_step: float, include_off_as_state: bool): from midea.device import air_conditioning_device as ac self._operation_list = ac.operational_mode_enum.list() self._fan_list = ac.fan_speed_enum.list() self._swing_list = ac.swing_mode_enum.list() support_flags = SUPPORT_FLAGS if not include_off_as_state: support_flags != SUPPORT_ON_OFF else: self._operation_list.append("off") self._support_flags = support_flags self._device = device self._unit_of_measurement = TEMP_CELSIUS self._target_temperature_step = temp_step self._include_off_as_state = include_off_as_state self._changed = False async def async_update(self): if self._changed: await self.hass.async_add_executor_job(self._device.apply) self._changed = False else: await self.hass.async_add_executor_job(self._device.refresh) @property def available(self): return self._device.online @property def supported_features(self): return self._support_flags @property def target_temperature_step(self): return self._target_temperature_step @property def operation_list(self): return self._operation_list @property def fan_list(self): return self._fan_list @property def swing_list(self): return self._swing_list @property def should_poll(self): return True @property def name(self): return "midea_{}".format(self._device.id) @property def temperature_unit(self): return self._unit_of_measurement @property def current_temperature(self): return self._device.indoor_temperature @property def target_temperature(self): return self._device.target_temperature @property def current_operation(self): if self._include_off_as_state and 
not self._device.power_state: return "off" return self._device.operational_mode.name @property def current_fan_mode(self): return self._device.fan_speed.name @property def current_swing_mode(self): return self._device.swing_mode.name @property def is_away_mode_on(self): return self._device.eco_mode @property def is_on(self): return self._device.power_state async def async_set_temperature(self, **kwargs): if kwargs.get(ATTR_TEMPERATURE) is not None: self._device.target_temperature = int(kwargs.get(ATTR_TEMPERATURE)) self._changed = True self.async_schedule_update_ha_state() async def async_set_swing_mode(self, swing_mode): from midea.device import air_conditioning_device as ac self._device.swing_mode = ac.swing_mode_enum[swing_mode] self._changed = True self.async_schedule_update_ha_state()
MIT License
notmatthancock/level-set-machine-learning
lsml/score_functions.py
jaccard
python
def jaccard(u, seg, threshold=0.0):
    if seg.dtype != numpy.bool:
        msg = "`seg` dtype ({}) was not of type bool"
        raise ValueError(msg.format(seg.dtype))

    thresholded = u > threshold
    intersection = float((thresholded & seg).sum())
    union = float((thresholded | seg).sum())

    if union == 0:
        return 1.0
    else:
        return intersection / union
Compute the Jaccard overlap score between `u > threshold` and `seg` (Also known as the "intersection over union")
https://github.com/notmatthancock/level-set-machine-learning/blob/38460e514d48f3424bb8d3bd58cb3eb330153e64/lsml/score_functions.py#L4-L23
import numpy
BSD 3-Clause New or Revised License
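A small, self-contained check of jaccard from the record above on a toy level-set array and boolean mask, assuming a NumPy version where numpy.bool is still available (as the record's code requires) and that jaccard is importable per the record's path:

import numpy

from lsml.score_functions import jaccard

u = numpy.array([-1.0, 0.5, 2.0, -0.2])       # level-set values
seg = numpy.array([False, True, True, True])  # ground-truth mask

# u > 0.0 gives [False, True, True, False]:
# intersection = 2, union = 3, so the score is 2/3.
print(jaccard(u, seg))  # 0.666...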
smartelect/smartelect
bulk_sms/models.py
Broadcast.get_numbers_for_staff
python
def get_numbers_for_staff():
    return StaffPhone.objects.values_list('phone_number', flat=True).distinct()
Returns a list of all staff members' phone numbers in the StaffPhone table.
https://github.com/smartelect/smartelect/blob/d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8/bulk_sms/models.py#L248-L250
import datetime import random from django.db import models from django.conf import settings from django.core.exceptions import ValidationError from django.template.loader import render_to_string from django.urls import reverse from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _, ugettext from libya_elections.abstract import AbstractTimestampTrashBinModel, TrashBinManager from libya_elections.libya_bread import RegistrationCenterFormatterMixin, CreatedByFormatterMixin, ReviewedByFormatterMixin from libya_elections.phone_numbers import PhoneNumberField from polling_reports.models import StaffPhone from register.models import Registration, SMS class BatchManager(TrashBinManager): def get_next_batch(self): active_batches = self.get_queryset().filter(status=Batch.APPROVED) return active_batches.order_by('-priority').first() class Batch(AbstractTimestampTrashBinModel): PENDING = 1 APPROVED = 2 REJECTED = 3 COMPLETED = 4 PROCESSING = 5 UPLOADING = 6 STATUS_CHOICES = ( (UPLOADING, _('Uploading')), (PENDING, _('Pending Approval')), (APPROVED, _('Approved')), (REJECTED, _('Rejected')), (COMPLETED, _('Completed')), (PROCESSING, _('Processing')), ) PRIORITY_BATCH = 0 PRIORITY_TIME_CRITICAL = 100 name = models.CharField(max_length=255) description = models.TextField(blank=True) created_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='batches_created', on_delete=models.CASCADE) reviewed_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='batches_reviewed', null=True, blank=True, on_delete=models.CASCADE) status = models.IntegerField(choices=STATUS_CHOICES, default=PENDING) errors = models.IntegerField(default=0) priority = models.IntegerField(default=PRIORITY_BATCH, help_text="Batches with higher priority are sent first") objects = BatchManager() class Meta: verbose_name = _("batch") verbose_name_plural = _("batches") ordering = ['-creation_date'] def __str__(self): return self.name def time_remaining(self): num_remaining = self.messages.unsent().count() if num_remaining: seconds_remaining = num_remaining / settings.BULKSMS_DEFAULT_MESSAGES_PER_SECOND if seconds_remaining < 1: seconds_remaining = 1 return datetime.timedelta(seconds=seconds_remaining) else: return 0 def random_n_messages(self, n=10): message_count = self.messages.unsent().count() max_start = n if n <= message_count else message_count start = random.randint(0, message_count - max_start) return self.messages.unsent()[start:start + n] def add_messages(self, generator): NUM_TO_BULK_CREATE = 10000 messages_to_create = [] for phone_number, message, from_shortcode in generator: kwargs = dict(phone_number=phone_number, message=message) if from_shortcode: kwargs['from_shortcode'] = from_shortcode messages_to_create.append(BulkMessage(batch=self, **kwargs)) if len(messages_to_create) == NUM_TO_BULK_CREATE: BulkMessage.objects.bulk_create(messages_to_create) messages_to_create = [] if messages_to_create: BulkMessage.objects.bulk_create(messages_to_create) class BulkMessageQuerySet(models.query.QuerySet): def active(self): return self.order_by('creation_date') def unsent(self): return self.active().filter(sms=None) def sent(self): return self.active().filter(sms__isnull=False) class BulkMessageManager(TrashBinManager): queryset = BulkMessageQuerySet def active(self): return self.get_queryset().active() def unsent(self): return self.get_queryset().unsent() def sent(self): return self.get_queryset().sent() class BulkMessage(AbstractTimestampTrashBinModel): phone_number = 
PhoneNumberField(_('phone number')) from_shortcode = models.CharField(_('from shortcode'), max_length=5, default=settings.REGISTRATION_SHORT_CODE, help_text=_('What shortcode should this appear to be from?')) message = models.TextField(_('message')) batch = models.ForeignKey(Batch, related_name='messages', verbose_name=_('batch'), on_delete=models.CASCADE) sms = models.OneToOneField(SMS, null=True, blank=True, verbose_name=_('sms'), on_delete=models.CASCADE) objects = BulkMessageManager() class Meta: verbose_name = _("bulk message") verbose_name_plural = _("bulk messages") ordering = ['-creation_date'] def __str__(self): return 'Message to %s from batch %s' % (self.phone_number, self.batch) def clean(self): if self.from_shortcode not in settings.SHORT_CODES: raise ValidationError(_('Invalid shortcode: Valid values are: {}').format( list(settings.SHORT_CODES))) class Broadcast(CreatedByFormatterMixin, RegistrationCenterFormatterMixin, ReviewedByFormatterMixin, AbstractTimestampTrashBinModel): STAFF_ONLY = 'staff' SINGLE_CENTER = 'single_center' ALL_REGISTRANTS = 'all_centers' CUSTOM = 'custom' CUSTOM_CHOICE = (CUSTOM, _("Custom")) MINIMUM_AUDIENCE = ( (STAFF_ONLY, _("Staff")), (SINGLE_CENTER, _("Registrants in a single center")), (ALL_REGISTRANTS, _("Registrants in the entire voter register")), ) ALL_AUDIENCES = MINIMUM_AUDIENCE + (CUSTOM_CHOICE, ) created_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, related_name='broadcast_created', verbose_name=_('created by'), on_delete=models.CASCADE) reviewed_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, related_name='broadcast_reviewed', verbose_name=_('reviewed by'), on_delete=models.CASCADE) batch = models.OneToOneField(Batch, verbose_name=_('batch'), on_delete=models.CASCADE) audience = models.CharField(_('audience'), max_length=20, choices=ALL_AUDIENCES, default=STAFF_ONLY) center = models.ForeignKey('register.RegistrationCenter', null=True, blank=True, verbose_name=_('registration center'), on_delete=models.CASCADE) message = models.TextField(_('message')) def __str__(self): return self.message def get_absolute_url(self): return reverse('read_broadcast', args=[self.id]) def get_messages(self): if self.audience == self.STAFF_ONLY: from_shortcode = settings.REPORTS_SHORT_CODE else: from_shortcode = None get_phone_numbers = getattr(self, 'get_numbers_for_%s' % self.audience) phone_numbers = get_phone_numbers() for number in phone_numbers: yield number, self.message, from_shortcode @staticmethod def get_numbers_for_all_centers(): registrations = Registration.objects.all() return registrations.values_list('sms__from_number', flat=True).distinct() def get_numbers_for_single_center(self): registrations = Registration.objects.filter(registration_center=self.center) return registrations.values_list('sms__from_number', flat=True).distinct() @staticmethod
Apache License 2.0
kabkabm/defensegan
datasets/dataset.py
Dataset.__init__
python
def __init__(self, name, data_dir='./data'):
    self.data_dir = os.path.join(data_dir, name)
    self.name = name
    self.images = None
    self.labels = None
The dataset default constructor.

Args:
    name: A string, name of the dataset.
    data_dir (optional): The path of the datasets on disk.
https://github.com/kabkabm/defensegan/blob/7e3feaebf7b9bbf08b1364e400119ef596cd78fd/datasets/dataset.py#L39-L50
import cPickle
import os

import numpy as np
import scipy
import scipy.misc


class Dataset(object):
Apache License 2.0
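A tiny sketch of constructing the Dataset from the record above; the dataset name and directory are placeholders, and the import assumes the repository root is on sys.path:

from datasets.dataset import Dataset

ds = Dataset('mnist', data_dir='./data')

print(ds.name)      # 'mnist'
print(ds.data_dir)  # './data/mnist' (data_dir joined with the dataset name)
print(ds.images, ds.labels)  # both None until data is loaded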
hunch/hunch-gift-app
django/template/loaders/app_directories.py
Loader.get_template_sources
python
def get_template_sources(self, template_name, template_dirs=None):
    if not template_dirs:
        template_dirs = app_template_dirs
    for template_dir in template_dirs:
        try:
            yield safe_join(template_dir, template_name)
        except UnicodeDecodeError:
            raise
        except ValueError:
            pass
Returns the absolute paths to "template_name", when appended to each directory in "template_dirs". Any paths that don't lie inside one of the template dirs are excluded from the result set, for security reasons.
https://github.com/hunch/hunch-gift-app/blob/8c7cad24cc0d9900deb4175e6b768c64a3d7adcf/django/template/loaders/app_directories.py#L34-L50
import os
import sys

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
from django.utils.importlib import import_module

fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()

app_template_dirs = []
for app in settings.INSTALLED_APPS:
    try:
        mod = import_module(app)
    except ImportError, e:
        raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
    template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
    if os.path.isdir(template_dir):
        app_template_dirs.append(template_dir.decode(fs_encoding))

app_template_dirs = tuple(app_template_dirs)


class Loader(BaseLoader):
    is_usable = True
MIT License
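A hedged sketch of calling get_template_sources from the record above. It assumes a configured Django settings module and the old Django 1.x-era loader API shown in the record; the template name is illustrative:

from django.template.loaders.app_directories import Loader

loader = Loader()
# Yields one candidate absolute path per installed app's 'templates' directory;
# paths that would escape a template dir are silently skipped.
for path in loader.get_template_sources('registration/login.html'):
    print(path)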
netflix/dispatch
src/dispatch/plugin/models.py
PluginInstance.configuration_schema
python
def configuration_schema(self):
    plugin = plugins.get(self.plugin.slug)
    return plugin.configuration_schema.schema()
Renders the plugin's schema to JSON Schema.
https://github.com/netflix/dispatch/blob/e30705938e970d8ef0dfdd04246a3f3004a6a44f/src/dispatch/plugin/models.py#L72-L75
from typing import Any, List, Optional from pydantic import Field, SecretStr from pydantic.json import pydantic_encoder from sqlalchemy.orm import relationship from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy import Column, Integer, String, Boolean, ForeignKey from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy_utils import TSVectorType, StringEncryptedType from sqlalchemy_utils.types.encrypted.encrypted_type import AesEngine from dispatch.database.core import Base from dispatch.config import DISPATCH_ENCRYPTION_KEY from dispatch.models import DispatchBase, ProjectMixin, PrimaryKey from dispatch.plugins.base import plugins from dispatch.project.models import ProjectRead def show_secrets_encoder(obj): if type(obj) == SecretStr: return obj.get_secret_value() else: return pydantic_encoder(obj) class Plugin(Base): __table_args__ = {"schema": "dispatch_core"} id = Column(Integer, primary_key=True) title = Column(String) slug = Column(String, unique=True) description = Column(String) version = Column(String) author = Column(String) author_url = Column(String) type = Column(String) multiple = Column(Boolean) search_vector = Column( TSVectorType( "title", "slug", "type", "description", weights={"title": "A", "slug": "B", "type": "C", "description": "C"}, ) ) class PluginInstance(Base, ProjectMixin): id = Column(Integer, primary_key=True) enabled = Column(Boolean) _configuration = Column( StringEncryptedType(key=str(DISPATCH_ENCRYPTION_KEY), engine=AesEngine, padding="pkcs5") ) plugin_id = Column(Integer, ForeignKey(Plugin.id)) plugin = relationship(Plugin, backref="instances") search_vector = association_proxy("plugin", "search_vector") @property def instance(self): plugin = plugins.get(self.plugin.slug) plugin.configuration = self.configuration return plugin @property
Apache License 2.0
crhenr/easy-shell
easy_shell.py
is_valid
python
def is_valid(host_port):
    if len(host_port.split(":")) != 2:
        return False

    return True
Checks whether both a host and a port are present.
https://github.com/crhenr/easy-shell/blob/26fe0638820c996825ff381454bcc8f84d022667/easy_shell.py#L49-L55
import ssl
import http.server
import socketserver
from urllib.parse import urlparse

PORT = 8080
DOMAIN = "http://127.0.0.1:{}".format(PORT)
HTTPS = False
KEY_FILE = "keyfile.key"
CERT_FILE = "certfile.cert"

USAGE = """# Usage
# Attacker: nc -l port
# Target: curl {}/ip:port | sh\n""".format(DOMAIN)
BSD 3-Clause New or Revised License
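Two quick checks of is_valid from the record above; the addresses are arbitrary examples, and the import assumes easy_shell.py is on sys.path:

from easy_shell import is_valid

print(is_valid("10.0.0.1:4444"))  # True  -- exactly one ':' separating host and port
print(is_valid("10.0.0.1"))       # False -- no port given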
jnrbsn/daemonocle
daemonocle/helpers.py
MultiDaemon.stop
python
def stop(self, worker_id=None, timeout=None, force=False):
    if worker_id is not None:
        daemons = [self._daemons[worker_id]]
    else:
        daemons = self._daemons

    for daemon in daemons:
        daemon.stop(timeout=timeout, force=force)
Stop the daemon.
https://github.com/jnrbsn/daemonocle/blob/337827a8e6d25686aca5cd1102d74d83126ae784/daemonocle/helpers.py#L167-L174
import os import posixpath from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor, as_completed from operator import itemgetter import click import psutil from daemonocle._utils import ( format_elapsed_time, json_encode, to_bytes, waitstatus_to_exitcode) from daemonocle.core import Daemon, expose_action, get_action, list_actions from daemonocle.exceptions import DaemonError class FHSDaemon(Daemon): def __init__( self, name=None, prefix='/opt', log_prefix='', **kwargs): if name is not None: self.name = name elif not getattr(self, 'name', None): raise DaemonError('name must be defined for FHSDaemon') kwargs.update({ 'chrootdir': None, 'detach': True, }) prefix = posixpath.realpath(prefix) if prefix == '/opt': kwargs.update({ 'pid_file': '/var/opt/{name}/run/{name}.pid', 'stdout_file': '/var/opt/{name}/log/{log_prefix}out.log', 'stderr_file': '/var/opt/{name}/log/{log_prefix}err.log', }) elif prefix == '/usr/local': kwargs.update({ 'pid_file': '/var/local/run/{name}/{name}.pid', 'stdout_file': '/var/local/log/{name}/{log_prefix}out.log', 'stderr_file': '/var/local/log/{name}/{log_prefix}err.log', }) elif prefix == '/usr': kwargs.update({ 'pid_file': '/var/run/{name}/{name}.pid', 'stdout_file': '/var/log/{name}/{log_prefix}out.log', 'stderr_file': '/var/log/{name}/{log_prefix}err.log', }) else: kwargs.update({ 'pid_file': posixpath.join(prefix, 'run/{name}.pid'), 'stdout_file': posixpath.join( prefix, 'log/{log_prefix}out.log'), 'stderr_file': posixpath.join( prefix, 'log/{log_prefix}err.log'), }) for key in ('pid_file', 'stdout_file', 'stderr_file'): kwargs[key] = kwargs[key].format( name=self.name, log_prefix=log_prefix) if 'work_dir' in kwargs: work_dir = posixpath.realpath(kwargs['work_dir']) if work_dir == prefix and not posixpath.isdir(work_dir): umask = kwargs.get('umask', 0o22) uid = kwargs.get('uid', os.getuid()) gid = kwargs.get('gid', os.getgid()) os.makedirs(work_dir, 0o777 & ~umask) os.chown(work_dir, uid, gid) super(FHSDaemon, self).__init__(**kwargs) class MultiDaemon(object): def __init__(self, num_workers, daemon_cls=Daemon, **daemon_kwargs): if num_workers < 1: raise DaemonError('num_workers must be >= 1 for MultiDaemon') self.num_workers = num_workers self.worker = daemon_kwargs.get('worker', None) self._daemons = [] kwargs_to_format = {'name', 'work_dir'} if issubclass(daemon_cls, FHSDaemon): kwargs_to_format.add('prefix') else: kwargs_to_format.update(('pid_file', 'stdout_file', 'stderr_file')) pid_files = set() for n in range(self.num_workers): kwargs = daemon_kwargs.copy() kwargs.update({ 'chrootdir': None, 'detach': True, }) for key in kwargs_to_format: if key in kwargs: kwargs[key] = kwargs[key].format(n=n) daemon = daemon_cls(**kwargs) daemon.worker_id = n if daemon.pid_file is None: raise DaemonError('pid_file must be defined for MultiDaemon') pid_files.add(daemon.pid_file) self._daemons.append(daemon) if len(pid_files) < self.num_workers: raise DaemonError('PID files must be unique for MultiDaemon') @classmethod def list_actions(cls): return list_actions(cls) def get_action(self, action): return get_action(self, action) def do_action(self, action, *args, **kwargs): func = self.get_action(action) return func(*args, **kwargs) def cli(self, *args, **kwargs): from daemonocle.cli import DaemonCLI cli = DaemonCLI(daemon=self) return cli(*args, **kwargs) @expose_action def start(self, worker_id=None, debug=False, *args, **kwargs): if worker_id is not None: daemons = [self._daemons[worker_id]] else: daemons = self._daemons pid = os.fork() if 
pid: status = os.waitpid(pid, 0) exit(waitstatus_to_exitcode(status[1])) os.setsid() try: ctx = click.get_current_context() except RuntimeError: ctx = None for daemon in daemons: if ctx is not None: ctx.obj = daemon daemon.start(debug=debug, *args, **kwargs) @expose_action
MIT License
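A hedged sketch of stopping workers with MultiDaemon from the record above; the worker function and per-worker pid_file pattern are illustrative assumptions:

import time

from daemonocle.helpers import MultiDaemon

def run_worker():
    # placeholder worker loop
    while True:
        time.sleep(60)

multi = MultiDaemon(
    num_workers=2,
    worker=run_worker,
    pid_file='/tmp/example/worker_{n}.pid',  # '{n}' is expanded per worker
)

multi.stop()             # stop every worker
multi.stop(worker_id=0)  # or stop just the first worker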
deathbeds/jupyterlab-starters
src/jupyter_starters/tests/conftest.py
tmp_notebook
python
def tmp_notebook(tmp_path):
    notebook = nbformat.v4.new_notebook()
    notebook.metadata["kernelspec"] = {"name": "python3"}
    nb_path = tmp_path / "Untitled.ipynb"
    nb_path.write_text(nbformat.writes(notebook))
    return nb_path
Make an empty Python notebook on disk.
https://github.com/deathbeds/jupyterlab-starters/blob/6ee4a81b6efc76ef017cd4114330e5266060da69/src/jupyter_starters/tests/conftest.py#L61-L67
import nbformat.v4
import pytest
import traitlets
from jupyter_client.multikernelmanager import AsyncMultiKernelManager
from jupyter_server.services.contents.filemanager import FileContentsManager
from jupyter_server.services.contents.manager import ContentsManager
from traitlets.config import LoggingConfigurable

from jupyter_starters.manager import StarterManager


class MockApp(LoggingConfigurable):
    kernel_manager = traitlets.Instance(AsyncMultiKernelManager)
    contents_manager = traitlets.Instance(ContentsManager)
    notebook_dir = traitlets.Unicode()

    @traitlets.default("kernel_manager")
    def _kernel_manager(self):
        return AsyncMultiKernelManager(parent=self)

    @traitlets.default("contents_manager")
    def _contents_manager(self):
        return FileContentsManager(root_dir=self.notebook_dir, parent=self)


@pytest.fixture
def starter_manager(mock_app):
    return StarterManager(parent=mock_app)


@pytest.fixture
def mock_app(monkeypatch, tmp_path):
    monkeypatch.chdir(tmp_path)
    return MockApp(notebook_dir=str(tmp_path))


@pytest.fixture
def example_project(tmp_path):
    my_module = tmp_path / "my_module"
    starter_content = my_module / "starter_content"
    starter_content.mkdir(parents=True)
    (tmp_path / "README.md").write_text("# My Module\n")
    (my_module / "__init__.py").write_text("__version__ = '0.0.0\n")
    (starter_content / "example.txt").write_text("123")
    return tmp_path


@pytest.fixture
BSD 3-Clause New or Revised License
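A small sketch of using the tmp_notebook fixture from the record above in a pytest test; the test name and assertions are illustrative:

import nbformat

def test_tmp_notebook_is_valid(tmp_notebook):
    # The fixture returns a pathlib.Path pointing at 'Untitled.ipynb'.
    assert tmp_notebook.name == "Untitled.ipynb"
    nb = nbformat.reads(tmp_notebook.read_text(), as_version=4)
    assert nb.metadata["kernelspec"]["name"] == "python3"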
bitmovin/bitmovin-api-sdk-python
bitmovin_api_sdk/models/analytics_ads_impression_sample.py
AnalyticsAdsImpressionSample.ad_description
python
def ad_description(self, ad_description):
    if ad_description is not None:
        if not isinstance(ad_description, string_types):
            raise TypeError("Invalid type for `ad_description`, type has to be `string_types`")

    self._ad_description = ad_description
Sets the ad_description of this AnalyticsAdsImpressionSample.

Ad description

:param ad_description: The ad_description of this AnalyticsAdsImpressionSample.
:type: string_types
https://github.com/bitmovin/bitmovin-api-sdk-python/blob/79dd938804197151af7cbe5501c7ec1d97872c15/bitmovin_api_sdk/models/analytics_ads_impression_sample.py#L811-L825
from enum import Enum from six import string_types, iteritems from bitmovin_api_sdk.common.poscheck import poscheck_model import pprint import six class AnalyticsAdsImpressionSample(object): @poscheck_model def __init__(self, ad_clickthrough_url=None, ad_description=None, ad_duration=None, ad_fallback_index=None, ad_id=None, ad_id_player=None, ad_impression_id=None, ad_is_persistent=None, ad_module=None, ad_module_version=None, ad_offset=None, ad_playback_height=None, ad_playback_width=None, ad_pod_position=None, ad_position=None, ad_preload_offset=None, ad_replace_content_duration=None, ad_schedule_time=None, ad_skip_after=None, ad_skippable=None, ad_startup_time=None, ad_system=None, ad_tag_path=None, ad_tag_server=None, ad_tag_type=None, ad_tag_url=None, ad_title=None, ad_wrapper_ads_count=None, advertiser_name=None, analytics_version=None, api_framework=None, apiorg_id=None, apiuser_id=None, audio_bitrate=None, autoplay=None, browser=None, browser_is_bot=None, browser_version_major=None, browser_version_minor=None, cdn_provider=None, city=None, click_percentage=None, click_position=None, clicked=None, client_time=None, close_percentage=None, close_position=None, closed=None, completed=None, country=None, creative_ad_id=None, creative_id=None, custom_data_1=None, custom_data_2=None, custom_data_3=None, custom_data_4=None, custom_data_5=None, custom_data_6=None, custom_data_7=None, custom_user_id=None, deal_id=None, device_class=None, device_type=None, domain=None, error_code=None, error_data=None, error_message=None, error_percentage=None, error_position=None, exit_position=None, experiment_name=None, ip_address=None, isp=None, language=None, license_key=None, manifest_download_time=None, media_path=None, media_server=None, media_url=None, midpoint=None, min_suggested_duration=None, operatingsystem=None, operatingsystem_version_major=None, operatingsystem_version_minor=None, page_load_time=None, page_load_type=None, path=None, percentage_in_viewport=None, platform=None, player=None, player_key=None, player_startuptime=None, player_tech=None, player_version=None, play_percentage=None, quartile_1=None, quartile_3=None, region=None, screen_height=None, screen_width=None, size=None, skip_percentage=None, skip_position=None, skipped=None, started=None, stream_format=None, survey_url=None, time=None, time_from_content=None, time_hovered=None, time_in_viewport=None, time_played=None, time_to_content=None, time_to_hover=None, universal_ad_id_registry=None, universal_ad_id_value=None, user_id=None, video_bitrate=None, video_id=None, video_impression_id=None, video_title=None, video_window_height=None, video_window_width=None): self._ad_clickthrough_url = None self._ad_description = None self._ad_duration = None self._ad_fallback_index = None self._ad_id = None self._ad_id_player = None self._ad_impression_id = None self._ad_is_persistent = None self._ad_module = None self._ad_module_version = None self._ad_offset = None self._ad_playback_height = None self._ad_playback_width = None self._ad_pod_position = None self._ad_position = None self._ad_preload_offset = None self._ad_replace_content_duration = None self._ad_schedule_time = None self._ad_skip_after = None self._ad_skippable = None self._ad_startup_time = None self._ad_system = None self._ad_tag_path = None self._ad_tag_server = None self._ad_tag_type = None self._ad_tag_url = None self._ad_title = None self._ad_wrapper_ads_count = None self._advertiser_name = None self._analytics_version = None self._api_framework = None self._apiorg_id = 
None self._apiuser_id = None self._audio_bitrate = None self._autoplay = None self._browser = None self._browser_is_bot = None self._browser_version_major = None self._browser_version_minor = None self._cdn_provider = None self._city = None self._click_percentage = None self._click_position = None self._clicked = None self._client_time = None self._close_percentage = None self._close_position = None self._closed = None self._completed = None self._country = None self._creative_ad_id = None self._creative_id = None self._custom_data_1 = None self._custom_data_2 = None self._custom_data_3 = None self._custom_data_4 = None self._custom_data_5 = None self._custom_data_6 = None self._custom_data_7 = None self._custom_user_id = None self._deal_id = None self._device_class = None self._device_type = None self._domain = None self._error_code = None self._error_data = None self._error_message = None self._error_percentage = None self._error_position = None self._exit_position = None self._experiment_name = None self._ip_address = None self._isp = None self._language = None self._license_key = None self._manifest_download_time = None self._media_path = None self._media_server = None self._media_url = None self._midpoint = None self._min_suggested_duration = None self._operatingsystem = None self._operatingsystem_version_major = None self._operatingsystem_version_minor = None self._page_load_time = None self._page_load_type = None self._path = None self._percentage_in_viewport = None self._platform = None self._player = None self._player_key = None self._player_startuptime = None self._player_tech = None self._player_version = None self._play_percentage = None self._quartile_1 = None self._quartile_3 = None self._region = None self._screen_height = None self._screen_width = None self._size = None self._skip_percentage = None self._skip_position = None self._skipped = None self._started = None self._stream_format = None self._survey_url = None self._time = None self._time_from_content = None self._time_hovered = None self._time_in_viewport = None self._time_played = None self._time_to_content = None self._time_to_hover = None self._universal_ad_id_registry = None self._universal_ad_id_value = None self._user_id = None self._video_bitrate = None self._video_id = None self._video_impression_id = None self._video_title = None self._video_window_height = None self._video_window_width = None self.discriminator = None if ad_clickthrough_url is not None: self.ad_clickthrough_url = ad_clickthrough_url if ad_description is not None: self.ad_description = ad_description if ad_duration is not None: self.ad_duration = ad_duration if ad_fallback_index is not None: self.ad_fallback_index = ad_fallback_index if ad_id is not None: self.ad_id = ad_id if ad_id_player is not None: self.ad_id_player = ad_id_player if ad_impression_id is not None: self.ad_impression_id = ad_impression_id if ad_is_persistent is not None: self.ad_is_persistent = ad_is_persistent if ad_module is not None: self.ad_module = ad_module if ad_module_version is not None: self.ad_module_version = ad_module_version if ad_offset is not None: self.ad_offset = ad_offset if ad_playback_height is not None: self.ad_playback_height = ad_playback_height if ad_playback_width is not None: self.ad_playback_width = ad_playback_width if ad_pod_position is not None: self.ad_pod_position = ad_pod_position if ad_position is not None: self.ad_position = ad_position if ad_preload_offset is not None: self.ad_preload_offset = ad_preload_offset if 
ad_replace_content_duration is not None: self.ad_replace_content_duration = ad_replace_content_duration if ad_schedule_time is not None: self.ad_schedule_time = ad_schedule_time if ad_skip_after is not None: self.ad_skip_after = ad_skip_after if ad_skippable is not None: self.ad_skippable = ad_skippable if ad_startup_time is not None: self.ad_startup_time = ad_startup_time if ad_system is not None: self.ad_system = ad_system if ad_tag_path is not None: self.ad_tag_path = ad_tag_path if ad_tag_server is not None: self.ad_tag_server = ad_tag_server if ad_tag_type is not None: self.ad_tag_type = ad_tag_type if ad_tag_url is not None: self.ad_tag_url = ad_tag_url if ad_title is not None: self.ad_title = ad_title if ad_wrapper_ads_count is not None: self.ad_wrapper_ads_count = ad_wrapper_ads_count if advertiser_name is not None: self.advertiser_name = advertiser_name if analytics_version is not None: self.analytics_version = analytics_version if api_framework is not None: self.api_framework = api_framework if apiorg_id is not None: self.apiorg_id = apiorg_id if apiuser_id is not None: self.apiuser_id = apiuser_id if audio_bitrate is not None: self.audio_bitrate = audio_bitrate if autoplay is not None: self.autoplay = autoplay if browser is not None: self.browser = browser if browser_is_bot is not None: self.browser_is_bot = browser_is_bot if browser_version_major is not None: self.browser_version_major = browser_version_major if browser_version_minor is not None: self.browser_version_minor = browser_version_minor if cdn_provider is not None: self.cdn_provider = cdn_provider if city is not None: self.city = city if click_percentage is not None: self.click_percentage = click_percentage if click_position is not None: self.click_position = click_position if clicked is not None: self.clicked = clicked if client_time is not None: self.client_time = client_time if close_percentage is not None: self.close_percentage = close_percentage if close_position is not None: self.close_position = close_position if closed is not None: self.closed = closed if completed is not None: self.completed = completed if country is not None: self.country = country if creative_ad_id is not None: self.creative_ad_id = creative_ad_id if creative_id is not None: self.creative_id = creative_id if custom_data_1 is not None: self.custom_data_1 = custom_data_1 if custom_data_2 is not None: self.custom_data_2 = custom_data_2 if custom_data_3 is not None: self.custom_data_3 = custom_data_3 if custom_data_4 is not None: self.custom_data_4 = custom_data_4 if custom_data_5 is not None: self.custom_data_5 = custom_data_5 if custom_data_6 is not None: self.custom_data_6 = custom_data_6 if custom_data_7 is not None: self.custom_data_7 = custom_data_7 if custom_user_id is not None: self.custom_user_id = custom_user_id if deal_id is not None: self.deal_id = deal_id if device_class is not None: self.device_class = device_class if device_type is not None: self.device_type = device_type if domain is not None: self.domain = domain if error_code is not None: self.error_code = error_code if error_data is not None: self.error_data = error_data if error_message is not None: self.error_message = error_message if error_percentage is not None: self.error_percentage = error_percentage if error_position is not None: self.error_position = error_position if exit_position is not None: self.exit_position = exit_position if experiment_name is not None: self.experiment_name = experiment_name if ip_address is not None: self.ip_address = ip_address if isp is not 
None: self.isp = isp if language is not None: self.language = language if license_key is not None: self.license_key = license_key if manifest_download_time is not None: self.manifest_download_time = manifest_download_time if media_path is not None: self.media_path = media_path if media_server is not None: self.media_server = media_server if media_url is not None: self.media_url = media_url if midpoint is not None: self.midpoint = midpoint if min_suggested_duration is not None: self.min_suggested_duration = min_suggested_duration if operatingsystem is not None: self.operatingsystem = operatingsystem if operatingsystem_version_major is not None: self.operatingsystem_version_major = operatingsystem_version_major if operatingsystem_version_minor is not None: self.operatingsystem_version_minor = operatingsystem_version_minor if page_load_time is not None: self.page_load_time = page_load_time if page_load_type is not None: self.page_load_type = page_load_type if path is not None: self.path = path if percentage_in_viewport is not None: self.percentage_in_viewport = percentage_in_viewport if platform is not None: self.platform = platform if player is not None: self.player = player if player_key is not None: self.player_key = player_key if player_startuptime is not None: self.player_startuptime = player_startuptime if player_tech is not None: self.player_tech = player_tech if player_version is not None: self.player_version = player_version if play_percentage is not None: self.play_percentage = play_percentage if quartile_1 is not None: self.quartile_1 = quartile_1 if quartile_3 is not None: self.quartile_3 = quartile_3 if region is not None: self.region = region if screen_height is not None: self.screen_height = screen_height if screen_width is not None: self.screen_width = screen_width if size is not None: self.size = size if skip_percentage is not None: self.skip_percentage = skip_percentage if skip_position is not None: self.skip_position = skip_position if skipped is not None: self.skipped = skipped if started is not None: self.started = started if stream_format is not None: self.stream_format = stream_format if survey_url is not None: self.survey_url = survey_url if time is not None: self.time = time if time_from_content is not None: self.time_from_content = time_from_content if time_hovered is not None: self.time_hovered = time_hovered if time_in_viewport is not None: self.time_in_viewport = time_in_viewport if time_played is not None: self.time_played = time_played if time_to_content is not None: self.time_to_content = time_to_content if time_to_hover is not None: self.time_to_hover = time_to_hover if universal_ad_id_registry is not None: self.universal_ad_id_registry = universal_ad_id_registry if universal_ad_id_value is not None: self.universal_ad_id_value = universal_ad_id_value if user_id is not None: self.user_id = user_id if video_bitrate is not None: self.video_bitrate = video_bitrate if video_id is not None: self.video_id = video_id if video_impression_id is not None: self.video_impression_id = video_impression_id if video_title is not None: self.video_title = video_title if video_window_height is not None: self.video_window_height = video_window_height if video_window_width is not None: self.video_window_width = video_window_width @property def openapi_types(self): types = { 'ad_clickthrough_url': 'string_types', 'ad_description': 'string_types', 'ad_duration': 'int', 'ad_fallback_index': 'int', 'ad_id': 'string_types', 'ad_id_player': 'string_types', 'ad_impression_id': 
'string_types', 'ad_is_persistent': 'bool', 'ad_module': 'string_types', 'ad_module_version': 'string_types', 'ad_offset': 'string_types', 'ad_playback_height': 'int', 'ad_playback_width': 'int', 'ad_pod_position': 'int', 'ad_position': 'string_types', 'ad_preload_offset': 'int', 'ad_replace_content_duration': 'int', 'ad_schedule_time': 'int', 'ad_skip_after': 'int', 'ad_skippable': 'bool', 'ad_startup_time': 'int', 'ad_system': 'string_types', 'ad_tag_path': 'string_types', 'ad_tag_server': 'string_types', 'ad_tag_type': 'string_types', 'ad_tag_url': 'string_types', 'ad_title': 'string_types', 'ad_wrapper_ads_count': 'int', 'advertiser_name': 'string_types', 'analytics_version': 'string_types', 'api_framework': 'string_types', 'apiorg_id': 'string_types', 'apiuser_id': 'string_types', 'audio_bitrate': 'int', 'autoplay': 'bool', 'browser': 'string_types', 'browser_is_bot': 'bool', 'browser_version_major': 'string_types', 'browser_version_minor': 'string_types', 'cdn_provider': 'string_types', 'city': 'string_types', 'click_percentage': 'int', 'click_position': 'int', 'clicked': 'int', 'client_time': 'int', 'close_percentage': 'int', 'close_position': 'int', 'closed': 'int', 'completed': 'int', 'country': 'string_types', 'creative_ad_id': 'string_types', 'creative_id': 'string_types', 'custom_data_1': 'string_types', 'custom_data_2': 'string_types', 'custom_data_3': 'string_types', 'custom_data_4': 'string_types', 'custom_data_5': 'string_types', 'custom_data_6': 'string_types', 'custom_data_7': 'string_types', 'custom_user_id': 'string_types', 'deal_id': 'string_types', 'device_class': 'string_types', 'device_type': 'string_types', 'domain': 'string_types', 'error_code': 'int', 'error_data': 'string_types', 'error_message': 'string_types', 'error_percentage': 'int', 'error_position': 'int', 'exit_position': 'int', 'experiment_name': 'string_types', 'ip_address': 'string_types', 'isp': 'string_types', 'language': 'string_types', 'license_key': 'string_types', 'manifest_download_time': 'int', 'media_path': 'string_types', 'media_server': 'string_types', 'media_url': 'string_types', 'midpoint': 'int', 'min_suggested_duration': 'int', 'operatingsystem': 'string_types', 'operatingsystem_version_major': 'string_types', 'operatingsystem_version_minor': 'string_types', 'page_load_time': 'int', 'page_load_type': 'int', 'path': 'string_types', 'percentage_in_viewport': 'int', 'platform': 'string_types', 'player': 'string_types', 'player_key': 'string_types', 'player_startuptime': 'int', 'player_tech': 'string_types', 'player_version': 'string_types', 'play_percentage': 'int', 'quartile_1': 'int', 'quartile_3': 'int', 'region': 'string_types', 'screen_height': 'int', 'screen_width': 'int', 'size': 'string_types', 'skip_percentage': 'int', 'skip_position': 'int', 'skipped': 'int', 'started': 'int', 'stream_format': 'string_types', 'survey_url': 'string_types', 'time': 'int', 'time_from_content': 'int', 'time_hovered': 'int', 'time_in_viewport': 'int', 'time_played': 'int', 'time_to_content': 'int', 'time_to_hover': 'int', 'universal_ad_id_registry': 'string_types', 'universal_ad_id_value': 'string_types', 'user_id': 'string_types', 'video_bitrate': 'int', 'video_id': 'string_types', 'video_impression_id': 'string_types', 'video_title': 'string_types', 'video_window_height': 'int', 'video_window_width': 'int' } return types @property def attribute_map(self): attributes = { 'ad_clickthrough_url': 'ad_clickthrough_url', 'ad_description': 'ad_description', 'ad_duration': 'ad_duration', 
'ad_fallback_index': 'ad_fallback_index', 'ad_id': 'ad_id', 'ad_id_player': 'ad_id_player', 'ad_impression_id': 'ad_impression_id', 'ad_is_persistent': 'ad_is_persistent', 'ad_module': 'ad_module', 'ad_module_version': 'ad_module_version', 'ad_offset': 'ad_offset', 'ad_playback_height': 'ad_playback_height', 'ad_playback_width': 'ad_playback_width', 'ad_pod_position': 'ad_pod_position', 'ad_position': 'ad_position', 'ad_preload_offset': 'ad_preload_offset', 'ad_replace_content_duration': 'ad_replace_content_duration', 'ad_schedule_time': 'ad_schedule_time', 'ad_skip_after': 'ad_skip_after', 'ad_skippable': 'ad_skippable', 'ad_startup_time': 'ad_startup_time', 'ad_system': 'ad_system', 'ad_tag_path': 'ad_tag_path', 'ad_tag_server': 'ad_tag_server', 'ad_tag_type': 'ad_tag_type', 'ad_tag_url': 'ad_tag_url', 'ad_title': 'ad_title', 'ad_wrapper_ads_count': 'ad_wrapper_ads_count', 'advertiser_name': 'advertiser_name', 'analytics_version': 'analytics_version', 'api_framework': 'api_framework', 'apiorg_id': 'apiorg_id', 'apiuser_id': 'apiuser_id', 'audio_bitrate': 'audio_bitrate', 'autoplay': 'autoplay', 'browser': 'browser', 'browser_is_bot': 'browser_is_bot', 'browser_version_major': 'browser_version_major', 'browser_version_minor': 'browser_version_minor', 'cdn_provider': 'cdn_provider', 'city': 'city', 'click_percentage': 'click_percentage', 'click_position': 'click_position', 'clicked': 'clicked', 'client_time': 'client_time', 'close_percentage': 'close_percentage', 'close_position': 'close_position', 'closed': 'closed', 'completed': 'completed', 'country': 'country', 'creative_ad_id': 'creative_ad_id', 'creative_id': 'creative_id', 'custom_data_1': 'custom_data_1', 'custom_data_2': 'custom_data_2', 'custom_data_3': 'custom_data_3', 'custom_data_4': 'custom_data_4', 'custom_data_5': 'custom_data_5', 'custom_data_6': 'custom_data_6', 'custom_data_7': 'custom_data_7', 'custom_user_id': 'custom_user_id', 'deal_id': 'deal_id', 'device_class': 'device_class', 'device_type': 'device_type', 'domain': 'domain', 'error_code': 'error_code', 'error_data': 'error_data', 'error_message': 'error_message', 'error_percentage': 'error_percentage', 'error_position': 'error_position', 'exit_position': 'exit_position', 'experiment_name': 'experiment_name', 'ip_address': 'ip_address', 'isp': 'isp', 'language': 'language', 'license_key': 'license_key', 'manifest_download_time': 'manifest_download_time', 'media_path': 'media_path', 'media_server': 'media_server', 'media_url': 'media_url', 'midpoint': 'midpoint', 'min_suggested_duration': 'min_suggested_duration', 'operatingsystem': 'operatingsystem', 'operatingsystem_version_major': 'operatingsystem_version_major', 'operatingsystem_version_minor': 'operatingsystem_version_minor', 'page_load_time': 'page_load_time', 'page_load_type': 'page_load_type', 'path': 'path', 'percentage_in_viewport': 'percentage_in_viewport', 'platform': 'platform', 'player': 'player', 'player_key': 'player_key', 'player_startuptime': 'player_startuptime', 'player_tech': 'player_tech', 'player_version': 'player_version', 'play_percentage': 'play_percentage', 'quartile_1': 'quartile_1', 'quartile_3': 'quartile_3', 'region': 'region', 'screen_height': 'screen_height', 'screen_width': 'screen_width', 'size': 'size', 'skip_percentage': 'skip_percentage', 'skip_position': 'skip_position', 'skipped': 'skipped', 'started': 'started', 'stream_format': 'stream_format', 'survey_url': 'survey_url', 'time': 'time', 'time_from_content': 'time_from_content', 'time_hovered': 'time_hovered', 
'time_in_viewport': 'time_in_viewport', 'time_played': 'time_played', 'time_to_content': 'time_to_content', 'time_to_hover': 'time_to_hover', 'universal_ad_id_registry': 'universal_ad_id_registry', 'universal_ad_id_value': 'universal_ad_id_value', 'user_id': 'user_id', 'video_bitrate': 'video_bitrate', 'video_id': 'video_id', 'video_impression_id': 'video_impression_id', 'video_title': 'video_title', 'video_window_height': 'video_window_height', 'video_window_width': 'video_window_width' } return attributes @property def ad_clickthrough_url(self): return self._ad_clickthrough_url @ad_clickthrough_url.setter def ad_clickthrough_url(self, ad_clickthrough_url): if ad_clickthrough_url is not None: if not isinstance(ad_clickthrough_url, string_types): raise TypeError("Invalid type for `ad_clickthrough_url`, type has to be `string_types`") self._ad_clickthrough_url = ad_clickthrough_url @property def ad_description(self): return self._ad_description @ad_description.setter
MIT License
mila-iqia/myia
myia_backend_relay/myia_backend_relay/relay_philox.py
Philox2x32.__init__
python
def __init__(self, output_size: int):
    self.output_size = output_size
    self.n = self.get_counter_size(output_size)
    ctr_type = relay.ty.TensorType((self.n,), "uint64")
    local_ctr = relay.var("ctr", type_annotation=ctr_type)
    local_key = relay.var("key", dtype="uint32", shape=())
    self.philox_2x_round = generate_function(
        self.__impl_philox_2x_round, [local_ctr, local_key]
    )
Initialize Philox2x32 RNG for given output size.
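A minimal usage sketch for the constructor above, assuming TVM's Relay and the myia_backend_relay package are importable; the output size is an arbitrary illustrative value.

from myia_backend_relay.relay_philox import Philox2x32

# Build a Philox2x32 helper sized for 10 random outputs; the constructor
# pre-builds the Relay function that performs one Philox round.
rng = Philox2x32(output_size=10)
print(rng.n)  # counters needed: (10 + 10 % 2) // 2 == 5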
https://github.com/mila-iqia/myia/blob/56774a39579b4ec4123f44843ad4ca688acc859b/myia_backend_relay/myia_backend_relay/relay_philox.py#L92-L101
import numpy as np from tvm import relay PHILOX_M2x32_0 = np.uint64(0xD256D193) PHILOX_W32_0 = np.uint32(0x9E3779B9) PHILOX2x32_DEFAULT_ROUNDS = 10 F1 = np.float32(1) F2 = np.float32(2) F128 = np.float32(128) F1024 = np.float32(1024) R123_0x1p_31f = F1 / (F1024 * F1024 * F1024 * F2) R123_0x1p_24f = F128 * R123_0x1p_31f RELAY_UINT32_8 = relay.const(8, "uint32") RELAY_UINT64_32 = relay.const(32, "uint64") RELAY_UINT64_CLEAR_HIGH = relay.const(0x00000000FFFFFFFF, "uint64") RELAY_PHILOX_M2x32_0 = relay.const(PHILOX_M2x32_0, "uint64") RELAY_PHILOX_W32_0 = relay.const(PHILOX_W32_0, "uint32") RELAY_R123_0x1p_24f = relay.const(R123_0x1p_24f, "float32") def generate_function(impl, inputs): output = impl(*inputs) return relay.Function(list(inputs), output) class Philox2x32: __slots__ = ("output_size", "n", "philox_2x_round") @staticmethod def get_counter_size(output_size): return (output_size + (output_size % 2)) // 2 def generate_relay_counter_array(self, counter): c = relay.cast(counter, "uint64") b = relay.op.transform.full(c, (self.n,), "uint64") d = relay.left_shift(b, RELAY_UINT64_32) e = relay.arange(relay.const(self.n, "uint64"), dtype="uint64") return relay.bitwise_or(d, e)
MIT License
osmr/imgclsmob
tensorflow2/tf2cv/models/resnet_cub.py
resnet200_cub
python
def resnet200_cub(classes=200, **kwargs):
    return get_resnet(classes=classes, blocks=200, model_name="resnet200_cub", **kwargs)
ResNet-200 model for CUB-200-2011 from 'Deep Residual Learning for Image Recognition,'
https://arxiv.org/abs/1512.03385. It's an experimental model.

Parameters:
----------
classes : int, default 200
    Number of classification classes.
pretrained : bool, default False
    Whether to load the pretrained weights for model.
root : str, default '~/.tensorflow/models'
    Location for keeping the model parameters.
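A brief usage sketch of the constructor documented above, assuming the tf2cv package is installed and exposes this module path, and assuming a channels-last 224x224 input; none of this is confirmed by the entry itself.

import numpy as np
import tensorflow as tf
from tf2cv.models.resnet_cub import resnet200_cub  # assumed import path mirroring the repo layout

# Randomly initialised CUB-200-2011 classifier run on a dummy batch.
net = resnet200_cub(classes=200, pretrained=False)
x = tf.convert_to_tensor(np.zeros((1, 224, 224, 3), dtype=np.float32))
y = net(x)
print(y.shape)  # expected: (1, 200)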
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/tensorflow2/tf2cv/models/resnet_cub.py#L289-L303
__all__ = ['resnet10_cub', 'resnet12_cub', 'resnet14_cub', 'resnetbc14b_cub', 'resnet16_cub', 'resnet18_cub', 'resnet26_cub', 'resnetbc26b_cub', 'resnet34_cub', 'resnetbc38b_cub', 'resnet50_cub', 'resnet50b_cub', 'resnet101_cub', 'resnet101b_cub', 'resnet152_cub', 'resnet152b_cub', 'resnet200_cub', 'resnet200b_cub'] from .common import is_channels_first from .resnet import get_resnet def resnet10_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=10, model_name="resnet10_cub", **kwargs) def resnet12_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=12, model_name="resnet12_cub", **kwargs) def resnet14_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=14, model_name="resnet14_cub", **kwargs) def resnetbc14b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=14, bottleneck=True, conv1_stride=False, model_name="resnetbc14b_cub", **kwargs) def resnet16_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=16, model_name="resnet16_cub", **kwargs) def resnet18_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=18, model_name="resnet18_cub", **kwargs) def resnet26_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=26, bottleneck=False, model_name="resnet26_cub", **kwargs) def resnetbc26b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=26, bottleneck=True, conv1_stride=False, model_name="resnetbc26b_cub", **kwargs) def resnet34_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=34, model_name="resnet34_cub", **kwargs) def resnetbc38b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=38, bottleneck=True, conv1_stride=False, model_name="resnetbc38b_cub", **kwargs) def resnet50_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=50, model_name="resnet50_cub", **kwargs) def resnet50b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=50, conv1_stride=False, model_name="resnet50b_cub", **kwargs) def resnet101_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=101, model_name="resnet101_cub", **kwargs) def resnet101b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=101, conv1_stride=False, model_name="resnet101b_cub", **kwargs) def resnet152_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=152, model_name="resnet152_cub", **kwargs) def resnet152b_cub(classes=200, **kwargs): return get_resnet(classes=classes, blocks=152, conv1_stride=False, model_name="resnet152b_cub", **kwargs)
MIT License
rice-eic/shiftaddnet
se_shift/utils_swa.py
bn_update
python
def bn_update(loader, model):
    if not check_bn(model):
        return
    model.train()
    momenta = {}
    model.apply(reset_bn)
    model.apply(lambda module: _get_momenta(module, momenta))
    n = 0
    for input, _ in loader:
        input = input.cuda(non_blocking=True)
        input_var = torch.autograd.Variable(input)
        b = input_var.data.size(0)
        momentum = b / (n + b)
        for module in momenta.keys():
            module.momentum = momentum
        model(input_var)
        n += b
    model.apply(lambda module: _set_momenta(module, momenta))
BatchNorm buffers update (if any).
Performs 1 epoch to estimate buffers average using the train dataset.

:param loader: train dataset loader for buffers average estimation.
:param model: model being updated
:return: None
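A sketch of how this helper is typically used after stochastic weight averaging, assuming a CUDA-capable PyTorch setup; the toy model and loader are placeholders for the real SWA model and training set.

import torch
from torch.utils.data import DataLoader, TensorDataset

# Stand-ins for the averaged model and the training data.
model = torch.nn.Sequential(torch.nn.Conv2d(3, 8, 3), torch.nn.BatchNorm2d(8)).cuda()
dataset = TensorDataset(torch.randn(64, 3, 32, 32), torch.zeros(64))
loader = DataLoader(dataset, batch_size=16)

# After the weights have been averaged into `model`, refresh the BatchNorm
# running statistics with one pass over the training data.
bn_update(loader, model)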
https://github.com/rice-eic/shiftaddnet/blob/654a6327444ba471ab6a80410d8835b5dd24c3a3/se_shift/utils_swa.py#L35-L62
import torch def moving_average(net1, net2, alpha=1): for param1, param2 in zip(net1.parameters(), net2.parameters()): param1.data *= (1.0 - alpha) param1.data += param2.data * alpha def _check_bn(module, flag): if issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm): flag[0] = True def check_bn(model): flag = [False] model.apply(lambda module: _check_bn(module, flag)) return flag[0] def reset_bn(module): if issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm): module.running_mean = torch.zeros_like(module.running_mean) module.running_var = torch.ones_like(module.running_var) def _get_momenta(module, momenta): if issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm): momenta[module] = module.momentum def _set_momenta(module, momenta): if issubclass(module.__class__, torch.nn.modules.batchnorm._BatchNorm): module.momentum = momenta[module]
MIT License
shijie-wu/neural-transducer
src/model.py
Transducer.forward
python
def forward(self, src_batch, src_mask, trg_batch):
    enc_hs = self.encode(src_batch)
    output = self.decode(enc_hs, src_mask, trg_batch)
    return output
only for training
https://github.com/shijie-wu/neural-transducer/blob/1af27fb62a1dfa7a8b254b23b67399fcc7c0888d/src/model.py#L201-L209
import math from collections import namedtuple import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from torch.distributions import Distribution from dataloader import PAD_IDX, STEP_IDX EPSILON = 1e-7 DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu") class Identity(nn.Module): def forward(self, x): return x class StackedLSTM(nn.Module): def __init__(self, input_siz, rnn_siz, nb_layers, dropout): super().__init__() self.nb_layers = nb_layers self.rnn_siz = rnn_siz self.layers = nn.ModuleList() self.dropout = nn.Dropout(dropout) for _ in range(nb_layers): self.layers.append(nn.LSTMCell(input_siz, rnn_siz)) input_siz = rnn_siz def get_init_hx(self, batch_size): h_0_s, c_0_s = [], [] for _ in range(self.nb_layers): h_0 = torch.zeros((batch_size, self.rnn_siz), device=DEVICE) c_0 = torch.zeros((batch_size, self.rnn_siz), device=DEVICE) h_0_s.append(h_0) c_0_s.append(c_0) return (h_0_s, c_0_s) def forward(self, input, hidden): h_0, c_0 = hidden h_1, c_1 = [], [] for i, layer in enumerate(self.layers): h_1_i, c_1_i = layer(input, (h_0[i], c_0[i])) input = self.dropout(h_1_i) h_1 += [h_1_i] c_1 += [c_1_i] h_1 = torch.stack(h_1) c_1 = torch.stack(c_1) return input, (h_1, c_1) class Attention(nn.Module): def forward(self, ht, hs, mask, weighted_ctx=True): hs, hs_ = hs hs = hs.transpose(0, 1) hs_ = hs_.transpose(0, 1) ht = ht.unsqueeze(2) score = torch.bmm(hs_, ht).squeeze(2) attn = F.softmax(score, dim=-1) * mask.transpose(0, 1) + EPSILON attn = attn / attn.sum(-1, keepdim=True) attn = attn.unsqueeze(1) if weighted_ctx: weight_hs = torch.bmm(attn, hs).squeeze(1) else: weight_hs = None return weight_hs, attn class Transducer(nn.Module): def __init__( self, *, src_vocab_size, trg_vocab_size, embed_dim, src_hid_size, src_nb_layers, trg_hid_size, trg_nb_layers, dropout_p, src_c2i, trg_c2i, attr_c2i, **kwargs ): super().__init__() self.src_vocab_size = src_vocab_size self.trg_vocab_size = trg_vocab_size self.embed_dim = embed_dim self.src_hid_size = src_hid_size self.src_nb_layers = src_nb_layers self.trg_hid_size = trg_hid_size self.trg_nb_layers = trg_nb_layers self.dropout_p = dropout_p self.src_c2i, self.trg_c2i, self.attr_c2i = src_c2i, trg_c2i, attr_c2i self.src_embed = nn.Embedding(src_vocab_size, embed_dim, padding_idx=PAD_IDX) self.trg_embed = nn.Embedding(trg_vocab_size, embed_dim, padding_idx=PAD_IDX) self.enc_rnn = nn.LSTM( embed_dim, src_hid_size, src_nb_layers, bidirectional=True, dropout=dropout_p, ) self.dec_rnn = StackedLSTM(embed_dim, trg_hid_size, trg_nb_layers, dropout_p) self.out_dim = trg_hid_size + src_hid_size * 2 self.scale_enc_hs = nn.Linear(src_hid_size * 2, trg_hid_size) self.attn = Attention() self.linear_out = nn.Linear(self.out_dim, self.out_dim) self.final_out = nn.Linear(self.out_dim, trg_vocab_size) self.dropout = nn.Dropout(dropout_p) def encode(self, src_batch): enc_hs, _ = self.enc_rnn(self.dropout(self.src_embed(src_batch))) scale_enc_hs = self.scale_enc_hs(enc_hs) return enc_hs, scale_enc_hs def decode_step(self, enc_hs, enc_mask, input_, hidden): h_t, hidden = self.dec_rnn(input_, hidden) ctx, attn = self.attn(h_t, enc_hs, enc_mask) ctx = torch.cat((ctx, h_t), dim=1) ctx = self.linear_out(ctx) ctx = torch.tanh(ctx) word_logprob = F.log_softmax(self.final_out(ctx), dim=-1) return word_logprob, hidden, attn def decode(self, enc_hs, enc_mask, trg_batch): trg_seq_len = trg_batch.size(0) trg_bat_siz = trg_batch.size(1) trg_embed = self.dropout(self.trg_embed(trg_batch)) output = [] hidden = 
self.dec_rnn.get_init_hx(trg_bat_siz) for idx in range(trg_seq_len - 1): input_ = trg_embed[idx, :] word_logprob, hidden, _ = self.decode_step(enc_hs, enc_mask, input_, hidden) output += [word_logprob] return torch.stack(output)
MIT License
chainer/chainercv
chainercv/links/model/fpn/mask_utils.py
mask_to_segm
python
def mask_to_segm(mask, bbox, segm_size, index=None):
    pad = 1
    _, H, W = mask.shape
    bbox = chainer.backends.cuda.to_cpu(bbox)
    padded_segm_size = segm_size + pad * 2
    expand_scale = padded_segm_size / segm_size
    bbox = _expand_bbox(bbox, expand_scale)
    resize_size = padded_segm_size
    bbox = _integerize_bbox(bbox)

    segm = []
    if index is None:
        index = np.arange(len(bbox))
    else:
        index = chainer.backends.cuda.to_cpu(index)

    for i, bb in zip(index, bbox):
        y_min = max(bb[0], 0)
        x_min = max(bb[1], 0)
        y_max = max(min(bb[2], H), 0)
        x_max = max(min(bb[3], W), 0)
        if y_max <= y_min or x_max <= x_min:
            segm.append(np.zeros((segm_size, segm_size), dtype=np.float32))
            continue

        bb_height = bb[2] - bb[0]
        bb_width = bb[3] - bb[1]
        cropped_m = np.zeros((bb_height, bb_width), dtype=np.bool)

        y_offset = y_min - bb[0]
        x_offset = x_min - bb[1]
        cropped_m[y_offset:y_offset + y_max - y_min,
                  x_offset:x_offset + x_max - x_min] = chainer.backends.cuda.to_cpu(
            mask[i, y_min:y_max, x_min:x_max])

        with chainer.using_config('cv_resize_backend', 'cv2'):
            sgm = transforms.resize(
                cropped_m[None].astype(np.float32),
                (resize_size, resize_size))[0].astype(np.int32)
        segm.append(sgm[pad:-pad, pad:-pad])

    return np.array(segm, dtype=np.float32)
Crop and resize mask.

This function requires cv2.

Args:
    mask (~numpy.ndarray): See below.
    bbox (~numpy.ndarray): See below.
    segm_size (int): The size of segm :math:`S`.
    index (~numpy.ndarray): See below. :math:`R = N` when
        :obj:`index` is :obj:`None`.

Returns:
    ~numpy.ndarray: See below.

.. csv-table::
    :header: name, shape, dtype, format

    :obj:`mask`, ":math:`(N, H, W)`", :obj:`bool`, --
    :obj:`bbox`, ":math:`(R, 4)`", :obj:`float32`, \
    ":math:`(y_{min}, x_{min}, y_{max}, x_{max})`"
    :obj:`index` (optional), ":math:`(R,)`", :obj:`int32`, --
    :obj:`segms` (output), ":math:`(R, S, S)`", :obj:`float32`, \
    ":math:`[0, 1]`"
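A small worked example of the crop-and-resize behaviour described above, assuming chainercv with an OpenCV backend is installed; the mask and box values are made up.

import numpy as np
from chainercv.links.model.fpn.mask_utils import mask_to_segm  # assumed public import path

# One full-image boolean mask and the box enclosing its object.
mask = np.zeros((1, 100, 100), dtype=bool)
mask[0, 10:40, 20:60] = True
bbox = np.array([[10, 20, 40, 60]], dtype=np.float32)  # (y_min, x_min, y_max, x_max)

segm = mask_to_segm(mask, bbox, segm_size=28)
print(segm.shape)  # expected: (1, 28, 28), with values in [0, 1]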
https://github.com/chainer/chainercv/blob/7159616642e0be7c5b3ef380b848e16b7e99355b/chainercv/links/model/fpn/mask_utils.py#L10-L83
from __future__ import division
import numpy as np
import chainer
from chainercv import transforms
MIT License
thingsboard/python_tb_rest_client
tb_rest_client/models/models_pe/converter.py
Converter.id
python
def id(self):
    return self._id
Gets the id of this Converter.  # noqa: E501

:return: The id of this Converter.  # noqa: E501
:rtype: ConverterId
https://github.com/thingsboard/python_tb_rest_client/blob/87c6a3703974fc8a86e4c72c444168ee2b758ecb/tb_rest_client/models/models_pe/converter.py#L165-L172
import pprint import re import six class Converter(object): swagger_types = { 'additional_info': 'str', 'configuration': 'str', 'created_time': 'int', 'debug_mode': 'bool', 'id': 'ConverterId', 'name': 'str', 'tenant_id': 'TenantId', 'type': 'str' } attribute_map = { 'additional_info': 'additionalInfo', 'configuration': 'configuration', 'created_time': 'createdTime', 'debug_mode': 'debugMode', 'id': 'id', 'name': 'name', 'tenant_id': 'tenantId', 'type': 'type' } def __init__(self, additional_info=None, configuration=None, created_time=None, debug_mode=None, id=None, name=None, tenant_id=None, type=None): self._additional_info = None self._configuration = None self._created_time = None self._debug_mode = None self._id = None self._name = None self._tenant_id = None self._type = None self.discriminator = None if additional_info is not None: self.additional_info = additional_info if configuration is not None: self.configuration = configuration if created_time is not None: self.created_time = created_time if debug_mode is not None: self.debug_mode = debug_mode if id is not None: self.id = id if name is not None: self.name = name if tenant_id is not None: self.tenant_id = tenant_id if type is not None: self.type = type @property def additional_info(self): return self._additional_info @additional_info.setter def additional_info(self, additional_info): self._additional_info = additional_info @property def configuration(self): return self._configuration @configuration.setter def configuration(self, configuration): self._configuration = configuration @property def created_time(self): return self._created_time @created_time.setter def created_time(self, created_time): self._created_time = created_time @property def debug_mode(self): return self._debug_mode @debug_mode.setter def debug_mode(self, debug_mode): self._debug_mode = debug_mode @property
Apache License 2.0
galactics/beyond
beyond/frames/frames.py
get_frame
python
def get_frame(frame):
    if frame not in dynamic.keys() and config.get(
        "env", "jpl", "dynamic_frames", fallback=False
    ):
        from ..env.jpl import create_frames, JplConfigError

        try:
            create_frames()
        except (JplConfigError, UnknownBodyError) as e:
            raise UnknownFrameError(frame) from e

    try:
        return dynamic[frame]
    except KeyError:
        raise UnknownFrameError(frame)
Frame factory

Args:
    frame (str): name of the desired frame
Return:
    Frame: the object representing the frame demanded
Raise:
    ~beyond.frames.frames.UnknownFrameError
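A minimal usage sketch, assuming the beyond package is installed and its built-in frames are registered at import time; unknown names raise the documented error.

from beyond.frames.frames import get_frame, UnknownFrameError

itrf = get_frame("ITRF")  # look up the Earth-fixed frame object

try:
    get_frame("NotAFrame")
except UnknownFrameError:
    print("unknown frame names raise UnknownFrameError")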
https://github.com/galactics/beyond/blob/1326a03ef2c918244078c48f8878705510a8eb1d/beyond/frames/frames.py#L49-L73
import sys import logging import numpy as np from ..config import config from ..errors import UnknownFrameError, UnknownBodyError from ..constants import Earth from ..utils.matrix import rot3, expand from . import orient, center from .local import to_local CIO = ["ITRF", "TIRF", "CIRF", "GCRF"] IAU1980 = ["TOD", "MOD"] OTHER = ["EME2000", "TEME", "WGS84", "PEF", "G50", "Hill"] __all__ = CIO + IAU1980 + OTHER + ["get_frame"] log = logging.getLogger(__name__) class FrameCache(dict): def __getattr__(self, name): if name not in self: raise AttributeError(name) return self[name] dynamic = FrameCache() sys.modules[__name__ + ".dynamic"] = dynamic
MIT License
ibm/causallib
causallib/estimation/matching.py
Matching.get_covariates_of_matches
python
def get_covariates_of_matches(self, s, t, covariates):
    match_df = self._get_match_df()
    subdf = match_df.loc[s][self.treatments_ == t]
    sample_id_name = subdf.index.name

    def get_covariate_difference_from_nearest_match(source_row_index):
        j = subdf.loc[source_row_index].matches[0]
        delta_series = pd.Series(
            covariates.loc[source_row_index] - covariates.loc[j])
        source_row = covariates.loc[j].copy()
        source_row.at[sample_id_name] = j
        target_row = covariates.loc[source_row_index].copy()
        target_row = target_row

        covariate_differences = pd.concat(
            {
                t: target_row,
                s: source_row,
                "delta": delta_series,
                "outcomes": pd.Series(
                    {t: self.outcome_.loc[source_row_index],
                     s: self.outcome_.loc[j]}
                ),
                "match": pd.Series(
                    dict(
                        n_neighbors=len(
                            subdf.loc[source_row_index].matches),
                        distance=subdf.loc[source_row_index].distances[0],
                    )
                ),
            }
        )
        return covariate_differences

    covdf = pd.DataFrame(
        data=[get_covariate_difference_from_nearest_match(i) for i in subdf.index],
        index=subdf.index
    )
    covdf = covdf.reset_index()
    cols = covdf.columns
    covdf.columns = pd.MultiIndex.from_tuples(
        [(t, sample_id_name)] + list(cols[1:]))
    return covdf
Look up covariates of closest matches for a given matching.

Using `self.match_df_` and the supplied `covariates`, look up
the covariates of the last match. The function can only be called after
`match` has been run.

Args:
    s (int) : source treatment value
    t (int) : target treatment value
    covariates (pd.DataFrame) : The same covariates which were
        passed to `fit`.

Returns:
    covariate_df (pd.DataFrame) : a DataFrame of size
        (n_matched_samples, n_covariates * 3 + 2) with the covariate values
        of the sample, covariates of its match, calculated distance and
        number of neighbors found within the given caliper (with no caliper
        this will equal self.n_neighbors )
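A sketch of the call sequence implied above, assuming causallib exposes Matching from causallib.estimation; the synthetic data only illustrates the expected shapes.

import numpy as np
import pandas as pd
from causallib.estimation import Matching  # assumed public import path

rng = np.random.default_rng(0)
X = pd.DataFrame(rng.normal(size=(100, 3)), columns=["x1", "x2", "x3"])
a = pd.Series(rng.integers(0, 2, size=100))  # binary treatment assignment
y = pd.Series(rng.normal(size=100))          # observed outcome

matcher = Matching(with_replacement=True, n_neighbors=1)
matcher.fit(X, a, y)
matcher.match(X, a)  # populates matcher.match_df_

# Covariates of each treated (t=1) sample next to its nearest control (s=0) match.
covdf = matcher.get_covariates_of_matches(0, 1, X)
print(covdf.head())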
https://github.com/ibm/causallib/blob/43fcb22813da04ad0d09ae5c634a0f0b02f2da9d/causallib/estimation/matching.py#L366-L428
import warnings import pandas as pd import numpy as np from itertools import permutations, combinations from collections import namedtuple, Counter from sklearn.covariance import EmpiricalCovariance from sklearn.neighbors import NearestNeighbors from sklearn.exceptions import NotFittedError from sklearn.base import clone as sk_clone from .base_estimator import IndividualOutcomeEstimator from scipy.optimize import linear_sum_assignment from scipy.spatial import distance KNN = namedtuple("KNN", "learner index") VERY_LARGE_NUMBER = np.finfo('d').max def majority_rule(x): return Counter(x).most_common(1)[0][0] class Matching(IndividualOutcomeEstimator): def __init__( self, propensity_transform=None, caliper=None, with_replacement=True, n_neighbors=1, matching_mode="both", metric="mahalanobis", knn_backend="sklearn", estimate_observed_outcome=False, ): self.propensity_transform = propensity_transform self.covariance_conditioner = EmpiricalCovariance() self.caliper = caliper self.with_replacement = with_replacement self.n_neighbors = n_neighbors self.matching_mode = matching_mode self.metric = metric self.classify_agg_function = majority_rule self.regress_agg_function = np.mean self.knn_backend = knn_backend self.estimate_observed_outcome = estimate_observed_outcome def fit(self, X, a, y, sample_weight=None): self._clear_post_fit_variables() self.outcome_ = y.copy() self.treatments_ = a.copy() if self.propensity_transform: self.propensity_transform.fit(X, a) X = self.propensity_transform.transform(X) self.conditioned_covariance_ = self._calculate_covariance(X) self.treatment_knns_ = {} for a in self.treatments_.unique(): haystack = X[self.treatments_ == a] self.treatment_knns_[a] = self._fit_sknn(haystack) return self def _execute_matching(self, X, a): if self.n_neighbors != 1 and not self.with_replacement: raise NotImplementedError( "Matching more than one neighbor is only implemented for" "no-replacement" ) if self.propensity_transform: X = self.propensity_transform.transform(X) if self.with_replacement: self.match_df_ = self._withreplacement_match(X, a) else: self.match_df_ = self._noreplacement_match(X, a) sample_id_name = X.index.name if X.index.name is not None else "sample_id" self.match_df_.index.set_names( ["match_to_treatment", sample_id_name], inplace=True ) self.samples_used_ = self._count_samples_used_by_treatment_value(a) return self.match_df_ def estimate_individual_outcome( self, X, a, y=None, treatment_values=None, predict_proba=True, dropna=True ): match_df = self.match(X, a, use_cached_result=True) outcome_df = self._aggregate_match_df_to_generate_outcome_df( match_df, a, predict_proba) outcome_df = self._filter_outcome_df_by_matching_mode(outcome_df, a) if outcome_df.isna().all(axis=None): raise ValueError("Matching was not successful and no outcomes can" "be estimated. 
Check caliper value.") if dropna: outcome_df = outcome_df.dropna() return outcome_df def match(self, X, a, use_cached_result=True, successful_matches_only=False): cached_result_available = (hasattr(self, "match_df_") and X.index.equals(self.match_df_.loc[0].index)) if not (use_cached_result and cached_result_available): self._execute_matching(X, a) return self._get_match_df(successful_matches_only=successful_matches_only) def matches_to_weights(self, match_df=None): if match_df is None: match_df = self._get_match_df(successful_matches_only=False) match_permutations = sorted(permutations(self.treatments_.unique())) weights_df = pd.DataFrame([ self._matches_to_weights_single_matching(s, t, match_df) for s, t in match_permutations],).T return weights_df
Apache License 2.0
sammchardy/python-binance-chain
binance_chain/messages.py
LimitOrderMsg.__init__
python
def __init__(self, symbol: str, side: OrderSide,
             price: Union[int, float, Decimal],
             quantity: Union[int, float, Decimal],
             time_in_force: TimeInForce = TimeInForce.GOOD_TILL_EXPIRE,
             wallet: Optional[BaseWallet] = None):
    super().__init__(
        wallet=wallet,
        symbol=symbol,
        time_in_force=time_in_force,
        order_type=OrderType.LIMIT,
        side=side,
        price=price,
        quantity=quantity
    )
NewOrder transaction creates a new order to buy and sell tokens on Binance DEX.

:param symbol: symbol for trading pair in full name of the tokens e.g. 'ANN-457_BNB'
:param side: OrderSide (BUY, SELL)
:param price: price of the order e.g. Decimal(0.000396000) or 0.002384
:param quantity: quantity of the order Decimal(12) or 12
:param time_in_force: TimeInForce type (GOOD_TILL_EXPIRE, IMMEDIATE_OR_CANCEL) default GOOD_TILL_EXPIRE
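An illustrative construction of the message described above; the wallet setup assumes python-binance-chain's Wallet and BinanceEnvironment helpers and a placeholder private key, none of which are shown in this entry.

from decimal import Decimal
from binance_chain.constants import OrderSide
from binance_chain.environment import BinanceEnvironment  # assumed helper module
from binance_chain.messages import LimitOrderMsg
from binance_chain.wallet import Wallet

env = BinanceEnvironment.get_testnet_env()
wallet = Wallet("<private key hex>", env=env)  # placeholder key

msg = LimitOrderMsg(
    wallet=wallet,
    symbol="ANN-457_BNB",
    side=OrderSide.BUY,
    price=Decimal("0.000396000"),
    quantity=Decimal("12"),
)
payload = msg.to_hex_data()  # hex-encoded transaction ready for broadcasting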
https://github.com/sammchardy/python-binance-chain/blob/19d7d639cc912a27ec86831338c2a2dc96289d50/binance_chain/messages.py#L176-L197
import ujson as json import binascii from typing import List, Dict, Union, Optional, NamedTuple from decimal import Decimal from collections import OrderedDict from binance_chain.wallet import BaseWallet from binance_chain.constants import TimeInForce, OrderSide, OrderType, VoteOption from binance_chain.protobuf.dex_pb2 import ( NewOrder, CancelOrder, TokenFreeze, TokenUnfreeze, StdTx, StdSignature, Send, Input, Output, Token, Vote ) from binance_chain.utils.encode_utils import encode_number, varint_encode from binance_chain.utils.segwit_addr import decode_address BROADCAST_SOURCE = 0 class Transfer(NamedTuple): amount: Union[int, float, Decimal] symbol: str class Msg: AMINO_MESSAGE_TYPE = "" INCLUDE_AMINO_LENGTH_PREFIX = False def __init__(self, wallet: BaseWallet, memo: str = ''): self._wallet = wallet self._memo = memo def to_dict(self) -> Dict: return {} def to_sign_dict(self) -> Dict: return {} def to_protobuf(self): pass def to_amino(self): proto = self.to_protobuf() if type(proto) != bytes: proto = proto.SerializeToString() type_bytes = b"" if self.AMINO_MESSAGE_TYPE: type_bytes = binascii.unhexlify(self.AMINO_MESSAGE_TYPE) varint_length = varint_encode(len(proto) + len(type_bytes)) else: varint_length = varint_encode(len(proto)) msg = b"" if self.INCLUDE_AMINO_LENGTH_PREFIX: msg += varint_length msg += type_bytes + proto return msg @property def wallet(self): return self._wallet @property def memo(self): return self._memo def to_hex_data(self): return binascii.hexlify(StdTxMsg(self).to_amino()) def increment_sequence(self): self._wallet.increment_account_sequence() class Signature: def __init__(self, msg: Msg, data=None): self._msg = msg self._chain_id = msg.wallet.chain_id self._data = data self._source = BROADCAST_SOURCE def to_json(self): return json.dumps(OrderedDict([ ('account_number', str(self._msg.wallet.account_number)), ('chain_id', self._chain_id), ('data', self._data), ('memo', self._msg.memo), ('msgs', [self._msg.to_dict()]), ('sequence', str(self._msg.wallet.sequence)), ('source', str(self._source)) ]), ensure_ascii=False) def to_bytes_json(self): return self.to_json().encode() def sign(self, wallet: Optional[BaseWallet] = None): wallet = wallet or self._msg.wallet json_bytes = self.to_bytes_json() signed = wallet.sign_message(json_bytes) return signed[-64:] class NewOrderMsg(Msg): AMINO_MESSAGE_TYPE = b"CE6DC043" def __init__(self, symbol: str, time_in_force: TimeInForce, order_type: OrderType, side: OrderSide, price: Union[int, float, Decimal], quantity: Union[int, float, Decimal], wallet: Optional[BaseWallet] = None): super().__init__(wallet) self._symbol = symbol self._time_in_force = time_in_force.value self._order_type = order_type.value self._side = side.value self._price = price self._price_encoded = encode_number(price) self._quantity = quantity self._quantity_encoded = encode_number(quantity) def to_dict(self) -> Dict: return OrderedDict([ ('id', self._wallet.generate_order_id()), ('ordertype', self._order_type), ('price', self._price_encoded), ('quantity', self._quantity_encoded), ('sender', self._wallet.address), ('side', self._side), ('symbol', self._symbol), ('timeinforce', self._time_in_force), ]) def to_sign_dict(self) -> Dict: return{ 'order_type': self._order_type, 'price': self._price, 'quantity': self._quantity, 'side': self._side, 'symbol': self._symbol, 'time_in_force': self._time_in_force, } def to_protobuf(self) -> NewOrder: pb = NewOrder() pb.sender = self._wallet.address_decoded pb.id = self._wallet.generate_order_id() pb.symbol = 
self._symbol.encode() pb.timeinforce = self._time_in_force pb.ordertype = self._order_type pb.side = self._side pb.price = self._price_encoded pb.quantity = self._quantity_encoded return pb class LimitOrderMsg(NewOrderMsg):
MIT License
golemhq/golem
golem/core/file_manager.py
rename_directory
python
def rename_directory(basepath, src, dst):
    errors = []
    srcpath = os.path.join(basepath, src)
    dstpath = os.path.join(basepath, dst)
    if not os.path.exists(srcpath):
        errors.append(f'Directory {src} does not exist')
    elif not os.path.isdir(srcpath):
        errors.append(f'Path {src} is not a directory')
    elif os.path.exists(dstpath):
        errors.append(f'Path {dst} already exists')
    else:
        try:
            dirname = os.path.dirname(dst)
            if dirname:
                create_package_directories(basepath, dirname)
            os.rename(srcpath, dstpath)
        except PermissionError:
            errors.append('Error: PermissionError')
        except Exception as e:
            errors.append('An error occurred while renaming folder')
    return errors
Rename a directory.
src and dst must be relative paths to basepath.
src must exist and be a directory.
dst must not exist.
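A short usage sketch under the stated contract, assuming golem.core.file_manager is importable; the base path and folder names are made up.

import os
import tempfile
from golem.core.file_manager import rename_directory

basepath = tempfile.mkdtemp()
os.makedirs(os.path.join(basepath, "old_pages"))

# Move `old_pages` into a new (possibly nested) package location.
errors = rename_directory(basepath, "old_pages", os.path.join("archive", "pages"))
print(errors)  # expected: [] on success, otherwise a list of error strings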
https://github.com/golemhq/golem/blob/ab6a08ee54d2c5d27ab6af15b833ce3d2575d3e3/golem/core/file_manager.py#L168-L193
import os import shutil from golem.core import session def _directory_element(elem_type, name, dot_path=None): element = { 'type': elem_type, 'name': name, 'dot_path': dot_path, 'sub_elements': [] } return element def generate_file_structure_dict(full_path, original_path=None): root_dir_name = os.path.basename(os.path.normpath(full_path)) if not original_path: original_path = full_path rel_path = os.path.relpath(full_path, original_path).replace(os.sep, '.') rel_dot_path = '' if rel_path == '.' else rel_path.replace(os.sep, '.') element = _directory_element('directory', root_dir_name, rel_dot_path) all_sub_elements = os.listdir(full_path) files = [] directories = [] for elem in all_sub_elements: if os.path.isdir(os.path.join(full_path, elem)): if elem not in ['__pycache__']: directories.append(elem) else: cond1 = elem.endswith('.py') cond2 = elem not in ['__init__.py', '.DS_Store'] if cond1 and cond2: files.append(os.path.splitext(elem)[0]) for directory in directories: sub_element = generate_file_structure_dict(os.path.join(full_path, directory), original_path) element['sub_elements'].append(sub_element) for filename in sorted(files): full_file_path = os.path.join(full_path, filename) rel_file_path = os.path.relpath(full_file_path, original_path) dot_file_path = rel_file_path.replace(os.sep, '.') file_element = _directory_element('file', filename, dot_file_path) element['sub_elements'].append(file_element) return element def get_files_dot_path(base_path, extension=None): all_files = [] files_with_dotted_path = [] for path, subdirs, files in os.walk(base_path): if '__pycache__' not in path: for name in files: if name not in ['__init__.py', '.DS_Store']: root, ext = os.path.splitext(name) if extension and ext != extension: continue filepath = os.path.join(path, root) all_files.append(filepath) for file in all_files: rel_path_as_list = file.replace(base_path, '').split(os.sep) rel_path_as_list = [x for x in rel_path_as_list if x != ''] files_with_dotted_path.append('.'.join(rel_path_as_list)) return files_with_dotted_path def create_directory(path_list=None, path=None, add_init=False): if path_list: path = os.sep.join(path_list) if not os.path.isdir(path): os.makedirs(path, exist_ok=True) if add_init: init_path = os.path.join(path, '__init__.py') open(init_path, 'a').close() def create_package(path_list=None, path=None): create_directory(path_list, path, add_init=True) def rename_file(old_path, new_path): errors = [] if not os.path.isfile(old_path): errors.append(f'File {old_path} does not exist') elif os.path.isfile(new_path): errors.append('A file with that name already exists') else: try: os.makedirs(os.path.dirname(new_path), exist_ok=True) os.rename(old_path, new_path) except: errors.append('There was an error renaming file') return errors def new_directory_of_type(project, parents, dir_name, dir_type): errors = [] if dir_type not in ['tests', 'suites', 'pages']: errors.append(f'{dir_type} is not a valid dir_type') else: parents = os.sep.join(parents) path = os.path.join(session.testdir, 'projects', project, dir_type, parents, dir_name) if os.path.exists(path): errors.append('A directory with that name already exists') else: create_directory(path=path, add_init=True) return errors def create_package_directories(base_path, rel_path): dirs = rel_path.split(os.sep) for dir_ in dirs: base_path = os.path.join(base_path, dir_) if not os.path.isdir(base_path): create_package(path=base_path)
MIT License
openstack/manila
manila/share/drivers/infinidat/infinibox.py
InfiniboxShareDriver._get_infinidat_access_level
python
def _get_infinidat_access_level(self, access):
    access_level = access['access_level']
    try:
        return _MANILA_TO_INFINIDAT_ACCESS_LEVEL[access_level]
    except KeyError:
        raise exception.InvalidShareAccessLevel(level=access_level)
Translates Manila access levels to INFINIDAT API ones
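A small standalone illustration of the mapping, assuming the access-rule dict shape Manila passes to drivers; unsupported levels raise InvalidShareAccessLevel as shown in the code above.

# Manila access rules carry an 'access_level' of 'rw' or 'ro'; the driver
# translates these to the INFINIDAT API strings 'RW'/'RO'.
_MANILA_TO_INFINIDAT_ACCESS_LEVEL = {'rw': 'RW', 'ro': 'RO'}

access = {'access_type': 'ip', 'access_to': '10.0.0.1', 'access_level': 'rw'}
print(_MANILA_TO_INFINIDAT_ACCESS_LEVEL[access['access_level']])  # -> 'RW'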
https://github.com/openstack/manila/blob/34d209484366cd921e052d37c5f9daef5e97af20/manila/share/drivers/infinidat/infinibox.py#L315-L321
import functools import ipaddress from oslo_config import cfg from oslo_log import log as logging from oslo_utils import units import six from manila.common import constants from manila import exception from manila.i18n import _ from manila.share import driver from manila.share import utils from manila import version try: import capacity import infinisdk except ImportError: capacity = None infinisdk = None LOG = logging.getLogger(__name__) infinidat_connection_opts = [ cfg.HostAddressOpt('infinibox_hostname', help='The name (or IP address) for the INFINIDAT ' 'Infinibox storage system.'), ] infinidat_auth_opts = [ cfg.StrOpt('infinibox_login', help=('Administrative user account name used to access the ' 'INFINIDAT Infinibox storage system.')), cfg.StrOpt('infinibox_password', help=('Password for the administrative user account ' 'specified in the infinibox_login option.'), secret=True), ] infinidat_general_opts = [ cfg.StrOpt('infinidat_pool_name', help='Name of the pool from which volumes are allocated.'), cfg.StrOpt('infinidat_nas_network_space_name', help='Name of the NAS network space on the INFINIDAT ' 'InfiniBox.'), cfg.BoolOpt('infinidat_thin_provision', help='Use thin provisioning.', default=True)] CONF = cfg.CONF CONF.register_opts(infinidat_connection_opts) CONF.register_opts(infinidat_auth_opts) CONF.register_opts(infinidat_general_opts) _MANILA_TO_INFINIDAT_ACCESS_LEVEL = { constants.ACCESS_LEVEL_RW: 'RW', constants.ACCESS_LEVEL_RO: 'RO', } _API_MAX_RETRIES = 5 _INFINIDAT_MANILA_IDENTIFIER = ( "manila/%s" % version.version_info.release_string()) def infinisdk_to_manila_exceptions(func): @functools.wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except infinisdk.core.exceptions.InfiniSDKException as ex: msg = _('Caught exception from infinisdk: %s') % ex LOG.exception(msg) raise exception.ShareBackendException(msg=msg) return wrapper class InfiniboxShareDriver(driver.ShareDriver): VERSION = '1.0' def __init__(self, *args, **kwargs): super(InfiniboxShareDriver, self).__init__(False, *args, **kwargs) self.configuration.append_config_values(infinidat_connection_opts) self.configuration.append_config_values(infinidat_auth_opts) self.configuration.append_config_values(infinidat_general_opts) def _setup_and_get_system_object(self, management_address, auth): system = infinisdk.InfiniBox(management_address, auth=auth) system.api.add_auto_retry( lambda e: isinstance( e, infinisdk.core.exceptions.APITransportFailure) and "Interrupted system call" in e.error_desc, _API_MAX_RETRIES) system.api.set_source_identifier(_INFINIDAT_MANILA_IDENTIFIER) system.login() return system def do_setup(self, context): if infinisdk is None: msg = _("Missing 'infinisdk' python module, ensure the library" " is installed and available.") raise exception.ManilaException(message=msg) infinibox_login = self._safe_get_from_config_or_fail('infinibox_login') infinibox_password = ( self._safe_get_from_config_or_fail('infinibox_password')) auth = (infinibox_login, infinibox_password) management_address = ( self._safe_get_from_config_or_fail('infinibox_hostname')) self._pool_name = ( self._safe_get_from_config_or_fail('infinidat_pool_name')) self._network_space_name = ( self._safe_get_from_config_or_fail( 'infinidat_nas_network_space_name')) self._system = ( self._setup_and_get_system_object(management_address, auth)) backend_name = self.configuration.safe_get('share_backend_name') self._backend_name = backend_name or self.__class__.__name__ thin_provisioning = 
self.configuration.infinidat_thin_provision self._provtype = "THIN" if thin_provisioning else "THICK" LOG.debug('setup complete') def _update_share_stats(self): (free_capacity_bytes, physical_capacity_bytes, provisioned_capacity_gb) = self._get_available_capacity() max_over_subscription_ratio = ( self.configuration.max_over_subscription_ratio) data = dict( share_backend_name=self._backend_name, vendor_name='INFINIDAT', driver_version=self.VERSION, storage_protocol='NFS', total_capacity_gb=float(physical_capacity_bytes) / units.Gi, free_capacity_gb=float(free_capacity_bytes) / units.Gi, reserved_percentage=self.configuration.reserved_share_percentage, reserved_snapshot_percentage=( self.configuration.reserved_share_from_snapshot_percentage or self.configuration.reserved_share_percentage), thin_provisioning=self.configuration.infinidat_thin_provision, max_over_subscription_ratio=max_over_subscription_ratio, provisioned_capacity_gb=provisioned_capacity_gb, snapshot_support=True, create_share_from_snapshot_support=True, mount_snapshot_support=True, revert_to_snapshot_support=True) super(InfiniboxShareDriver, self)._update_share_stats(data) def _get_available_capacity(self): pool = self._get_infinidat_pool() free_capacity_bytes = (pool.get_free_physical_capacity() / capacity.byte) physical_capacity_bytes = (pool.get_physical_capacity() / capacity.byte) provisioned_capacity_gb = ( (pool.get_virtual_capacity() - pool.get_free_virtual_capacity()) / capacity.GB) return (free_capacity_bytes, physical_capacity_bytes, provisioned_capacity_gb) def _safe_get_from_config_or_fail(self, config_parameter): config_value = self.configuration.safe_get(config_parameter) if not config_value: reason = (_("%(config_parameter)s configuration parameter " "must be specified") % {'config_parameter': config_parameter}) LOG.error(reason) raise exception.BadConfigurationException(reason=reason) return config_value def _verify_share_protocol(self, share): if share['share_proto'] != 'NFS': reason = (_('Unsupported share protocol: %(proto)s.') % {'proto': share['share_proto']}) LOG.error(reason) raise exception.InvalidShare(reason=reason) def _verify_access_type(self, access): if access['access_type'] != 'ip': reason = _('Only "ip" access type allowed for the NFS protocol.') LOG.error(reason) raise exception.InvalidShareAccess(reason=reason) return True def _make_share_name(self, manila_share): return 'openstack-shr-%s' % manila_share['id'] def _make_snapshot_name(self, manila_snapshot): return 'openstack-snap-%s' % manila_snapshot['id'] def _set_manila_object_metadata(self, infinidat_object, manila_object): data = {"system": "openstack", "openstack_version": version.version_info.release_string(), "manila_id": manila_object['id'], "manila_name": manila_object['name'], "host.created_by": _INFINIDAT_MANILA_IDENTIFIER} infinidat_object.set_metadata_from_dict(data) @infinisdk_to_manila_exceptions def _get_infinidat_pool(self): pool = self._system.pools.safe_get(name=self._pool_name) if pool is None: msg = _('Pool "%s" not found') % self._pool_name LOG.error(msg) raise exception.ShareBackendException(msg=msg) return pool @infinisdk_to_manila_exceptions def _get_infinidat_nas_network_space_ips(self): network_space = self._system.network_spaces.safe_get( name=self._network_space_name) if network_space is None: msg = _('INFINIDAT InfiniBox NAS network space "%s" ' 'not found') % self._network_space_name LOG.error(msg) raise exception.ShareBackendException(msg=msg) network_space_ips = network_space.get_ips() if not 
network_space_ips: msg = _('INFINIDAT InfiniBox NAS network space "%s" has no IP ' 'addresses defined') % self._network_space_name LOG.error(msg) raise exception.ShareBackendException(msg=msg) ip_addresses = ( [ip_munch.ip_address for ip_munch in network_space_ips if ip_munch.enabled]) if not ip_addresses: msg = _('INFINIDAT InfiniBox NAS network space "%s" has no ' 'enabled IP addresses') % self._network_space_name LOG.error(msg) raise exception.ShareBackendException(msg=msg) return ip_addresses def _get_full_nfs_export_paths(self, export_path): network_space_ips = self._get_infinidat_nas_network_space_ips() return ['{network_space_ip}:{export_path}'.format( network_space_ip=network_space_ip, export_path=export_path) for network_space_ip in network_space_ips] @infinisdk_to_manila_exceptions def _get_infinidat_filesystem_by_name(self, name): filesystem = self._system.filesystems.safe_get(name=name) if filesystem is None: msg = (_('Filesystem not found on the Infinibox by its name: %s') % name) LOG.error(msg) raise exception.ShareResourceNotFound(share_id=name) return filesystem def _get_infinidat_filesystem(self, manila_share): filesystem_name = self._make_share_name(manila_share) return self._get_infinidat_filesystem_by_name(filesystem_name) def _get_infinidat_snapshot_by_name(self, name): snapshot = self._system.filesystems.safe_get(name=name) if snapshot is None: msg = (_('Snapshot not found on the Infinibox by its name: %s') % name) LOG.error(msg) raise exception.ShareSnapshotNotFound(snapshot_id=name) return snapshot def _get_infinidat_snapshot(self, manila_snapshot): snapshot_name = self._make_snapshot_name(manila_snapshot) return self._get_infinidat_snapshot_by_name(snapshot_name) def _get_infinidat_dataset(self, manila_object, is_snapshot): return (self._get_infinidat_snapshot(manila_object) if is_snapshot else self._get_infinidat_filesystem(manila_object)) @infinisdk_to_manila_exceptions def _get_export(self, infinidat_filesystem): infinidat_exports = infinidat_filesystem.get_exports() if len(infinidat_exports) == 0: msg = _("Could not find share export") raise exception.ShareBackendException(msg=msg) elif len(infinidat_exports) > 1: msg = _("INFINIDAT filesystem has more than one active export; " "possibly not a Manila share") LOG.error(msg) raise exception.ShareBackendException(msg=msg) return infinidat_exports[0]
Apache License 2.0
eelcohoogendoorn/numpy_arraysetops_ep
numpy_indexed/index.py
BaseIndex.start
python
def start(self):
    return self.slices[:-1]
start index of all bins
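A minimal usage sketch for this property, assuming the BaseIndex class shown in the accompanying context; the key array is made up purely for illustration:

import numpy as np

# hypothetical example keys; any flat array of sortable values works
keys = np.array([3, 1, 1, 2, 2, 2])
index = BaseIndex(keys)            # sorted keys become [1 1 2 2 2 3]
print(index.slices)                # [0 2 5 6] -> bin boundaries
print(index.start)                 # [0 2 5]   -> start index of each bin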
https://github.com/eelcohoogendoorn/numpy_arraysetops_ep/blob/84dc8114bf8a79c3acb3f7f59128247b9fc97243/numpy_indexed/index.py#L74-L76
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import *
from functools import reduce

from numpy_indexed.utility import *
from numpy_indexed import semantics

__author__ = "Eelco Hoogendoorn"
__license__ = "LGPL"
__email__ = "hoogendoorn.eelco@gmail.com"


class BaseIndex(object):
    def __init__(self, keys):
        self._keys = np.asarray(keys).flatten()
        self.sorted = np.sort(self._keys)
        if self.size == 0:
            self.flag = np.empty(0, np.bool)
            self.slices = np.empty(0, np.int)
        else:
            self.flag = self.sorted[:-1] != self.sorted[1:]
            self.slices = np.concatenate((
                [0],
                np.flatnonzero(self.flag)+1,
                [self.size]))

    @property
    def keys(self):
        return self._keys

    @property
    def sorted_keys(self):
        return self.sorted

    @property
    def size(self):
        return self._keys.size

    @property
MIT License
talonius/hb-downloader
hb_downloader/humble_api/model/base_model.py
BaseModel.__repr__
python
def __repr__(self):
    return repr(self.__dict__)
Called by the repr() built-in function and by string conversions (reverse quotes) to compute the "official" string representation of an object. If at all possible, this should look like a valid Python expression that could be used to recreate an object with the same value (given an appropriate environment).
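A hedged illustration of that behaviour with a hypothetical subclass (DummyOrder is not part of hb-downloader; it only mirrors how subclasses populate attributes on top of BaseModel):

class DummyOrder(BaseModel):
    # hypothetical subclass used only to demonstrate __repr__
    def __init__(self, data):
        super().__init__(data)
        self.gamekey = data.get("gamekey")

order = DummyOrder({"gamekey": "AbC123"})
print(repr(order))
# -> "{'_data': {'gamekey': 'AbC123'}, 'gamekey': 'AbC123'}"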
https://github.com/talonius/hb-downloader/blob/ebbc91293c4acfc158be2100689632620fb60e50/hb_downloader/humble_api/model/base_model.py#L36-L43
__author__ = "Joel Pedraza" __copyright__ = "Copyright 2014, Joel Pedraza" __license__ = "MIT" class BaseModel(object): def __init__(self, data): self._data = data def __unicode__(self): return str(self).encode("utf-8") def __str__(self): return str({key: self.__dict__[key] for key in self.__dict__ if key != "_client"})
MIT License
cstr-edinburgh/snickery
script/synth_simple_cuts.py
get_natural_distance_vectorised
python
def get_natural_distance_vectorised(self, first, second, order=2):
    sq_diffs = (self.unit_end_data[first,:] - self.unit_start_data[second,:])**2
    distance = (1.0 / order) * np.sqrt(np.sum(sq_diffs, axis=1))
    return distance
first and second: indices of left and right units to be joined
order: number of frames of overlap
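The join-cost formula can be checked in isolation; a minimal numpy sketch with made-up feature matrices (unit_end_data and unit_start_data stand in for the synthesiser's join-point features):

import numpy as np

# toy join-point features: 4 units, 3 feature dimensions each
unit_end_data = np.random.rand(4, 3)    # features at the end of each unit
unit_start_data = np.random.rand(4, 3)  # features at the start of each unit

first = np.array([0, 1, 2])   # left units of the candidate joins
second = np.array([1, 2, 3])  # right units of the candidate joins
order = 2

sq_diffs = (unit_end_data[first, :] - unit_start_data[second, :]) ** 2
distance = (1.0 / order) * np.sqrt(np.sum(sq_diffs, axis=1))
print(distance)  # one join cost per (first, second) pair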
https://github.com/cstr-edinburgh/snickery/blob/6d7e0b48cbb21760089bbbe85f6d7bd206f89821/script/synth_simple_cuts.py#L14-L23
def get_natural_distance(self, first, second, order=2):
    sq_diffs = (self.unit_end_data[first,:] - self.unit_start_data[second,:])**2
    distance = (1.0 / order) * math.sqrt(np.sum(sq_diffs))
    return distance
Apache License 2.0
joinemm/miso-bot
cogs/utility.py
Utility.bang
python
async def bang(self, ctx):
    if not hasattr(ctx, "iscallback"):
        return await ctx.send_help(ctx.command)

    try:
        await ctx.trigger_typing()
    except discord.errors.Forbidden:
        pass

    command_logger.info(log.log_command(ctx))
    await queries.save_command_usage(ctx)
    try:
        bang, args = ctx.message.content[len(ctx.prefix) + 1 :].split(" ", 1)
        if len(bang) != 0:
            await self.resolve_bang(ctx, bang, args)
    except ValueError:
        await ctx.send("Please provide a query to search")
DuckDuckGo bangs. For a list of all bangs please visit https://duckduckgo.com/bang

Usage: >!<bang> <query...>
Example: >!w horses
https://github.com/joinemm/miso-bot/blob/7509273e135a5be07903207b9449f6012dd82fa3/cogs/utility.py#L209-L235
import asyncio import html import json import os from time import time import aiohttp import arrow import discord from bs4 import BeautifulSoup from discord.ext import commands, tasks from modules import emojis, exceptions, log, queries, util GOOGLE_API_KEY = os.environ.get("GOOGLE_KEY") DARKSKY_API_KEY = os.environ.get("DARK_SKY_KEY") TIMEZONE_API_KEY = os.environ.get("TIMEZONEDB_API_KEY") OXFORD_APPID = os.environ.get("OXFORD_APPID") OXFORD_TOKEN = os.environ.get("OXFORD_TOKEN") NAVER_APPID = os.environ.get("NAVER_APPID") NAVER_TOKEN = os.environ.get("NAVER_TOKEN") WOLFRAM_APPID = os.environ.get("WOLFRAM_APPID") GFYCAT_CLIENT_ID = os.environ.get("GFYCAT_CLIENT_ID") GFYCAT_SECRET = os.environ.get("GFYCAT_SECRET") STREAMABLE_USER = os.environ.get("STREAMABLE_USER") STREAMABLE_PASSWORD = os.environ.get("STREAMABLE_PASSWORD") THESAURUS_KEY = os.environ.get("THESAURUS_KEY") FINNHUB_TOKEN = os.environ.get("FINNHUB_TOKEN") command_logger = log.get_command_logger() papago_pairs = [ "ko/en", "ko/ja", "ko/zh-cn", "ko/zh-tw", "ko/vi", "ko/id", "ko/de", "ko/ru", "ko/es", "ko/it", "ko/fr", "en/ja", "ja/zh-cn", "ja/zh-tw", "zh-cn/zh-tw", "en/ko", "ja/ko", "zh-cn/ko", "zh-tw/ko", "vi/ko", "id/ko", "th/ko", "de/ko", "ru/ko", "es/ko", "it/ko", "fr/ko", "ja/en", "zh-cn/ja", "zh-tw/ja", "zh-tw/zh-tw", ] weather_icons = { "clear-day": ":sunny:", "clear-night": ":night_with_stars:", "fog": ":foggy:", "hail": ":cloud_snow:", "sleet": ":cloud_snow:", "snow": ":cloud_snow:", "partly-cloudy-day": ":partly_sunny:", "cloudy": ":cloud:", "partly-cloudy-night": ":cloud:", "tornado": ":cloud_tornado:", "wind": ":wind_blowing_face:", } logger = log.get_logger(__name__) class Utility(commands.Cog): def __init__(self, bot): self.bot = bot self.icon = "🔧" self.reminder_list = [] self.cache_needs_refreshing = True self.reminder_loop.start() def cog_unload(self): self.reminder_loop.cancel() @tasks.loop(seconds=5.0) async def reminder_loop(self): try: await self.check_reminders() except Exception as e: logger.error(f"Reminder loop error: {e}") @reminder_loop.before_loop async def before_reminder_loop(self): await self.bot.wait_until_ready() logger.info("Starting reminder loop") async def check_reminders(self): if self.cache_needs_refreshing: self.cache_needs_refreshing = False self.reminder_list = await self.bot.db.execute( """ SELECT user_id, guild_id, created_on, reminder_date, content, original_message_url FROM reminder """ ) if not self.reminder_list: return now_ts = arrow.utcnow().timestamp for ( user_id, guild_id, created_on, reminder_date, content, original_message_url, ) in self.reminder_list: reminder_ts = reminder_date.timestamp() if reminder_ts > now_ts: continue user = self.bot.get_user(user_id) if user is not None: guild = self.bot.get_guild(guild_id) if guild is None: guild = "Unknown guild" date = arrow.get(created_on) if now_ts - reminder_ts > 21600: logger.info( f"Deleting reminder set for {date.format('DD/MM/YYYY HH:mm:ss')} for being over 6 hours late" ) else: embed = discord.Embed( color=int("d3a940", 16), title=":alarm_clock: Reminder!", description=content, ) embed.add_field( name="context", value=f"[Jump to message]({original_message_url})", inline=True, ) embed.set_footer(text=f"{guild}") embed.timestamp = created_on try: await user.send(embed=embed) logger.info(f'Reminded {user} to "{content}"') except discord.errors.Forbidden: logger.warning(f"Unable to remind {user}, missing DM permissions!") else: logger.info(f"Deleted expired reminder by unknown user {user_id}") await self.bot.db.execute( """ 
DELETE FROM reminder WHERE user_id = %s AND guild_id = %s AND original_message_url = %s """, user_id, guild_id, original_message_url, ) self.cache_needs_refreshing = True @commands.Cog.listener() async def on_command_error(self, ctx, error): error = getattr(error, "original", error) if isinstance(error, commands.CommandNotFound) and ctx.message.content.startswith( f"{ctx.prefix}!" ): ctx.timer = time() ctx.iscallback = True ctx.command = self.bot.get_command("!") await ctx.command.callback(self, ctx) async def resolve_bang(self, ctx, bang, args): async with aiohttp.ClientSession() as session: params = {"q": "!" + bang + " " + args, "format": "json", "no_redirect": 1} url = "https://api.duckduckgo.com" async with session.get(url, params=params) as response: data = await response.json(content_type=None) location = data.get("Redirect") if location == "": return await ctx.send(":warning: Unknown bang or found nothing!") while location: async with session.get(url, params=params) as deeper_response: response = deeper_response location = response.headers.get("location") content = response.url await ctx.send(content) @commands.command(name="!")
MIT License
rapid7/vm-console-client-python
rapid7vmconsole/api/asset_discovery_api.py
AssetDiscoveryApi.get_sonar_query_assets
python
def get_sonar_query_assets(self, id, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_sonar_query_assets_with_http_info(id, **kwargs)
    else:
        (data) = self.get_sonar_query_assets_with_http_info(id, **kwargs)
        return data
Sonar Query Assets  # noqa: E501

Returns the assets that are discovered by a Sonar query.  # noqa: E501

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_sonar_query_assets(id, async_req=True)
>>> result = thread.get()

:param async_req bool
:param int id: The identifier of the Sonar query. (required)
:return: ResourcesDiscoveryAsset
         If the method is called asynchronously,
         returns the request thread.
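A hedged usage sketch based on the docstring above; the query id 42 and the default client configuration are placeholders, not values from the source:

from rapid7vmconsole.api_client import ApiClient
from rapid7vmconsole.api.asset_discovery_api import AssetDiscoveryApi

api = AssetDiscoveryApi(ApiClient())      # assumes the client is configured elsewhere

assets = api.get_sonar_query_assets(42)   # synchronous call (placeholder id)

thread = api.get_sonar_query_assets(42, async_req=True)
assets = thread.get()                     # asynchronous call, as in the docstring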
https://github.com/rapid7/vm-console-client-python/blob/55e1f573967bce27cc9a2d10c12a949b1142c2b3/rapid7vmconsole/api/asset_discovery_api.py#L621-L641
from __future__ import absolute_import import re import six from rapid7vmconsole.api_client import ApiClient class AssetDiscoveryApi(object): def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_sonar_query(self, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.create_sonar_query_with_http_info(**kwargs) else: (data) = self.create_sonar_query_with_http_info(**kwargs) return data def create_sonar_query_with_http_info(self, **kwargs): all_params = ['query'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method create_sonar_query" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'query' in params: body_params = params['query'] header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/sonar_queries', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='CreatedReferenceDiscoveryQueryIDLink', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def delete_sonar_query(self, id, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_sonar_query_with_http_info(id, **kwargs) else: (data) = self.delete_sonar_query_with_http_info(id, **kwargs) return data def delete_sonar_query_with_http_info(self, id, **kwargs): all_params = ['id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method delete_sonar_query" % key ) params[key] = val del params['kwargs'] if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `delete_sonar_query`") collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/sonar_queries/{id}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Links', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), 
collection_formats=collection_formats) def get_discovery_connection(self, id, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_discovery_connection_with_http_info(id, **kwargs) else: (data) = self.get_discovery_connection_with_http_info(id, **kwargs) return data def get_discovery_connection_with_http_info(self, id, **kwargs): all_params = ['id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_discovery_connection" % key ) params[key] = val del params['kwargs'] if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `get_discovery_connection`") collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/discovery_connections/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='DiscoveryConnection', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_discovery_connections(self, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_discovery_connections_with_http_info(**kwargs) else: (data) = self.get_discovery_connections_with_http_info(**kwargs) return data def get_discovery_connections_with_http_info(self, **kwargs): all_params = ['page', 'size', 'sort'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_discovery_connections" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] if 'page' in params: query_params.append(('page', params['page'])) if 'size' in params: query_params.append(('size', params['size'])) if 'sort' in params: query_params.append(('sort', params['sort'])) collection_formats['sort'] = 'multi' header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/discovery_connections', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageOfDiscoveryConnection', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), 
_preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_sonar_queries(self, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_sonar_queries_with_http_info(**kwargs) else: (data) = self.get_sonar_queries_with_http_info(**kwargs) return data def get_sonar_queries_with_http_info(self, **kwargs): all_params = [] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_sonar_queries" % key ) params[key] = val del params['kwargs'] collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/sonar_queries', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='ResourcesSonarQuery', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_sonar_query(self, id, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_sonar_query_with_http_info(id, **kwargs) else: (data) = self.get_sonar_query_with_http_info(id, **kwargs) return data def get_sonar_query_with_http_info(self, id, **kwargs): all_params = ['id'] all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" " to method get_sonar_query" % key ) params[key] = val del params['kwargs'] if ('id' not in params or params['id'] is None): raise ValueError("Missing the required parameter `id` when calling `get_sonar_query`") collection_formats = {} path_params = {} if 'id' in params: path_params['id'] = params['id'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json;charset=UTF-8']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/3/sonar_queries/{id}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='SonarQuery', auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
MIT License
terrapower/armi
armi/nucDirectory/nucDir.py
getMc2LibName
python
def getMc2LibName(name):
    nuc = getNuclide(name)
    return nuc.mc2id
r""" returns a MC2 library name given an ARMI nuclide name These are all 6 characters 'U-2355', 'ZR S', etc. Parameters ---------- name : str ARMI nuclide name of the nuclide Returns ------- mc2LibLabel : str The 6-character MC**2 library ID for this nuclide See Also -------- readMc2Nuclides : reads a data file containing all the mc2 labels and chooses the proper library extension for each.
https://github.com/terrapower/armi/blob/b4fceeb5c3c7f2feeaa8c9ac05aa635e5f1a15a0/armi/nucDirectory/nucDir.py#L175-L198
import re from armi import runLog from armi.nucDirectory import elements, nuclideBases nuclidePattern = re.compile(r"([A-Za-z]+)-?(\d{0,3})(\d*)(\S*)") zaPat = re.compile(r"([A-Za-z]+)-?([0-9]+)") eDisplacement = { "H": 10.0, "C": 31.0, "N": 30.0, "NA": 25.0, "SI": 25.0, "V": 40.0, "CR": 40.0, "MN": 40.0, "NI": 40.0, "MO": 60.0, "FE": 40.0, "W": 90.0, "TI": 30.0, "NB": 60.0, "ZR": 40.0, "CU": 30.0, "CO": 40.0, "AL": 25.0, "PB": 25.0, "TA": 90.0, } def getNuclideFromName(name): actualName = name if "-" in name: actualName = name.replace("-", "") if "_" in name: actualName = name.replace("_", "") return nuclideBases.byName[actualName] def getNuclidesFromInputName(name): name = name.upper() if name in elements.bySymbol: element = elements.bySymbol[name] if element.isNaturallyOccurring(): return [ nuc for nuc in element.nuclideBases if nuc.a > 0 and nuc.abundance > 0 ] else: raise NotImplementedError( "Expanding non-natural elements to all known nuclides is probably " "not what you want to do. Please specify isotopes of {} individually " "in the input file.".format(name) ) else: raise ValueError( "Unrecognized nuclide/isotope/element in input: {}".format(name) ) def getNaturalIsotopics(elementSymbol=None, z=None): element = None if z: element = elements.byZ[z] else: element = elements.bySymbol[elementSymbol] return [(nn.a, nn.abundance) for nn in element.getNaturalIsotopics()] def getNaturalMassIsotopics(elementSymbol=None, z=None): numIso = getNaturalIsotopics(elementSymbol, z) terms = [] for a, frac in numIso: terms.append(a * frac) s = sum(terms) massIso = [] for i, (a, frac) in enumerate(numIso): massIso.append((a, terms[i] / s)) return massIso def getMc2Label(name): nuc = getNuclide(name) return nuc.label
Apache License 2.0
neuraxio/neuraxle
examples/getting_started/plot_force_handle_mixin.py
ForceHandleMixinStep._fit_transform_data_container
python
def _fit_transform_data_container(self, data_container: DataContainer, context: ExecutionContext) -> ('BaseStep', DataContainer):
    data_container = self.hash_data_container(data_container)
    return self, data_container
Change the shape of the data container.
and/or
Apply any side effects based on the data container
And/or
Change the execution flow of the pipeline
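A hedged usage sketch of the step defined in the accompanying context; it assumes ForceHandleMixin routes plain fit/transform calls through these _*_data_container handlers, which is the behaviour this example script is meant to demonstrate:

import numpy as np

step = ForceHandleMixinStep()

# plain calls are assumed to be wrapped into DataContainers and dispatched
# to the handler methods defined above
step = step.fit(np.array([0, 1]), np.array([0, 1]))
outputs = step.transform(np.array([0, 1]))
print(outputs)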
https://github.com/neuraxio/neuraxle/blob/18479c0adf5ebfd3504a83ef6711219961c2bfdb/examples/getting_started/plot_force_handle_mixin.py#L70-L80
import numpy as np

from neuraxle.base import BaseStep, DataContainer, ExecutionContext, ForceHandleMixin


class ForceHandleMixinStep(ForceHandleMixin, BaseStep):
    def __init__(self):
        BaseStep.__init__(self)
        ForceHandleMixin.__init__(self)

    def _fit_data_container(self, data_container: DataContainer, context: ExecutionContext) -> BaseStep:
        data_container = self.hash_data_container(data_container)
        return self

    def _transform_data_container(self, data_container: DataContainer, context: ExecutionContext) -> DataContainer:
        data_container = self.hash_data_container(data_container)
        return data_container
Apache License 2.0
openai/safety-gym
safety_gym/envs/engine.py
Engine.build_goal_position
python
def build_goal_position(self):
    if 'goal' in self.layout:
        del self.layout['goal']
    for _ in range(10000):
        if self.sample_goal_position():
            break
    else:
        raise ResamplingError('Failed to generate goal')
    self.world_config_dict['geoms']['goal']['pos'][:2] = self.layout['goal']
    goal_body_id = self.sim.model.body_name2id('goal')
    self.sim.model.body_pos[goal_body_id][:2] = self.layout['goal']
    self.sim.forward()
Build a new goal position, maybe with resampling due to hazards
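A hedged sketch of how this method is reached from user code; the config keys follow Engine.DEFAULT in the accompanying context, and it assumes the standard gym reset() entry point rebuilds the layout and goal:

from safety_gym.envs.engine import Engine

# minimal 'goal' task config; both keys exist in Engine.DEFAULT
env = Engine({'task': 'goal', 'goal_size': 0.3})

# resetting the environment rebuilds the world; goal placement is assumed to
# go through build_goal_position() as part of goal construction
obs = env.reset()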
https://github.com/openai/safety-gym/blob/f31042f2f9ee61b9034dd6a416955972911544f5/safety_gym/envs/engine.py#L824-L840
import gym import gym.spaces import numpy as np from PIL import Image from copy import deepcopy from collections import OrderedDict import mujoco_py from mujoco_py import MjViewer, MujocoException, const, MjRenderContextOffscreen from safety_gym.envs.world import World, Robot import sys COLOR_BOX = np.array([1, 1, 0, 1]) COLOR_BUTTON = np.array([1, .5, 0, 1]) COLOR_GOAL = np.array([0, 1, 0, 1]) COLOR_VASE = np.array([0, 1, 1, 1]) COLOR_HAZARD = np.array([0, 0, 1, 1]) COLOR_PILLAR = np.array([.5, .5, 1, 1]) COLOR_WALL = np.array([.5, .5, .5, 1]) COLOR_GREMLIN = np.array([0.5, 0, 1, 1]) COLOR_CIRCLE = np.array([0, 1, 0, 1]) COLOR_RED = np.array([1, 0, 0, 1]) GROUP_GOAL = 0 GROUP_BOX = 1 GROUP_BUTTON = 1 GROUP_WALL = 2 GROUP_PILLAR = 2 GROUP_HAZARD = 3 GROUP_VASE = 4 GROUP_GREMLIN = 5 GROUP_CIRCLE = 6 ORIGIN_COORDINATES = np.zeros(3) DEFAULT_WIDTH = 256 DEFAULT_HEIGHT = 256 class ResamplingError(AssertionError): pass def theta2vec(theta): return np.array([np.cos(theta), np.sin(theta), 0.0]) def quat2mat(quat): q = np.array(quat, dtype='float64') m = np.zeros(9, dtype='float64') mujoco_py.functions.mju_quat2Mat(m, q) return m.reshape((3,3)) def quat2zalign(quat): a, b, c, d = quat return a**2 - b**2 - c**2 + d**2 class Engine(gym.Env, gym.utils.EzPickle): DEFAULT = { 'num_steps': 1000, 'action_noise': 0.0, 'placements_extents': [-2, -2, 2, 2], 'placements_margin': 0.0, 'floor_display_mode': False, 'robot_placements': None, 'robot_locations': [], 'robot_keepout': 0.4, 'robot_base': 'xmls/car.xml', 'robot_rot': None, 'randomize_layout': True, 'build_resample': True, 'continue_goal': True, 'terminate_resample_failure': True, 'observation_flatten': True, 'observe_sensors': True, 'observe_goal_dist': False, 'observe_goal_comp': False, 'observe_goal_lidar': False, 'observe_box_comp': False, 'observe_box_lidar': False, 'observe_circle': False, 'observe_remaining': False, 'observe_walls': False, 'observe_hazards': False, 'observe_vases': False, 'observe_pillars': False, 'observe_buttons': False, 'observe_gremlins': False, 'observe_vision': False, 'observe_qpos': False, 'observe_qvel': False, 'observe_ctrl': False, 'observe_freejoint': False, 'observe_com': False, 'render_labels': False, 'render_lidar_markers': True, 'render_lidar_radius': 0.15, 'render_lidar_size': 0.025, 'render_lidar_offset_init': 0.5, 'render_lidar_offset_delta': 0.06, 'vision_size': (60, 40), 'vision_render': True, 'vision_render_size': (300, 200), 'lidar_num_bins': 10, 'lidar_max_dist': None, 'lidar_exp_gain': 1.0, 'lidar_type': 'pseudo', 'lidar_alias': True, 'compass_shape': 2, 'task': 'goal', 'goal_placements': None, 'goal_locations': [], 'goal_keepout': 0.4, 'goal_size': 0.3, 'box_placements': None, 'box_locations': [], 'box_keepout': 0.2, 'box_size': 0.2, 'box_density': 0.001, 'box_null_dist': 2, 'reward_distance': 1.0, 'reward_goal': 1.0, 'reward_box_dist': 1.0, 'reward_box_goal': 1.0, 'reward_orientation': False, 'reward_orientation_scale': 0.002, 'reward_orientation_body': 'robot', 'reward_exception': -10.0, 'reward_x': 1.0, 'reward_z': 1.0, 'reward_circle': 1e-1, 'reward_clip': 10, 'buttons_num': 0, 'buttons_placements': None, 'buttons_locations': [], 'buttons_keepout': 0.3, 'buttons_size': 0.1, 'buttons_cost': 1.0, 'buttons_resampling_delay': 10, 'circle_radius': 1.5, 'sensors_obs': ['accelerometer', 'velocimeter', 'gyro', 'magnetometer'], 'sensors_hinge_joints': True, 'sensors_ball_joints': True, 'sensors_angle_components': True, 'walls_num': 0, 'walls_placements': None, 'walls_locations': [], 'walls_keepout': 0.0, 
'walls_size': 0.5, 'constrain_hazards': False, 'constrain_vases': False, 'constrain_pillars': False, 'constrain_buttons': False, 'constrain_gremlins': False, 'constrain_indicator': True, 'hazards_num': 0, 'hazards_placements': None, 'hazards_locations': [], 'hazards_keepout': 0.4, 'hazards_size': 0.3, 'hazards_cost': 1.0, 'vases_num': 0, 'vases_placements': None, 'vases_locations': [], 'vases_keepout': 0.15, 'vases_size': 0.1, 'vases_density': 0.001, 'vases_sink': 4e-5, 'vases_contact_cost': 1.0, 'vases_displace_cost': 0.0, 'vases_displace_threshold': 1e-3, 'vases_velocity_cost': 1.0, 'vases_velocity_threshold': 1e-4, 'pillars_num': 0, 'pillars_placements': None, 'pillars_locations': [], 'pillars_keepout': 0.3, 'pillars_size': 0.2, 'pillars_height': 0.5, 'pillars_cost': 1.0, 'gremlins_num': 0, 'gremlins_placements': None, 'gremlins_locations': [], 'gremlins_keepout': 0.5, 'gremlins_travel': 0.3, 'gremlins_size': 0.1, 'gremlins_density': 0.001, 'gremlins_contact_cost': 1.0, 'gremlins_dist_threshold': 0.2, 'gremlins_dist_cost': 1.0, 'frameskip_binom_n': 10, 'frameskip_binom_p': 1.0, '_seed': None, } def __init__(self, config={}): self.parse(config) gym.utils.EzPickle.__init__(self, config=config) self.robot = Robot(self.robot_base) self.action_space = gym.spaces.Box(-1, 1, (self.robot.nu,), dtype=np.float32) self.build_observation_space() self.build_placements_dict() self.viewer = None self.world = None self.clear() self.seed(self._seed) self.done = True def parse(self, config): self.config = deepcopy(self.DEFAULT) self.config.update(deepcopy(config)) for key, value in self.config.items(): assert key in self.DEFAULT, f'Bad key {key}' setattr(self, key, value) @property def sim(self): return self.world.sim @property def model(self): return self.sim.model @property def data(self): return self.sim.data @property def robot_pos(self): return self.data.get_body_xpos('robot').copy() @property def goal_pos(self): if self.task in ['goal', 'push']: return self.data.get_body_xpos('goal').copy() elif self.task == 'button': return self.data.get_body_xpos(f'button{self.goal_button}').copy() elif self.task == 'circle': return ORIGIN_COORDINATES elif self.task == 'none': return np.zeros(2) else: raise ValueError(f'Invalid task {self.task}') @property def box_pos(self): return self.data.get_body_xpos('box').copy() @property def buttons_pos(self): return [self.data.get_body_xpos(f'button{i}').copy() for i in range(self.buttons_num)] @property def vases_pos(self): return [self.data.get_body_xpos(f'vase{p}').copy() for p in range(self.vases_num)] @property def gremlins_obj_pos(self): return [self.data.get_body_xpos(f'gremlin{i}obj').copy() for i in range(self.gremlins_num)] @property def pillars_pos(self): return [self.data.get_body_xpos(f'pillar{i}').copy() for i in range(self.pillars_num)] @property def hazards_pos(self): return [self.data.get_body_xpos(f'hazard{i}').copy() for i in range(self.hazards_num)] @property def walls_pos(self): return [self.data.get_body_xpos(f'wall{i}').copy() for i in range(self.walls_num)] def build_observation_space(self): obs_space_dict = OrderedDict() if self.observe_freejoint: obs_space_dict['freejoint'] = gym.spaces.Box(-np.inf, np.inf, (7,), dtype=np.float32) if self.observe_com: obs_space_dict['com'] = gym.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float32) if self.observe_sensors: for sensor in self.sensors_obs: dim = self.robot.sensor_dim[sensor] obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (dim,), dtype=np.float32) for sensor in self.robot.hinge_vel_names: 
obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float32) for sensor in self.robot.ballangvel_names: obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (3,), dtype=np.float32) if self.sensors_angle_components: for sensor in self.robot.hinge_pos_names: obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (2,), dtype=np.float32) for sensor in self.robot.ballquat_names: obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (3, 3), dtype=np.float32) else: for sensor in self.robot.hinge_pos_names: obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (1,), dtype=np.float32) for sensor in self.robot.ballquat_names: obs_space_dict[sensor] = gym.spaces.Box(-np.inf, np.inf, (4,), dtype=np.float32) if self.task == 'push': if self.observe_box_comp: obs_space_dict['box_compass'] = gym.spaces.Box(-1.0, 1.0, (self.compass_shape,), dtype=np.float32) if self.observe_box_lidar: obs_space_dict['box_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.observe_goal_dist: obs_space_dict['goal_dist'] = gym.spaces.Box(0.0, 1.0, (1,), dtype=np.float32) if self.observe_goal_comp: obs_space_dict['goal_compass'] = gym.spaces.Box(-1.0, 1.0, (self.compass_shape,), dtype=np.float32) if self.observe_goal_lidar: obs_space_dict['goal_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.task == 'circle' and self.observe_circle: obs_space_dict['circle_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.observe_remaining: obs_space_dict['remaining'] = gym.spaces.Box(0.0, 1.0, (1,), dtype=np.float32) if self.walls_num and self.observe_walls: obs_space_dict['walls_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.observe_hazards: obs_space_dict['hazards_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.observe_vases: obs_space_dict['vases_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.gremlins_num and self.observe_gremlins: obs_space_dict['gremlins_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.pillars_num and self.observe_pillars: obs_space_dict['pillars_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.buttons_num and self.observe_buttons: obs_space_dict['buttons_lidar'] = gym.spaces.Box(0.0, 1.0, (self.lidar_num_bins,), dtype=np.float32) if self.observe_qpos: obs_space_dict['qpos'] = gym.spaces.Box(-np.inf, np.inf, (self.robot.nq,), dtype=np.float32) if self.observe_qvel: obs_space_dict['qvel'] = gym.spaces.Box(-np.inf, np.inf, (self.robot.nv,), dtype=np.float32) if self.observe_ctrl: obs_space_dict['ctrl'] = gym.spaces.Box(-np.inf, np.inf, (self.robot.nu,), dtype=np.float32) if self.observe_vision: width, height = self.vision_size rows, cols = height, width self.vision_size = (rows, cols) obs_space_dict['vision'] = gym.spaces.Box(0, 1.0, self.vision_size + (3,), dtype=np.float32) self.obs_space_dict = obs_space_dict if self.observation_flatten: self.obs_flat_size = sum([np.prod(i.shape) for i in self.obs_space_dict.values()]) self.observation_space = gym.spaces.Box(-np.inf, np.inf, (self.obs_flat_size,), dtype=np.float32) else: self.observation_space = gym.spaces.Dict(obs_space_dict) def toggle_observation_space(self): self.observation_flatten = not(self.observation_flatten) self.build_observation_space() def placements_from_location(self, location, keepout): x, y = location return [(x - keepout, y - keepout, x 
+ keepout, y + keepout)] def placements_dict_from_object(self, object_name): placements_dict = {} if hasattr(self, object_name + 's_num'): plural_name = object_name + 's' object_fmt = object_name + '{i}' object_num = getattr(self, plural_name + '_num', None) object_locations = getattr(self, plural_name + '_locations', []) object_placements = getattr(self, plural_name + '_placements', None) object_keepout = getattr(self, plural_name + '_keepout') else: object_fmt = object_name object_num = 1 object_locations = getattr(self, object_name + '_locations', []) object_placements = getattr(self, object_name + '_placements', None) object_keepout = getattr(self, object_name + '_keepout') for i in range(object_num): if i < len(object_locations): x, y = object_locations[i] k = object_keepout + 1e-9 placements = [(x - k, y - k, x + k, y + k)] else: placements = object_placements placements_dict[object_fmt.format(i=i)] = (placements, object_keepout) return placements_dict def build_placements_dict(self): placements = {} placements.update(self.placements_dict_from_object('robot')) placements.update(self.placements_dict_from_object('wall')) if self.task in ['goal', 'push']: placements.update(self.placements_dict_from_object('goal')) if self.task == 'push': placements.update(self.placements_dict_from_object('box')) if self.task == 'button' or self.buttons_num: placements.update(self.placements_dict_from_object('button')) if self.hazards_num: placements.update(self.placements_dict_from_object('hazard')) if self.vases_num: placements.update(self.placements_dict_from_object('vase')) if self.pillars_num: placements.update(self.placements_dict_from_object('pillar')) if self.gremlins_num: placements.update(self.placements_dict_from_object('gremlin')) self.placements = placements def seed(self, seed=None): self._seed = np.random.randint(2**32) if seed is None else seed def build_layout(self): if not self.randomize_layout: self.rs = np.random.RandomState(0) for _ in range(10000): if self.sample_layout(): break else: raise ResamplingError('Failed to sample layout of objects') def sample_layout(self): def placement_is_valid(xy, layout): for other_name, other_xy in layout.items(): other_keepout = self.placements[other_name][1] dist = np.sqrt(np.sum(np.square(xy - other_xy))) if dist < other_keepout + self.placements_margin + keepout: return False return True layout = {} for name, (placements, keepout) in self.placements.items(): conflicted = True for _ in range(100): xy = self.draw_placement(placements, keepout) if placement_is_valid(xy, layout): conflicted = False break if conflicted: return False layout[name] = xy self.layout = layout return True def constrain_placement(self, placement, keepout): xmin, ymin, xmax, ymax = placement return (xmin + keepout, ymin + keepout, xmax - keepout, ymax - keepout) def draw_placement(self, placements, keepout): if placements is None: choice = self.constrain_placement(self.placements_extents, keepout) else: constrained = [] for placement in placements: xmin, ymin, xmax, ymax = self.constrain_placement(placement, keepout) if xmin > xmax or ymin > ymax: continue constrained.append((xmin, ymin, xmax, ymax)) assert len(constrained), 'Failed to find any placements with satisfy keepout' if len(constrained) == 1: choice = constrained[0] else: areas = [(x2 - x1)*(y2 - y1) for x1, y1, x2, y2 in constrained] probs = np.array(areas) / np.sum(areas) choice = constrained[self.rs.choice(len(constrained), p=probs)] xmin, ymin, xmax, ymax = choice return np.array([self.rs.uniform(xmin, xmax), 
self.rs.uniform(ymin, ymax)]) def random_rot(self): return self.rs.uniform(0, 2 * np.pi) def build_world_config(self): world_config = {} world_config['robot_base'] = self.robot_base world_config['robot_xy'] = self.layout['robot'] if self.robot_rot is None: world_config['robot_rot'] = self.random_rot() else: world_config['robot_rot'] = float(self.robot_rot) if self.floor_display_mode: floor_size = max(self.placements_extents) world_config['floor_size'] = [floor_size + .1, floor_size + .1, 1] world_config['observe_vision'] = self.observe_vision world_config['objects'] = {} if self.vases_num: for i in range(self.vases_num): name = f'vase{i}' object = {'name': name, 'size': np.ones(3) * self.vases_size, 'type': 'box', 'density': self.vases_density, 'pos': np.r_[self.layout[name], self.vases_size - self.vases_sink], 'rot': self.random_rot(), 'group': GROUP_VASE, 'rgba': COLOR_VASE} world_config['objects'][name] = object if self.gremlins_num: self._gremlins_rots = dict() for i in range(self.gremlins_num): name = f'gremlin{i}obj' self._gremlins_rots[i] = self.random_rot() object = {'name': name, 'size': np.ones(3) * self.gremlins_size, 'type': 'box', 'density': self.gremlins_density, 'pos': np.r_[self.layout[name.replace('obj', '')], self.gremlins_size], 'rot': self._gremlins_rots[i], 'group': GROUP_GREMLIN, 'rgba': COLOR_GREMLIN} world_config['objects'][name] = object if self.task == 'push': object = {'name': 'box', 'type': 'box', 'size': np.ones(3) * self.box_size, 'pos': np.r_[self.layout['box'], self.box_size], 'rot': self.random_rot(), 'density': self.box_density, 'group': GROUP_BOX, 'rgba': COLOR_BOX} world_config['objects']['box'] = object world_config['geoms'] = {} if self.task in ['goal', 'push']: geom = {'name': 'goal', 'size': [self.goal_size, self.goal_size / 2], 'pos': np.r_[self.layout['goal'], self.goal_size / 2 + 1e-2], 'rot': self.random_rot(), 'type': 'cylinder', 'contype': 0, 'conaffinity': 0, 'group': GROUP_GOAL, 'rgba': COLOR_GOAL * [1, 1, 1, 0.25]} world_config['geoms']['goal'] = geom if self.hazards_num: for i in range(self.hazards_num): name = f'hazard{i}' geom = {'name': name, 'size': [self.hazards_size, 1e-2], 'pos': np.r_[self.layout[name], 2e-2], 'rot': self.random_rot(), 'type': 'cylinder', 'contype': 0, 'conaffinity': 0, 'group': GROUP_HAZARD, 'rgba': COLOR_HAZARD * [1, 1, 1, 0.25]} world_config['geoms'][name] = geom if self.pillars_num: for i in range(self.pillars_num): name = f'pillar{i}' geom = {'name': name, 'size': [self.pillars_size, self.pillars_height], 'pos': np.r_[self.layout[name], self.pillars_height], 'rot': self.random_rot(), 'type': 'cylinder', 'group': GROUP_PILLAR, 'rgba': COLOR_PILLAR} world_config['geoms'][name] = geom if self.walls_num: for i in range(self.walls_num): name = f'wall{i}' geom = {'name': name, 'size': np.ones(3) * self.walls_size, 'pos': np.r_[self.layout[name], self.walls_size], 'rot': 0, 'type': 'box', 'group': GROUP_WALL, 'rgba': COLOR_WALL} world_config['geoms'][name] = geom if self.buttons_num: for i in range(self.buttons_num): name = f'button{i}' geom = {'name': name, 'size': np.ones(3) * self.buttons_size, 'pos': np.r_[self.layout[name], self.buttons_size], 'rot': self.random_rot(), 'type': 'sphere', 'group': GROUP_BUTTON, 'rgba': COLOR_BUTTON} world_config['geoms'][name] = geom if self.task == 'circle': geom = {'name': 'circle', 'size': np.array([self.circle_radius, 1e-2]), 'pos': np.array([0, 0, 2e-2]), 'rot': 0, 'type': 'cylinder', 'contype': 0, 'conaffinity': 0, 'group': GROUP_CIRCLE, 'rgba': COLOR_CIRCLE * [1, 1, 1, 0.1]} 
world_config['geoms']['circle'] = geom world_config['mocaps'] = {} if self.gremlins_num: for i in range(self.gremlins_num): name = f'gremlin{i}mocap' mocap = {'name': name, 'size': np.ones(3) * self.gremlins_size, 'type': 'box', 'pos': np.r_[self.layout[name.replace('mocap', '')], self.gremlins_size], 'rot': self._gremlins_rots[i], 'group': GROUP_GREMLIN, 'rgba': np.array([1, 1, 1, .1]) * COLOR_GREMLIN} world_config['mocaps'][name] = mocap return world_config def clear(self): self.layout = None def build_goal(self): if self.task == 'goal': self.build_goal_position() self.last_dist_goal = self.dist_goal() elif self.task == 'push': self.build_goal_position() self.last_dist_goal = self.dist_goal() self.last_dist_box = self.dist_box() self.last_box_goal = self.dist_box_goal() elif self.task == 'button': assert self.buttons_num > 0, 'Must have at least one button' self.build_goal_button() self.last_dist_goal = self.dist_goal() elif self.task in ['x', 'z']: self.last_robot_com = self.world.robot_com() elif self.task in ['circle', 'none']: pass else: raise ValueError(f'Invalid task {self.task}') def sample_goal_position(self): placements, keepout = self.placements['goal'] goal_xy = self.draw_placement(placements, keepout) for other_name, other_xy in self.layout.items(): other_keepout = self.placements[other_name][1] dist = np.sqrt(np.sum(np.square(goal_xy - other_xy))) if dist < other_keepout + self.placements_margin + keepout: return False self.layout['goal'] = goal_xy return True
MIT License
arelle/arelle
arelle/CntlrWebMain.py
quickbooksGLresponse
python
def quickbooksGLresponse():
    from arelle import CntlrQuickBooks
    ticket = request.query.ticket
    media = request.query.media
    viewRequested = request.query.view
    status = CntlrQuickBooks.qbRequestStatus.get(ticket)
    if not status:
        return htmlBody(tableRows([_("QuickBooks ticket not found, request canceled.")], header=_("Quickbooks Request")))
    if status.startswith("ConnectionErrorMessage: "):
        CntlrQuickBooks.qbRequestStatus.pop(ticket, None)
        return errorReport([status[24:]], media)
    if status != "Done" or ticket not in CntlrQuickBooks.xbrlInstances:
        return htmlBody(tableRows([_("{0}, Waiting 20 seconds...").format(status)], header=_("Quickbooks Request")),
                        script='''
<script type="text/javascript">
<!--
var timer = setInterval("autoRefresh()", 1000 * 20);
function autoRefresh(){{clearInterval(timer);self.location.reload(true);}}
//-->
</script>
''')
    CntlrQuickBooks.qbRequestStatus.pop(ticket)
    instanceUuid = CntlrQuickBooks.xbrlInstances[ticket]
    CntlrQuickBooks.xbrlInstances.pop(ticket)
    options = Options()
    setattr(options, "entrypointFile", instanceUuid)
    viewFile = FileNamedStringIO(media)
    setattr(options, "factsFile", viewFile)
    return runOptionsAndGetResult(options, media, viewFile)
Poll for QuickBooks protocol responses for *get* requests to */rest/quickbooks/response*.

:returns: html, xml, csv, text -- Return per media type argument and request arguments, if response is ready, otherwise javascript to requery this *get* request periodically.
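Since this is a bottle route handler, it is exercised over HTTP rather than called directly; a hedged client-side sketch, where the host, port, and ticket value are placeholders:

import requests

# poll a running Arelle web server for a previously queued QuickBooks ticket
resp = requests.get(
    "http://localhost:8080/rest/quickbooks/response",   # placeholder host/port
    params={"ticket": "<ticket-uuid>", "media": "html", "view": "true"},
)
print(resp.text)   # either the rendered result or the auto-refresh waiting page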
https://github.com/arelle/arelle/blob/f9b83eb6c95be457c9fe07dda8e3f6207f0ec9af/arelle/CntlrWebMain.py#L478-L512
from arelle.webserver.bottle import Bottle, request, response, static_file from arelle.Cntlr import LogFormatter import os, io, sys, time, threading, uuid, zipfile from arelle import Version from arelle.FileSource import FileNamedStringIO from arelle.PluginManager import pluginClassMethods _os_pid = os.getpid() GETorPOST = ('GET', 'POST') GET = 'GET' POST = 'POST' def startWebserver(_cntlr, options): global imagesDir, cntlr, optionsPrototype cntlr = _cntlr imagesDir = cntlr.imagesDir optionValuesTypes = _STR_NUM_TYPES + (type(None),) optionsPrototype = dict((option,value if isinstance(value,_STR_NUM_TYPES) else None) for option in dir(options) for value in (getattr(options, option),) if isinstance(value,optionValuesTypes) and not option.startswith('_')) host, sep, portServer = options.webserver.partition(":") port, sep, server = portServer.partition(":") app = Bottle() pluginResult = None for pluginMethod in pluginClassMethods("CntlrWebMain.StartWebServer"): pluginResult = pluginMethod(app, cntlr, host, port, server) break if not (isinstance(pluginResult, str) and "skip-routes" in pluginResult): app.route('/rest/login', GET, login_form) app.route('/rest/login', POST, login_submit) app.route('/rest/logout', GET, logout) app.route('/favicon.ico', GET, arelleIcon) app.route('/rest/xbrl/<file:path>/open', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/close', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/validation/xbrl', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/DTS', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/concepts', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/pre', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/table', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/cal', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/dim', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/facts', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/factTable', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/roleTypes', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/arcroleTypes', GETorPOST, validation) app.route('/rest/xbrl/<file:path>/formulae', GETorPOST, validation) app.route('/rest/xbrl/validation', GETorPOST, validation) app.route('/rest/xbrl/view', GETorPOST, validation) app.route('/rest/xbrl/open', GETorPOST, validation) app.route('/rest/xbrl/close', GETorPOST, validation) app.route('/images/<imgFile>', GET, image) app.route('/rest/xbrl/diff', GET, diff) app.route('/rest/configure', GET, configure) app.route('/rest/stopWebServer', GET, stopWebServer) app.route('/quickbooks/server.asmx', POST, quickbooksServer) app.route('/rest/quickbooks/<qbReport>/xbrl-gl/<file:path>', GET, quickbooksGLrequest) app.route('/rest/quickbooks/<qbReport>/xbrl-gl/<file:path>/view', GET, quickbooksGLrequest) app.route('/rest/quickbooks/<qbReport>/xbrl-gl/view', GET, quickbooksGLrequest) app.route('/rest/quickbooks/response', GET, quickbooksGLresponse) app.route('/quickbooks/server.html', GET, quickbooksWebPage) app.route('/quickbooks/localhost.crt', GET, localhostCertificate) app.route('/localhost.crt', GET, localhostCertificate) app.route('/rest/test/test', GETorPOST, testTest) app.route('/help', GET, helpREST) app.route('/about', GET, about) app.route('/', GET, indexPageREST) if server == "cgi": app.route('<cgiAppPath:path>', GETorPOST, cgiInterface) if not (isinstance(pluginResult, str) and "skip-run" in pluginResult): if server == "wsgi": return app elif server == "cgi": if sys.stdin is None: 
sys.stdin = open(os.devnull, 'r') app.run(server=server) sys.exit(0) elif server: sys.path.insert(0,os.path.join(os.path.dirname(__file__),"webserver")) app.run(host=host, port=port or 80, server=server) else: app.run(host=host, port=port or 80) def cgiInterface(cgiAppPath): if not request.query: return indexPageCGI() elif 'about' in request.query: return about(cgiAppPath + "?image=arelle32.gif") elif 'help' in request.query: return helpREST() elif 'image' in request.query: return image(request.query.image) else: return indexPageCGI() def login_form(): return _('''<html><body><form method="POST"><table> <tr><td>Name:</td><td><input name="name" type="text" /></td></tr> <tr><td>Password:</td><td><input name="password" type="password" /></td></tr> <tr><td>&nbsp;</td><td><input type="submit" value="Submit" /></td></tr> </table></form></body></html>''') def login_submit(): name = request.forms.get('name') password = request.forms.get('password') if checkLogin(name, password): return _("<p>You are logged in as user: {0}</p>").format(name) else: return _("<p>Login failed</p>") def checkLogin(_user, _password): global user user = _user return True def logout(): global user user = None return _("<p>You are logged out.</p>") def arelleIcon(): return static_file("arelle.ico", root=imagesDir, mimetype='image/vnd.microsoft.icon') def image(imgFile): return static_file(imgFile, root=imagesDir) validationOptions = { "efm": ("validateEFM", True), "efm-pragmatic": ("disclosureSystemName", "efm-pragmatic"), "efm-strict": ("disclosureSystemName", "efm-strict"), "efm-all-years": ("disclosureSystemName", "efm-all-years"), "esef": ("disclosureSystemName", "esef"), "disclosure-system": ("disclosureSystemName", None), "ifrs": ("gfmName", "ifrs"), "hmrc": ("gfmName", "hmrc"), "sbr-nl": ("gfmName", "sbr-nl"), "utr": ("utrValidate", True), "infoset": ("infosetValidate", True), "import": ("importFiles", None), } validationKeyVarName = { "disclosureSystem": "disclosureSystemName", "roleTypes": "roleTypesFile", "arcroleTypes": "arcroleTypesFile" } class Options(): def __init__(self): for option, defaultValue in optionsPrototype.items(): setattr(self, option, defaultValue) supportedViews = {'DTS', 'concepts', 'pre', 'table', 'cal', 'dim', 'facts', 'factTable', 'formulae', 'roleTypes', 'arcroleTypes'} def validation(file=None): errors = [] flavor = request.query.flavor or 'standard' media = request.query.media or 'html' requestPathParts = request.urlparts[2].split('/') isValidation = 'validation' == requestPathParts[-1] or 'validation' == requestPathParts[-2] view = request.query.view viewArcrole = request.query.viewArcrole if request.method == 'POST': mimeType = request.get_header("Content-Type") if mimeType.startswith("multipart/form-data"): _upload = request.files.get("upload") if not _upload or not _upload.filename.endswith(".zip"): errors.append(_("POST file upload must be a zip file")) sourceZipStream = None else: sourceZipStream = _upload.file elif mimeType not in ('application/zip', 'application/x-zip', 'application/x-zip-compressed', 'multipart/x-zip'): errors.append(_("POST must provide a zip file, Content-Type '{0}' not recognized as a zip file.").format(mimeType)) sourceZipStream = request.body else: sourceZipStream = None if not view and not viewArcrole: if requestPathParts[-1] in supportedViews: view = requestPathParts[-1] if isValidation: if view or viewArcrole: errors.append(_("Only validation or one view can be specified in one requested.")) if media not in ('xml', 'xhtml', 'html', 'json', 'text', 'zip') 
and not (sourceZipStream and media == 'zip'): errors.append(_("Media '{0}' is not supported for validation (please select xhtml, html, xml, json or text)").format(media)) elif view or viewArcrole: if media not in ('xml', 'xhtml', 'html', 'csv', 'xlsx', 'json'): errors.append(_("Media '{0}' is not supported for view (please select xhtml, html, xml, csv, xlsx or json)").format(media)) elif requestPathParts[-1] not in ("open", "close"): errors.append(_("Neither validation nor view requested, nothing to do.")) if (flavor not in ('standard', 'standard-except-formula', 'formula-compile-only', 'formula-compile-and-run') and not flavor.startswith('edgar') and not flavor.startswith('sec')): errors.append(_("Flavor '{0}' is not supported").format(flavor)) if view and view not in supportedViews: errors.append(_("View '{0}' is not supported").format(view)) if errors: errors.insert(0, _("URL: ") + (file or request.query.file or '(no file)')) return errorReport(errors, media) options = Options() isFormulaOnly = False for key, value in request.query.items(): if key == "file": setattr(options, "entrypointFile", value) elif key == "flavor": if value.startswith("sec") or value.startswith("edgar"): setattr(options, "validateEFM", True) elif value == "formula-compile-only": isFormulaOnly = True setattr(options, "formulaAction", "validate") elif value == "formula-compile-and-run": isFormulaOnly = True setattr(options, "formulaAction", "run") elif value == "standard-except-formula": setattr(options, "formulaAction", "none") elif key in("media", "view", "viewArcrole"): pass elif key in validationOptions: optionKey, optionValue = validationOptions[key] setattr(options, optionKey, optionValue if optionValue is not None else value) elif key in validationKeyVarName: setattr(options, validationKeyVarName[key], value or True) elif not value: setattr(options, key, True) else: setattr(options, key, value) if file: setattr(options, "entrypointFile", file.replace(';','/')) requestPathParts = set(request.urlparts[2].split('/')) viewFile = None if isValidation: if not isFormulaOnly: setattr(options, "validate", True) elif view: viewFile = FileNamedStringIO(media) setattr(options, view + "File", viewFile) elif viewArcrole: viewFile = FileNamedStringIO(media) setattr(options, "viewArcrole", viewArcrole) setattr(options, "viewFile", viewFile) return runOptionsAndGetResult(options, media, viewFile, sourceZipStream) def runOptionsAndGetResult(options, media, viewFile, sourceZipStream=None): addLogToZip = False if media == "zip" and not viewFile: responseZipStream = io.BytesIO() if (hasattr(options, "saveOIMinstance") or (getattr(options, "entrypointFile", "") or "").rpartition(".")[2] in ("json", "csv", "xlsx")): plugins = (getattr(options, "plugins", "") or "").split("|") if getattr(options, "entrypointFile", "").rpartition(".")[2] in ("json", "csv", "xlsx"): if "loadFromOIM" not in plugins: plugins.append("loadFromOIM") addLogToZip = True if getattr(options, "saveOIMinstance", "").rpartition(".")[2] in ("json", "csv", "xlsx"): if "saveLoadableOIM" not in plugins: plugins.append("saveLoadableOIM") addLogToZip = True setattr(options, "saveLoadableOIM", getattr(options, "saveOIMinstance")) setattr(options, "saveOIMinstance", None) setattr(options, "plugins", "|".join(p for p in plugins if p) or None) else: responseZipStream = None successful = cntlr.run(options, sourceZipStream, responseZipStream) if media == "xml": response.content_type = 'text/xml; charset=UTF-8' elif media == "csv": response.content_type = 'text/csv; 
charset=UTF-8' elif media == "json": response.content_type = 'application/json; charset=UTF-8' elif media == "text": response.content_type = 'text/plain; charset=UTF-8' elif media == "zip": response.content_type = 'application/zip; charset=UTF-8' else: response.content_type = 'text/html; charset=UTF-8' if successful and viewFile: result = viewFile.getvalue().replace("&nbsp;","\u00A0").replace("&shy;","\u00AD").replace("&amp;","&") viewFile.close() elif media == "zip": responseZipStream.seek(0) if addLogToZip: _zip = zipfile.ZipFile(responseZipStream, "a", zipfile.ZIP_DEFLATED, True) _zip.writestr("log.txt", cntlr.logHandler.getText()) _zip.close() responseZipStream.seek(0) result = responseZipStream.read() responseZipStream.close() cntlr.logHandler.clearLogBuffer() elif media == "xml": result = cntlr.logHandler.getXml() elif media == "json": result = cntlr.logHandler.getJson() elif media == "text": _logFormat = request.query.logFormat if _logFormat: _stdLogFormatter = cntlr.logHandler.formatter cntlr.logHandler.formatter = LogFormatter(_logFormat) result = cntlr.logHandler.getText() if _logFormat: cntlr.logHandler.formatter = _stdLogFormatter del _stdLogFormatter else: result = htmlBody(tableRows(cntlr.logHandler.getLines(), header=_("Messages"))) return result def diff(): if not request.query.fromDTS or not request.query.toDTS or not request.query.report: return _("From DTS, to DTS, and report must be specified") options = Options() setattr(options, "entrypointFile", request.query.fromDTS) setattr(options, "diffFile", request.query.toDTS) fh = FileNamedStringIO(request.query.report) setattr(options, "versReportFile", fh) cntlr.run(options) reportContents = fh.getvalue() fh.close() response.content_type = 'text/xml; charset=UTF-8' return reportContents def configure(): if not request.query.proxy and not request.query.plugins and not request.query.packages and 'environment' not in request.query: return _("proxy, plugins, packages or environment must be specified") options = Options() if request.query.proxy: setattr(options, "proxy", request.query.proxy) if request.query.plugins: setattr(options, "plugins", request.query.plugins) if request.query.packages: setattr(options, "packages", request.query.packages) if 'environment' in request.query: setattr(options, "showEnvironment", True) cntlr.run(options) response.content_type = 'text/html; charset=UTF-8' return htmlBody(tableRows(cntlr.logHandler.getLines(), header=_("Configuration Request"))) def stopWebServer(): def stopSoon(delaySeconds): time.sleep(delaySeconds) import signal os.kill(_os_pid, signal.SIGTERM) threading.Thread(target=stopSoon, args=(2.5,), daemon=True).start() response.content_type = 'text/html; charset=UTF-8' return htmlBody(tableRows((time.strftime("Received at %Y-%m-%d %H:%M:%S"), "Good bye...",), header=_("Stop Request"))) def testTest(): return "Results attached:\n" + multipartResponse(( ("file1", "text/plain", "test text 1"), ("file2", "text/plain", "test text 2"), ("file3", "text/plain", "test text 3"), )) def quickbooksServer(): from arelle import CntlrQuickBooks response.content_type = 'text/xml; charset=UTF-8' return CntlrQuickBooks.server(cntlr, request.body, request.urlparts) def quickbooksGLrequest(qbReport=None, file=None): from arelle.CntlrQuickBooks import supportedQbReports, qbRequest from arelle.ModelValue import dateTime errors = [] requestPathParts = request.urlparts[2].split('/') viewRequested = "view" == requestPathParts[-1] media = request.query.media or 'html' fromDate = request.query.fromDate toDate 
= request.query.toDate if qbReport not in supportedQbReports: errors.append(_("QuickBooks report '{0}' is not supported (please select from: {1})").format( qbReport, ', '.join(supportedQbReports))) if media not in ('xml', 'xhtml', 'html'): errors.append(_("Media '{0}' is not supported for xbrl-gl (please select xhtml, html or xml)").format(media)) if not fromDate or dateTime(fromDate) is None: errors.append(_("FromDate '{0}' missing or not valid").format(fromDate)) if not toDate or dateTime(toDate) is None: errors.append(_("ToDate '{0}' missing or not valid").format(toDate)) if errors: return errorReport(errors, media) ticket = qbRequest(qbReport, fromDate, toDate, file) result = htmlBody(tableRows([_("Request queued for QuickBooks...")], header=_("Quickbooks Request")), script=''' <script type="text/javascript"> <!-- var timer = setInterval("autoRefresh()", 1000 * 10); function autoRefresh(){{location.href = "/rest/quickbooks/response?ticket={0}&media={1}&view={2}";}} //--> </script> '''.format(ticket, media, viewRequested)) return result
Apache License 2.0
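The diff() handler shown in the context above reads fromDTS, toDTS and report from the query string and returns the generated versioning report as text/xml. A minimal client sketch, assuming the requests package is installed and that the handler is mounted at /rest/diff on a locally running Arelle web server; the route registration is not part of this excerpt, so the URL and file names below are purely illustrative:

import requests

resp = requests.get(
    "http://localhost:8080/rest/diff",      # hypothetical host, port and path
    params={
        "fromDTS": "old-instance.xbrl",     # hypothetical DTS entry points
        "toDTS": "new-instance.xbrl",
        "report": "versioning-report.xml",
    },
)
print(resp.headers.get("Content-Type"))     # the handler sets text/xml; charset=UTF-8
print(resp.text[:200])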
jupyterhub/the-littlest-jupyterhub
tljh/user.py
remove_group
python
def remove_group(groupname):
    try:
        grp.getgrnam(groupname)
    except KeyError:
        return

    subprocess.check_call([
        'delgroup',
        '--quiet',
        groupname
    ])
Remove group from system if it exists
https://github.com/jupyterhub/the-littlest-jupyterhub/blob/75a5724292a0048b6b6d940a4a82c087a3ecb238/tljh/user.py#L72-L86
import grp import pwd import subprocess from os.path import expanduser from tljh.utils import get_plugin_manager def ensure_user(username): try: pwd.getpwnam(username) return except KeyError: pass subprocess.check_call([ 'useradd', '--create-home', username ]) subprocess.check_call([ 'chmod', 'o-rwx', expanduser('~{username}'.format(username=username)) ]) pm = get_plugin_manager() pm.hook.tljh_new_user_create(username=username) def remove_user(username): try: pwd.getpwnam(username) except KeyError: return subprocess.check_call([ 'deluser', '--quiet', username ]) def ensure_group(groupname): subprocess.check_call([ 'groupadd', '--force', groupname ])
BSD 3-Clause New or Revised License
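A short usage sketch for remove_group above, assuming it runs with root privileges on a Debian-style system that provides the delgroup command; the group name is illustrative:

from tljh.user import remove_group

# Deletes the group if it exists; returns silently when it does not.
remove_group("jupyterhub-admins")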
facelessuser/pyspelling
pyspelling/filters/text.py
get_plugin
python
def get_plugin():
    return TextFilter
Return the filter.
https://github.com/facelessuser/pyspelling/blob/3edf5857c972970a607df77ad5cc14fc01ab3949/pyspelling/filters/text.py#L83-L86
from .. import filters import codecs import unicodedata class TextFilter(filters.Filter): def __init__(self, options, default_encoding='utf-8'): super().__init__(options, default_encoding) def get_default_config(self): return { 'normalize': '', 'convert_encoding': '', 'errors': 'strict' } def validate_options(self, k, v): super().validate_options(k, v) if k == 'errors' and v.lower() not in ('strict', 'replace', 'ignore', 'backslashreplace'): raise ValueError("{}: '{}' is not a valid value for '{}'".format(self.__class__.__name, v, k)) if k == 'normalize' and v.upper() not in ('NFC', 'NFKC', 'NFD', 'NFKD'): raise ValueError("{}: '{}' is not a valid value for '{}'".format(self.__class__.__name, v, k)) def setup(self): self.normalize = self.config['normalize'].upper() self.convert_encoding = self.config['convert_encoding'].lower() self.errors = self.config['errors'].lower() if self.convert_encoding: self.convert_encoding = codecs.lookup( filters.PYTHON_ENCODING_NAMES.get(self.default_encoding, self.default_encoding).lower() ).name if ( self.convert_encoding.startswith(('utf-32', 'utf-16')) and not self.convert_encoding.endswith(('le', 'be')) ): self.convert_encoding += '-le' if self.convert_encoding == 'utf-8-sig': self.convert_encoding = 'utf-8' def convert(self, text, encoding): if self.normalize in ('NFC', 'NFKC', 'NFD', 'NFKD'): text = unicodedata.normalize(self.normalize, text) if self.convert_encoding: text = text.encode(self.convert_encoding, self.errors).decode(self.convert_encoding) encoding = self.convert_encoding return text, encoding def filter(self, source_file, encoding): with codecs.open(source_file, 'r', encoding=encoding) as f: text = f.read() text, encoding = self.convert(text, encoding) return [filters.SourceText(text, source_file, encoding, 'text')] def sfilter(self, source): text, encoding = self.convert(source.text, source.encoding) return [filters.SourceText(text, source.context, encoding, 'text')]
MIT License
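get_plugin is the hook pyspelling uses to locate the filter class, but it can also be called directly. A small sketch, assuming the option values below (they mirror the defaults in get_default_config from the context) and a hypothetical README.txt input file:

from pyspelling.filters.text import get_plugin

TextFilter = get_plugin()
text_filter = TextFilter({'normalize': 'NFC', 'convert_encoding': '', 'errors': 'strict'})

# filter() returns a list of SourceText objects carrying the normalized text.
sources = text_filter.filter('README.txt', 'utf-8')
print(len(sources))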
esss/qmxgraph
src/qmxgraph/api.py
QmxGraphApi.has_tag
python
def has_tag(self, cell_id, tag_name):
    return self.call_api('hasTag', cell_id, tag_name)
If cell has tag. :param str cell_id: Id of a cell in graph. :param str tag_name: Name of tag. :rtype: bool :return: True if tag exists in cell.
https://github.com/esss/qmxgraph/blob/e9f2d26b95012843ad31ae71b0b0864e1ba31536/src/qmxgraph/api.py#L770-L779
import sys import textwrap import weakref from contextlib import contextmanager from typing import Any from typing import Generator from typing import List import qmxgraph.debug import qmxgraph.js from qmxgraph.exceptions import InvalidJavaScriptError class QmxGraphApi(object): SOURCE_TERMINAL_CELL = 'source' TARGET_TERMINAL_CELL = 'target' LAYOUT_ORGANIC = 'organic' LAYOUT_COMPACT = 'compact' LAYOUT_CIRCLE = 'circle' LAYOUT_COMPACT_TREE = 'compact_tree' LAYOUT_EDGE_LABEL = 'edge_label' LAYOUT_PARALLEL_EDGE = 'parallel_edge' LAYOUT_PARTITION = 'partition' LAYOUT_RADIAL_TREE = 'radial_tree' LAYOUT_STACK = 'stack' def __init__(self, graph, call_context_manager_factory): self._graph = weakref.ref(graph) self._call_context_manager_factory = call_context_manager_factory def insert_vertex(self, x, y, width, height, label, style=None, tags=None, id=None): return self.call_api('insertVertex', x, y, width, height, label, style, tags, id) def insert_port( self, vertex_id, port_name, x, y, width, height, label=None, style=None, tags=None ): return self.call_api( 'insertPort', vertex_id, port_name, x, y, width, height, label, style, tags ) def insert_edge( self, source_id, target_id, label, style=None, tags=None, source_port_name=None, target_port_name=None, id=None, ): return self.call_api( 'insertEdge', source_id, target_id, label, style, tags, source_port_name, target_port_name, id, ) def insert_decoration(self, x, y, width, height, label, style=None, tags=None, id=None): return self.call_api('insertDecoration', x, y, width, height, label, style, tags, id) def insert_decoration_on_edge( self, edge_id, position, width, height, label, style=None, tags=None, id=None ): return self.call_api( 'insertDecorationOnEdge', edge_id, position, width, height, label, style, tags, id ) def insert_table( self, x, y, width, contents, title, tags=None, style=None, parent_id=None, id=None ): from . import decoration_contents contents = decoration_contents.asdict(contents) return self.call_api( 'insertTable', x, y, width, contents, title, tags, style, parent_id, id ) def update_table(self, table_id, contents, title): from . 
import decoration_contents contents = decoration_contents.asdict(contents) self.call_api_async('updateTable', table_id, contents, title) def update_port( self, vertex_id, port_name, x=None, y=None, width=None, height=None, label=None, style=None, tags=None, ): self.call_api_async( 'updatePort', vertex_id, port_name, x, y, width, height, label, style, tags ) def get_port_names(self, vertex_id): return self.call_api('getPortNames', vertex_id) def group(self): return self.call_api('group') def ungroup(self): return self.call_api('ungroup') def toggle_outline(self): self.call_api_async('toggleOutline') def toggle_grid(self): self.call_api_async('toggleGrid') def toggle_snap(self): self.call_api_async('toggleSnap') def get_cell_id_at(self, x, y): return self.call_api('getCellIdAt', x, y) def get_decoration_parent_cell_id(self, cell_id): return self.call_api('getDecorationParentCellId', cell_id) def has_cell(self, cell_id): return self.call_api('hasCell', cell_id) def has_port(self, cell_id, port_name): return self.call_api('hasPort', cell_id, port_name) def get_cell_type(self, cell_id): return self.call_api('getCellType', cell_id) def get_geometry(self, cell_id): return self.call_api('getGeometry', cell_id) def get_terminal_points(self, cell_id): return self.call_api('getEdgeTerminalPoints', cell_id) def get_decoration_position(self, cell_id): return self.call_api('getDecorationPosition', cell_id) def set_decoration_position(self, cell_id, position): return self.call_api('setDecorationPosition', cell_id, position) def set_visible(self, cell_id, visible): return self.call_api('setVisible', cell_id, visible) def is_visible(self, cell_id): return self.call_api('isVisible', cell_id) def set_port_visible(self, cell_id, port_name, visible): return self.call_api('setPortVisible', cell_id, port_name, visible) def is_port_visible(self, cell_id, port_name): return self.call_api('isPortVisible', cell_id, port_name) def set_connectable(self, cell_id, connectable): self.call_api('setConnectable', cell_id, connectable) def is_connectable(self, cell_id): return self.call_api('isConnectable', cell_id) def zoom_in(self): self.call_api_async('zoomIn') def zoom_out(self): self.call_api_async('zoomOut') def reset_zoom(self): self.call_api_async('resetZoom') def fit(self): self.call_api_async('fit') def get_zoom_scale(self): return self.call_api('getZoomScale') def get_scale_and_translation(self): return tuple(self.call_api('getScaleAndTranslation')) def set_scale_and_translation(self, scale, x, y): return self.call_api('setScaleAndTranslation', scale, x, y) def set_selected_cells(self, cell_ids): return self.call_api('setSelectedCells', cell_ids) def get_selected_cells(self): return self.call_api('getSelectedCells') def remove_cells(self, cell_ids, ignore_missing_cells=False): return self.call_api('removeCells', cell_ids, ignore_missing_cells) def remove_port(self, vertex_id, port_name): return self.call_api('removePort', vertex_id, port_name) def register_double_click_handler(self, handler): return self.call_api('registerDoubleClickHandler', qmxgraph.js.Variable(handler)) def register_popup_menu_handler(self, handler): return self.call_api('registerPopupMenuHandler', qmxgraph.js.Variable(handler)) def register_label_changed_handler(self, handler): return self.call_api('registerLabelChangedHandler', qmxgraph.js.Variable(handler)) def register_cells_added_handler(self, handler): return self.call_api('registerCellsAddedHandler', qmxgraph.js.Variable(handler)) def register_cells_removed_handler(self, handler): return 
self.call_api('registerCellsRemovedHandler', qmxgraph.js.Variable(handler)) def register_selection_changed_handler(self, handler): return self.call_api('registerSelectionChangedHandler', qmxgraph.js.Variable(handler)) def register_terminal_changed_handler(self, handler): return self.call_api('registerTerminalChangedHandler', qmxgraph.js.Variable(handler)) def register_terminal_with_port_changed_handler(self, handler): return self.call_api( 'registerTerminalWithPortChangedHandler', qmxgraph.js.Variable(handler), ) def register_view_update_handler(self, handler): return self.call_api('registerViewUpdateHandler', qmxgraph.js.Variable(handler)) def register_cells_bounds_changed_handler(self, handler): return self.call_api('registerBoundsChangedHandler', qmxgraph.js.Variable(handler)) def resize_container(self, width, height): self.call_api_async('resizeContainer', width, height) def get_label(self, cell_id): return self.call_api('getLabel', cell_id) def set_label(self, cell_id, label): return self.call_api('setLabel', cell_id, label) def set_style(self, cell_id, style): return self.call_api('setStyle', cell_id, style) def get_style(self, cell_id): return self.call_api('getStyle', cell_id) def set_tag(self, cell_id, tag_name, tag_value): return self.call_api('setTag', cell_id, tag_name, tag_value) def get_tag(self, cell_id, tag_name): return self.call_api('getTag', cell_id, tag_name)
MIT License
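has_tag only proxies a call into the JavaScript graph API, so it needs a fully constructed QmxGraphApi bound to a live graph widget. A hypothetical sketch, assuming api is such an instance (building one requires Qt widget plumbing that is out of scope here):

# 'api' is assumed to be an existing QmxGraphApi instance.
cell_id = api.insert_vertex(10, 10, 50, 30, "node A", tags={"kind": "pump"})

if api.has_tag(cell_id, "kind"):
    print(api.get_tag(cell_id, "kind"))   # -> "pump"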
dmitriy-serdyuk/twinnet-asr
libs/Theano/theano/sparse/basic.py
csm_indices
python
def csm_indices(csm):
    return csm_properties(csm)[1]
Return the indices field of the sparse variable.
https://github.com/dmitriy-serdyuk/twinnet-asr/blob/799220d682306467a2b401e42e788f8c33382b00/libs/Theano/theano/sparse/basic.py#L615-L620
from __future__ import print_function import sys import numpy from numpy.lib.stride_tricks import as_strided from six.moves import xrange import scipy.sparse import theano from theano import gof, tensor, scalar, config from theano.gradient import DisconnectedType from theano.sparse.utils import hash_from_sparse import theano.tests.unittest_tools as utt from theano.gradient import grad_not_implemented, grad_undefined from theano.sparse.type import SparseType, _is_sparse sparse_formats = ['csc', 'csr'] _mtypes = [scipy.sparse.csc_matrix, scipy.sparse.csr_matrix] _mtype_to_str = {scipy.sparse.csc_matrix: "csc", scipy.sparse.csr_matrix: "csr"} def _is_sparse_variable(x): if not isinstance(x, gof.Variable): raise NotImplementedError("this function should only be called on " "*variables* (of type sparse.SparseType " "or tensor.TensorType, for instance), not ", x) return isinstance(x.type, SparseType) def _is_dense_variable(x): if not isinstance(x, gof.Variable): raise NotImplementedError("this function should only be called on " "*variables* (of type sparse.SparseType or " "tensor.TensorType, for instance), not ", x) return isinstance(x.type, tensor.TensorType) def _is_dense(x): if not isinstance(x, (scipy.sparse.spmatrix, numpy.ndarray)): raise NotImplementedError("this function should only be called on " "sparse.scipy.sparse.spmatrix or " "numpy.ndarray, not,", x) return isinstance(x, numpy.ndarray) def _kmap_eq(a, b): if a is None and b is None: return True if a is None or b is None: return False return numpy.all(a == b) def _kmap_hash(a): if a is None: return 12345 return hash(numpy.str(a)) def as_sparse_variable(x, name=None): if isinstance(x, gof.Apply): if len(x.outputs) != 1: raise ValueError("It is ambiguous which output of a " "multi-output Op has to be fetched.", x) else: x = x.outputs[0] if isinstance(x, gof.Variable): if not isinstance(x.type, SparseType): raise TypeError("Variable type field must be a SparseType.", x, x.type) return x try: return constant(x, name=name) except TypeError: raise TypeError("Cannot convert %s to SparseType" % x, type(x)) as_sparse = as_sparse_variable def as_sparse_or_tensor_variable(x, name=None): try: return as_sparse_variable(x, name) except (ValueError, TypeError): return theano.tensor.as_tensor_variable(x, name) def verify_grad_sparse(op, pt, structured=False, *args, **kwargs): def conv_none(x): return x def conv_csr(ind, indptr, shp): def f(spdata): return CSR(spdata, ind, indptr, shp) return f def conv_csc(ind, indptr, shp): def f(spdata): return CSC(spdata, ind, indptr, shp) return f iconv = [] dpt = [] for p in pt: if _is_sparse(p): if structured: dpt.append(p.data) else: dpt.append(p.toarray()) if p.format == 'csr': if structured: iconv.append(conv_csr(p.indices[:p.size], p.indptr, p.shape)) else: iconv.append(csr_from_dense) elif p.format == 'csc': if structured: iconv.append(conv_csc(p.indices[:p.size], p.indptr, p.shape)) else: iconv.append(csc_from_dense) else: raise NotImplementedError("No conv for %s" % (p.format,)) else: dpt.append(p) iconv.append(conv_none) output = op(*[as_sparse_or_tensor_variable(p) for p in pt]) if isinstance(output, (list, tuple)): raise NotImplementedError("verify_grad can't deal with " "multiple outputs") if _is_sparse_variable(output): oconv = DenseFromSparse(structured=structured) else: oconv = conv_none def conv_op(*inputs): ipt = [conv(i) for i, conv in zip(inputs, iconv)] out = op(*ipt) return oconv(out) return utt.verify_grad(conv_op, dpt, *args, **kwargs) verify_grad_sparse.E_grad = utt.verify_grad.E_grad 
def constant(x, name=None): if not isinstance(x, scipy.sparse.spmatrix): raise TypeError("sparse.constant must be called on a " "scipy.sparse.spmatrix") try: return SparseConstant(SparseType(format=x.format, dtype=x.dtype), x.copy(), name=name) except TypeError: raise TypeError("Could not convert %s to SparseType" % x, type(x)) def sp_ones_like(x): data, indices, indptr, shape = csm_properties(x) return CSM(format=x.format)(tensor.ones_like(data), indices, indptr, shape) def sp_zeros_like(x): _, _, indptr, shape = csm_properties(x) return CSM(format=x.format)(data=numpy.array([], dtype=x.type.dtype), indices=numpy.array([], dtype='int32'), indptr=tensor.zeros_like(indptr), shape=shape) class _sparse_py_operators: T = property(lambda self: transpose(self), doc="Return aliased transpose of self (read-only)") def astype(self, dtype): return cast(self, dtype) def __neg__(self): return neg(self) def __add__(left, right): return add(left, right) def __radd__(right, left): return add(left, right) def __sub__(left, right): return sub(left, right) def __rsub__(right, left): return sub(left, right) def __mul__(left, right): return mul(left, right) def __rmul__(left, right): return mul(left, right) def __lt__(self, other): return lt(self, other) def __le__(self, other): return le(self, other) def __gt__(self, other): return gt(self, other) def __ge__(self, other): return ge(self, other) def __dot__(left, right): return structured_dot(left, right) def __rdot__(right, left): return structured_dot(left, right) def toarray(self): return dense_from_sparse(self) shape = property(lambda self: tensor.shape(dense_from_sparse(self))) ndim = property(lambda self: self.type.ndim) dtype = property(lambda self: self.type.dtype) size = property(lambda self: csm_data(self).size) def zeros_like(model): return sp_zeros_like(model) def __getitem__(self, args): if not isinstance(args, tuple): args = args, if len(args) == 2: scalar_arg_1 = (numpy.isscalar(args[0]) or getattr(args[0], 'type', None) == tensor.iscalar) scalar_arg_2 = (numpy.isscalar(args[1]) or getattr(args[1], 'type', None) == tensor.iscalar) if scalar_arg_1 and scalar_arg_2: ret = get_item_scalar(self, args) elif isinstance(args[0], list): ret = get_item_2lists(self, args[0], args[1]) else: ret = get_item_2d(self, args) elif isinstance(args[0], list): ret = get_item_list(self, args[0]) else: ret = get_item_2d(self, args) return ret class SparseVariable(_sparse_py_operators, gof.Variable): dtype = property(lambda self: self.type.dtype) format = property(lambda self: self.type.format) def __str__(self): return '%s{%s,%s}' % ( self.__class__.__name__, self.format, self.dtype) def __repr__(self): return str(self) class SparseConstantSignature(tuple): def __eq__(self, other): (a, b), (x, y) = self, other return (a == x and (b.dtype == y.dtype) and (type(b) == type(y)) and (b.shape == y.shape) and (abs(b - y).sum() < 1e-6 * b.nnz)) def __hash__(self): (a, b) = self return hash(type(self)) ^ hash(a) ^ hash(type(b)) def theano_hash(self): (_, d) = self return hash_from_sparse(d) class SparseConstant(gof.Constant, _sparse_py_operators): dtype = property(lambda self: self.type.dtype) format = property(lambda self: self.type.format) def signature(self): assert self.data is not None return SparseConstantSignature((self.type, self.data)) def __str__(self): return '%s{%s,%s,shape=%s,nnz=%s}' % ( self.__class__.__name__, self.format, self.dtype, self.data.shape, self.data.nnz) def __repr__(self): return str(self) SparseType.Variable = SparseVariable SparseType.Constant 
= SparseConstant def matrix(format, name=None, dtype=None): if dtype is None: dtype = config.floatX type = SparseType(format=format, dtype=dtype) return type(name) def csc_matrix(name=None, dtype=None): return matrix('csc', name, dtype) def csr_matrix(name=None, dtype=None): return matrix('csr', name, dtype) def bsr_matrix(name=None, dtype=None): return matrix('bsr', name, dtype) csc_dmatrix = SparseType(format='csc', dtype='float64') csr_dmatrix = SparseType(format='csr', dtype='float64') bsr_dmatrix = SparseType(format='bsr', dtype='float64') csc_fmatrix = SparseType(format='csc', dtype='float32') csr_fmatrix = SparseType(format='csr', dtype='float32') bsr_fmatrix = SparseType(format='bsr', dtype='float32') all_dtypes = SparseType.dtype_set complex_dtypes = [t for t in all_dtypes if t[:7] == 'complex'] float_dtypes = [t for t in all_dtypes if t[:5] == 'float'] int_dtypes = [t for t in all_dtypes if t[:3] == 'int'] uint_dtypes = [t for t in all_dtypes if t[:4] == 'uint'] continuous_dtypes = complex_dtypes + float_dtypes discrete_dtypes = int_dtypes + uint_dtypes class CSMProperties(gof.Op): view_map = {0: [0], 1: [0], 2: [0]} kmap = None def __init__(self, kmap=None): self.kmap = kmap def __eq__(self, other): return type(self) == type(other) and _kmap_eq(self.kmap, other.kmap) def __hash__(self): return 8234 ^ hash(type(self)) ^ _kmap_hash(self.kmap) def __str__(self): return "%s{%s}" % ( self.__class__.__name__, self.kmap) def make_node(self, csm): csm = as_sparse_variable(csm) assert csm.format in ["csr", "csc"] data = tensor.TensorType(dtype=csm.type.dtype, broadcastable=(False,))() return gof.Apply(self, [csm], [data, tensor.ivector(), tensor.ivector(), tensor.ivector()]) def perform(self, node, inputs, out): (csm,) = inputs if self.kmap is None: out[0][0] = csm.data else: out[0][0] = csm.data[self.kmap] if str(csm.data.dtype) == 'int32': out[0][0] = theano._asarray(out[0][0], dtype='int32') out[1][0] = theano._asarray(csm.indices, dtype='int32') out[2][0] = theano._asarray(csm.indptr, dtype='int32') out[3][0] = theano._asarray(csm.shape, dtype='int32') def grad(self, inputs, g): (csm,) = inputs if isinstance(g[0].type, DisconnectedType): return [csm.zeros_like()] data, indices, indptr, shape = csm_properties(csm) return [CSM(csm.format)(g[0], indices, indptr, shape)] csm_properties = CSMProperties() def csm_data(csm): return csm_properties(csm)[0]
MIT License
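A sketch of reading the indices field of a symbolic sparse matrix with csm_indices, assuming a working Theano/SciPy installation (csc_matrix and csm_indices are both exported from theano.sparse):

import numpy as np
import scipy.sparse
import theano
from theano import sparse

x = sparse.csc_matrix('x', dtype='float64')
f = theano.function([x], sparse.csm_indices(x))

m = scipy.sparse.csc_matrix(np.array([[0, 1], [2, 0]], dtype='float64'))
print(f(m))   # the CSC indices array of m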
square/bionic
bionic/deps/optdep.py
import_optional_dependency
python
def import_optional_dependency(name, purpose=None, raise_on_missing=True):
    # Only names registered in extras_by_importable_name are allowed.
    if name not in extras_by_importable_name:
        raise AssertionError(
            oneline(
                f"""
                Attempted to import {name!r},
                which is not registered as a dependency"""
            )
        )

    try:
        return importlib.import_module(name)
    except ImportError:
        if raise_on_missing:
            # Tell the user which pip extra provides the missing package.
            extra_name = extras_by_importable_name[name]
            if purpose is None:
                description = "required"
            else:
                description = "required for " + purpose
            raise ImportError(
                oneline(
                    f"""
                    Unable to import package {name!r}, which is {description};
                    you can use ``pip install 'bionic[{extra_name}]'``
                    to resolve this"""
                )
            )
        else:
            return None
Attempts to import a Python module that may or may not be available. If it's not available, this function throws an ImportError explaining what the user needs to install. (Unless ``raise_on_missing`` is set to False, in which case it returns None.)
https://github.com/square/bionic/blob/357da8e2806996427e0aa6efd08f7ea8c5198f9b/bionic/deps/optdep.py#L67-L108
import re import importlib from .extras import extras_require as package_desc_lists_by_extra from ..utils.misc import oneline ILLEGAL_NAME_CHAR = re.compile("[^a-zA-Z0-9\\-._\\[\\]]") def first_token_from_package_desc(desc): first_mismatch = ILLEGAL_NAME_CHAR.search(desc) if first_mismatch is None: return desc if desc[first_mismatch.start()] not in " <>!=": raise AssertionError( oneline( f""" Package descriptor {desc!r} contained unexpected character {desc[first_mismatch.start()]!r}""" ) ) return desc[: first_mismatch.start()] alias_lists_by_package = { "google-cloud-logging": ["google.cloud.logging"], "google-auth": ["google.auth"], "Pillow": ["PIL.Image"], "dask[dataframe]": ["dask.dataframe"], "google-api-python-client": ["googleapiclient.discovery"], } extras_by_importable_name = {} for extra, package_descs in package_desc_lists_by_extra.items(): for package_desc in package_descs: package = first_token_from_package_desc(package_desc) if package not in extras_by_importable_name: extras_by_importable_name[package] = extra if package in alias_lists_by_package: for importable_name in alias_lists_by_package[package]: assert importable_name not in extras_by_importable_name extras_by_importable_name[importable_name] = extra TEST_EXTRA_NAME = "_FAKE_TEST_EXTRA_" TEST_PACKAGE_NAME = "_FAKE_TEST_PACKAGE_" extras_by_importable_name[TEST_PACKAGE_NAME] = TEST_EXTRA_NAME
Apache License 2.0
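A usage sketch for import_optional_dependency; 'dask.dataframe' is used because the context above registers it via alias_lists_by_package, so it passes the assertion guard:

from bionic.deps.optdep import import_optional_dependency

# Returns the module, or None (because raise_on_missing=False) when the
# underlying package is not installed.
dd = import_optional_dependency(
    'dask.dataframe', purpose='reading dataframes', raise_on_missing=False
)
if dd is None:
    print("dask[dataframe] is not installed")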
docusign/docusign-python-client
docusign_esign/models/envelope_update_summary.py
EnvelopeUpdateSummary.bulk_envelope_status
python
def bulk_envelope_status(self):
    return self._bulk_envelope_status
Gets the bulk_envelope_status of this EnvelopeUpdateSummary. # noqa: E501 :return: The bulk_envelope_status of this EnvelopeUpdateSummary. # noqa: E501 :rtype: BulkEnvelopeStatus
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/envelope_update_summary.py#L87-L94
import pprint import re import six from docusign_esign.client.configuration import Configuration class EnvelopeUpdateSummary(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'bulk_envelope_status': 'BulkEnvelopeStatus', 'envelope_id': 'str', 'error_details': 'ErrorDetails', 'list_custom_field_update_results': 'list[ListCustomField]', 'lock_information': 'LockInformation', 'purge_state': 'str', 'recipient_update_results': 'list[RecipientUpdateResponse]', 'tab_update_results': 'Tabs', 'text_custom_field_update_results': 'list[TextCustomField]' } attribute_map = { 'bulk_envelope_status': 'bulkEnvelopeStatus', 'envelope_id': 'envelopeId', 'error_details': 'errorDetails', 'list_custom_field_update_results': 'listCustomFieldUpdateResults', 'lock_information': 'lockInformation', 'purge_state': 'purgeState', 'recipient_update_results': 'recipientUpdateResults', 'tab_update_results': 'tabUpdateResults', 'text_custom_field_update_results': 'textCustomFieldUpdateResults' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._bulk_envelope_status = None self._envelope_id = None self._error_details = None self._list_custom_field_update_results = None self._lock_information = None self._purge_state = None self._recipient_update_results = None self._tab_update_results = None self._text_custom_field_update_results = None self.discriminator = None setattr(self, "_{}".format('bulk_envelope_status'), kwargs.get('bulk_envelope_status', None)) setattr(self, "_{}".format('envelope_id'), kwargs.get('envelope_id', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('list_custom_field_update_results'), kwargs.get('list_custom_field_update_results', None)) setattr(self, "_{}".format('lock_information'), kwargs.get('lock_information', None)) setattr(self, "_{}".format('purge_state'), kwargs.get('purge_state', None)) setattr(self, "_{}".format('recipient_update_results'), kwargs.get('recipient_update_results', None)) setattr(self, "_{}".format('tab_update_results'), kwargs.get('tab_update_results', None)) setattr(self, "_{}".format('text_custom_field_update_results'), kwargs.get('text_custom_field_update_results', None)) @property
MIT License
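bulk_envelope_status is a plain read accessor over state set at construction time. A minimal sketch; the envelope id is a placeholder:

from docusign_esign.models.envelope_update_summary import EnvelopeUpdateSummary

summary = EnvelopeUpdateSummary(envelope_id='abc-123')   # placeholder id
print(summary.bulk_envelope_status)                      # None until the API populates it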
argoproj-labs/argo-client-python
argo/workflows/client/models/v1alpha1_workflow_template_spec.py
V1alpha1WorkflowTemplateSpec.host_aliases
python
def host_aliases(self):
    return self._host_aliases
Gets the host_aliases of this V1alpha1WorkflowTemplateSpec. # noqa: E501 :return: The host_aliases of this V1alpha1WorkflowTemplateSpec. # noqa: E501 :rtype: list[V1HostAlias]
https://github.com/argoproj-labs/argo-client-python/blob/993d684cab39a834770b296e028519cec035c7b5/argo/workflows/client/models/v1alpha1_workflow_template_spec.py#L438-L445
import pprint import re import six from argo.workflows.client.configuration import Configuration class V1alpha1WorkflowTemplateSpec(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'active_deadline_seconds': 'int', 'affinity': 'V1Affinity', 'arguments': 'V1alpha1Arguments', 'artifact_repository_ref': 'V1alpha1ArtifactRepositoryRef', 'automount_service_account_token': 'bool', 'dns_config': 'V1PodDNSConfig', 'dns_policy': 'str', 'entrypoint': 'str', 'executor': 'V1alpha1ExecutorConfig', 'host_aliases': 'list[V1HostAlias]', 'host_network': 'bool', 'image_pull_secrets': 'list[V1LocalObjectReference]', 'metrics': 'V1alpha1Metrics', 'node_selector': 'dict(str, str)', 'on_exit': 'str', 'parallelism': 'int', 'pod_disruption_budget': 'IoK8sApiPolicyV1beta1PodDisruptionBudgetSpec', 'pod_gc': 'V1alpha1PodGC', 'pod_priority': 'int', 'pod_priority_class_name': 'str', 'pod_spec_patch': 'str', 'priority': 'int', 'retry_strategy': 'V1alpha1RetryStrategy', 'scheduler_name': 'str', 'security_context': 'V1PodSecurityContext', 'service_account_name': 'str', 'shutdown': 'str', 'suspend': 'bool', 'synchronization': 'V1alpha1Synchronization', 'templates': 'list[V1alpha1Template]', 'tolerations': 'list[V1Toleration]', 'ttl_seconds_after_finished': 'int', 'ttl_strategy': 'V1alpha1TTLStrategy', 'volume_claim_gc': 'V1alpha1VolumeClaimGC', 'volume_claim_templates': 'list[V1PersistentVolumeClaim]', 'volumes': 'list[V1Volume]', 'workflow_metadata': 'V1ObjectMeta', 'workflow_template_ref': 'V1alpha1WorkflowTemplateRef' } attribute_map = { 'active_deadline_seconds': 'activeDeadlineSeconds', 'affinity': 'affinity', 'arguments': 'arguments', 'artifact_repository_ref': 'artifactRepositoryRef', 'automount_service_account_token': 'automountServiceAccountToken', 'dns_config': 'dnsConfig', 'dns_policy': 'dnsPolicy', 'entrypoint': 'entrypoint', 'executor': 'executor', 'host_aliases': 'hostAliases', 'host_network': 'hostNetwork', 'image_pull_secrets': 'imagePullSecrets', 'metrics': 'metrics', 'node_selector': 'nodeSelector', 'on_exit': 'onExit', 'parallelism': 'parallelism', 'pod_disruption_budget': 'podDisruptionBudget', 'pod_gc': 'podGC', 'pod_priority': 'podPriority', 'pod_priority_class_name': 'podPriorityClassName', 'pod_spec_patch': 'podSpecPatch', 'priority': 'priority', 'retry_strategy': 'retryStrategy', 'scheduler_name': 'schedulerName', 'security_context': 'securityContext', 'service_account_name': 'serviceAccountName', 'shutdown': 'shutdown', 'suspend': 'suspend', 'synchronization': 'synchronization', 'templates': 'templates', 'tolerations': 'tolerations', 'ttl_seconds_after_finished': 'ttlSecondsAfterFinished', 'ttl_strategy': 'ttlStrategy', 'volume_claim_gc': 'volumeClaimGC', 'volume_claim_templates': 'volumeClaimTemplates', 'volumes': 'volumes', 'workflow_metadata': 'workflowMetadata', 'workflow_template_ref': 'workflowTemplateRef' } def __init__(self, active_deadline_seconds=None, affinity=None, arguments=None, artifact_repository_ref=None, automount_service_account_token=None, dns_config=None, dns_policy=None, entrypoint=None, executor=None, host_aliases=None, host_network=None, image_pull_secrets=None, metrics=None, node_selector=None, on_exit=None, parallelism=None, pod_disruption_budget=None, pod_gc=None, pod_priority=None, pod_priority_class_name=None, pod_spec_patch=None, priority=None, retry_strategy=None, scheduler_name=None, 
security_context=None, service_account_name=None, shutdown=None, suspend=None, synchronization=None, templates=None, tolerations=None, ttl_seconds_after_finished=None, ttl_strategy=None, volume_claim_gc=None, volume_claim_templates=None, volumes=None, workflow_metadata=None, workflow_template_ref=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._active_deadline_seconds = None self._affinity = None self._arguments = None self._artifact_repository_ref = None self._automount_service_account_token = None self._dns_config = None self._dns_policy = None self._entrypoint = None self._executor = None self._host_aliases = None self._host_network = None self._image_pull_secrets = None self._metrics = None self._node_selector = None self._on_exit = None self._parallelism = None self._pod_disruption_budget = None self._pod_gc = None self._pod_priority = None self._pod_priority_class_name = None self._pod_spec_patch = None self._priority = None self._retry_strategy = None self._scheduler_name = None self._security_context = None self._service_account_name = None self._shutdown = None self._suspend = None self._synchronization = None self._templates = None self._tolerations = None self._ttl_seconds_after_finished = None self._ttl_strategy = None self._volume_claim_gc = None self._volume_claim_templates = None self._volumes = None self._workflow_metadata = None self._workflow_template_ref = None self.discriminator = None if active_deadline_seconds is not None: self.active_deadline_seconds = active_deadline_seconds if affinity is not None: self.affinity = affinity if arguments is not None: self.arguments = arguments if artifact_repository_ref is not None: self.artifact_repository_ref = artifact_repository_ref if automount_service_account_token is not None: self.automount_service_account_token = automount_service_account_token if dns_config is not None: self.dns_config = dns_config if dns_policy is not None: self.dns_policy = dns_policy if entrypoint is not None: self.entrypoint = entrypoint if executor is not None: self.executor = executor if host_aliases is not None: self.host_aliases = host_aliases if host_network is not None: self.host_network = host_network if image_pull_secrets is not None: self.image_pull_secrets = image_pull_secrets if metrics is not None: self.metrics = metrics if node_selector is not None: self.node_selector = node_selector if on_exit is not None: self.on_exit = on_exit if parallelism is not None: self.parallelism = parallelism if pod_disruption_budget is not None: self.pod_disruption_budget = pod_disruption_budget if pod_gc is not None: self.pod_gc = pod_gc if pod_priority is not None: self.pod_priority = pod_priority if pod_priority_class_name is not None: self.pod_priority_class_name = pod_priority_class_name if pod_spec_patch is not None: self.pod_spec_patch = pod_spec_patch if priority is not None: self.priority = priority if retry_strategy is not None: self.retry_strategy = retry_strategy if scheduler_name is not None: self.scheduler_name = scheduler_name if security_context is not None: self.security_context = security_context if service_account_name is not None: self.service_account_name = service_account_name if shutdown is not None: self.shutdown = shutdown if suspend is not None: self.suspend = suspend if synchronization is not None: self.synchronization = synchronization if templates is not None: self.templates = templates if tolerations 
is not None: self.tolerations = tolerations if ttl_seconds_after_finished is not None: self.ttl_seconds_after_finished = ttl_seconds_after_finished if ttl_strategy is not None: self.ttl_strategy = ttl_strategy if volume_claim_gc is not None: self.volume_claim_gc = volume_claim_gc if volume_claim_templates is not None: self.volume_claim_templates = volume_claim_templates if volumes is not None: self.volumes = volumes if workflow_metadata is not None: self.workflow_metadata = workflow_metadata if workflow_template_ref is not None: self.workflow_template_ref = workflow_template_ref @property def active_deadline_seconds(self): return self._active_deadline_seconds @active_deadline_seconds.setter def active_deadline_seconds(self, active_deadline_seconds): self._active_deadline_seconds = active_deadline_seconds @property def affinity(self): return self._affinity @affinity.setter def affinity(self, affinity): self._affinity = affinity @property def arguments(self): return self._arguments @arguments.setter def arguments(self, arguments): self._arguments = arguments @property def artifact_repository_ref(self): return self._artifact_repository_ref @artifact_repository_ref.setter def artifact_repository_ref(self, artifact_repository_ref): self._artifact_repository_ref = artifact_repository_ref @property def automount_service_account_token(self): return self._automount_service_account_token @automount_service_account_token.setter def automount_service_account_token(self, automount_service_account_token): self._automount_service_account_token = automount_service_account_token @property def dns_config(self): return self._dns_config @dns_config.setter def dns_config(self, dns_config): self._dns_config = dns_config @property def dns_policy(self): return self._dns_policy @dns_policy.setter def dns_policy(self, dns_policy): self._dns_policy = dns_policy @property def entrypoint(self): return self._entrypoint @entrypoint.setter def entrypoint(self, entrypoint): self._entrypoint = entrypoint @property def executor(self): return self._executor @executor.setter def executor(self, executor): self._executor = executor @property
Apache License 2.0
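host_aliases behaves like the other generated accessors: it returns whatever was assigned, or None. A short sketch using only constructor arguments shown in the context:

from argo.workflows.client.models.v1alpha1_workflow_template_spec import (
    V1alpha1WorkflowTemplateSpec,
)

spec = V1alpha1WorkflowTemplateSpec(entrypoint='main')
print(spec.host_aliases)   # None until a list of V1HostAlias objects is assigned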
google/apitools
samples/iam_sample/iam_v1/iam_v1_client.py
IamV1.Update
python
def Update(self, request, global_params=None):
    config = self.GetMethodConfig('Update')
    return self._RunMethod(
        config, request, global_params=global_params)
r"""Updates a ServiceAccount. Currently, only the following fields are updatable: `display_name` . The `etag` is mandatory. Args: request: (ServiceAccount) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (ServiceAccount) The response message.
https://github.com/google/apitools/blob/31cad2d904f356872d2965687e84b2d87ee2cdd3/samples/iam_sample/iam_v1/iam_v1_client.py#L465-L480
from __future__ import absolute_import from apitools.base.py import base_api from samples.iam_sample.iam_v1 import iam_v1_messages as messages class IamV1(base_api.BaseApiClient): MESSAGES_MODULE = messages BASE_URL = u'https://iam.googleapis.com/' MTLS_BASE_URL = u'' _PACKAGE = u'iam' _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform'] _VERSION = u'v1' _CLIENT_ID = '1042881264118.apps.googleusercontent.com' _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b' _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b' _CLIENT_CLASS_NAME = u'IamV1' _URL_VERSION = u'v1' _API_KEY = None def __init__(self, url='', credentials=None, get_credentials=True, http=None, model=None, log_request=False, log_response=False, credentials_args=None, default_global_params=None, additional_http_headers=None, response_encoding=None): url = url or self.BASE_URL super(IamV1, self).__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, log_request=log_request, log_response=log_response, credentials_args=credentials_args, default_global_params=default_global_params, additional_http_headers=additional_http_headers, response_encoding=response_encoding) self.iamPolicies = self.IamPoliciesService(self) self.projects_serviceAccounts_keys = self.ProjectsServiceAccountsKeysService(self) self.projects_serviceAccounts = self.ProjectsServiceAccountsService(self) self.projects = self.ProjectsService(self) self.roles = self.RolesService(self) class IamPoliciesService(base_api.BaseApiService): _NAME = u'iamPolicies' def __init__(self, client): super(IamV1.IamPoliciesService, self).__init__(client) self._upload_configs = { } def GetPolicyDetails(self, request, global_params=None): config = self.GetMethodConfig('GetPolicyDetails') return self._RunMethod( config, request, global_params=global_params) GetPolicyDetails.method_config = lambda: base_api.ApiMethodInfo( http_method=u'POST', method_id=u'iam.iamPolicies.getPolicyDetails', ordered_params=[], path_params=[], query_params=[], relative_path=u'v1/iamPolicies:getPolicyDetails', request_field='<request>', request_type_name=u'GetPolicyDetailsRequest', response_type_name=u'GetPolicyDetailsResponse', supports_download=False, ) class ProjectsServiceAccountsKeysService(base_api.BaseApiService): _NAME = u'projects_serviceAccounts_keys' def __init__(self, client): super(IamV1.ProjectsServiceAccountsKeysService, self).__init__(client) self._upload_configs = { } def Create(self, request, global_params=None): config = self.GetMethodConfig('Create') return self._RunMethod( config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.keys.create', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}/keys', request_field=u'createServiceAccountKeyRequest', request_type_name=u'IamProjectsServiceAccountsKeysCreateRequest', response_type_name=u'ServiceAccountKey', supports_download=False, ) def Delete(self, request, global_params=None): config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}', http_method=u'DELETE', method_id=u'iam.projects.serviceAccounts.keys.delete', ordered_params=[u'name'], path_params=[u'name'], query_params=[], 
relative_path=u'v1/{+name}', request_field='', request_type_name=u'IamProjectsServiceAccountsKeysDeleteRequest', response_type_name=u'Empty', supports_download=False, ) def Get(self, request, global_params=None): config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}', http_method=u'GET', method_id=u'iam.projects.serviceAccounts.keys.get', ordered_params=[u'name'], path_params=[u'name'], query_params=[u'publicKeyType'], relative_path=u'v1/{+name}', request_field='', request_type_name=u'IamProjectsServiceAccountsKeysGetRequest', response_type_name=u'ServiceAccountKey', supports_download=False, ) def List(self, request, global_params=None): config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys', http_method=u'GET', method_id=u'iam.projects.serviceAccounts.keys.list', ordered_params=[u'name'], path_params=[u'name'], query_params=[u'keyTypes'], relative_path=u'v1/{+name}/keys', request_field='', request_type_name=u'IamProjectsServiceAccountsKeysListRequest', response_type_name=u'ListServiceAccountKeysResponse', supports_download=False, ) class ProjectsServiceAccountsService(base_api.BaseApiService): _NAME = u'projects_serviceAccounts' def __init__(self, client): super(IamV1.ProjectsServiceAccountsService, self).__init__(client) self._upload_configs = { } def Create(self, request, global_params=None): config = self.GetMethodConfig('Create') return self._RunMethod( config, request, global_params=global_params) Create.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.create', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}/serviceAccounts', request_field=u'createServiceAccountRequest', request_type_name=u'IamProjectsServiceAccountsCreateRequest', response_type_name=u'ServiceAccount', supports_download=False, ) def Delete(self, request, global_params=None): config = self.GetMethodConfig('Delete') return self._RunMethod( config, request, global_params=global_params) Delete.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}', http_method=u'DELETE', method_id=u'iam.projects.serviceAccounts.delete', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}', request_field='', request_type_name=u'IamProjectsServiceAccountsDeleteRequest', response_type_name=u'Empty', supports_download=False, ) def Get(self, request, global_params=None): config = self.GetMethodConfig('Get') return self._RunMethod( config, request, global_params=global_params) Get.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}', http_method=u'GET', method_id=u'iam.projects.serviceAccounts.get', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}', request_field='', request_type_name=u'IamProjectsServiceAccountsGetRequest', response_type_name=u'ServiceAccount', supports_download=False, ) def GetIamPolicy(self, request, global_params=None): config = self.GetMethodConfig('GetIamPolicy') return 
self._RunMethod( config, request, global_params=global_params) GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:getIamPolicy', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.getIamPolicy', ordered_params=[u'resource'], path_params=[u'resource'], query_params=[], relative_path=u'v1/{+resource}:getIamPolicy', request_field='', request_type_name=u'IamProjectsServiceAccountsGetIamPolicyRequest', response_type_name=u'Policy', supports_download=False, ) def List(self, request, global_params=None): config = self.GetMethodConfig('List') return self._RunMethod( config, request, global_params=global_params) List.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts', http_method=u'GET', method_id=u'iam.projects.serviceAccounts.list', ordered_params=[u'name'], path_params=[u'name'], query_params=[u'pageSize', u'pageToken', u'removeDeletedServiceAccounts'], relative_path=u'v1/{+name}/serviceAccounts', request_field='', request_type_name=u'IamProjectsServiceAccountsListRequest', response_type_name=u'ListServiceAccountsResponse', supports_download=False, ) def SetIamPolicy(self, request, global_params=None): config = self.GetMethodConfig('SetIamPolicy') return self._RunMethod( config, request, global_params=global_params) SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:setIamPolicy', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.setIamPolicy', ordered_params=[u'resource'], path_params=[u'resource'], query_params=[], relative_path=u'v1/{+resource}:setIamPolicy', request_field=u'setIamPolicyRequest', request_type_name=u'IamProjectsServiceAccountsSetIamPolicyRequest', response_type_name=u'Policy', supports_download=False, ) def SignBlob(self, request, global_params=None): config = self.GetMethodConfig('SignBlob') return self._RunMethod( config, request, global_params=global_params) SignBlob.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signBlob', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.signBlob', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}:signBlob', request_field=u'signBlobRequest', request_type_name=u'IamProjectsServiceAccountsSignBlobRequest', response_type_name=u'SignBlobResponse', supports_download=False, ) def SignJwt(self, request, global_params=None): config = self.GetMethodConfig('SignJwt') return self._RunMethod( config, request, global_params=global_params) SignJwt.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signJwt', http_method=u'POST', method_id=u'iam.projects.serviceAccounts.signJwt', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1/{+name}:signJwt', request_field=u'signJwtRequest', request_type_name=u'IamProjectsServiceAccountsSignJwtRequest', response_type_name=u'SignJwtResponse', supports_download=False, ) def TestIamPermissions(self, request, global_params=None): config = self.GetMethodConfig('TestIamPermissions') return self._RunMethod( config, request, global_params=global_params) TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:testIamPermissions', http_method=u'POST', 
method_id=u'iam.projects.serviceAccounts.testIamPermissions', ordered_params=[u'resource'], path_params=[u'resource'], query_params=[], relative_path=u'v1/{+resource}:testIamPermissions', request_field=u'testIamPermissionsRequest', request_type_name=u'IamProjectsServiceAccountsTestIamPermissionsRequest', response_type_name=u'TestIamPermissionsResponse', supports_download=False, )
Apache License 2.0
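Given its ServiceAccount request type, Update is most plausibly invoked through the projects_serviceAccounts service of the generated client. A hedged sketch, assuming application-default credentials are available; the resource name, display name and etag are placeholders (the etag must match the currently stored value for the call to succeed):

from samples.iam_sample.iam_v1 import iam_v1_client
from samples.iam_sample.iam_v1 import iam_v1_messages as messages

client = iam_v1_client.IamV1()

account = messages.ServiceAccount(
    name='projects/my-project/serviceAccounts/bot@my-project.iam.gserviceaccount.com',
    displayName='Renamed bot account',
    etag=b'BwWWja0YfJA=',
)
updated = client.projects_serviceAccounts.Update(account)
print(updated.displayName)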
madra/channels-rest-framework
tests/test_bindings.py
ResourceBindingTestCase._send_and_consume
python
def _send_and_consume(self, channel, data):
    self.client.send_and_consume(channel, data)
    return self._get_next_message()
Helper that sends and consumes message and returns the next message.
https://github.com/madra/channels-rest-framework/blob/89bf75cf220a6498eaf9c9771425586e6dce7b89/tests/test_bindings.py#L35-L38
import json from rest_framework import serializers from rest_framework.exceptions import ValidationError from channels import route, Group from channels.tests import ChannelTestCase, Client, apply_routes from channels_api import bindings from channels_api.settings import api_settings from .models import TestModel class TestModelSerializer(serializers.ModelSerializer): class Meta: model = TestModel fields = ('id', 'name') class TestModelResourceBinding(bindings.ResourceBinding): model = TestModel queryset = TestModel.objects.all() serializer_class = TestModelSerializer stream = 'testmodel' class ResourceBindingTestCase(ChannelTestCase): def setUp(self): super().setUp() self.client = Client()
BSD 2-Clause Simplified License
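_send_and_consume is a test helper, so it would be called from inside a test method of ResourceBindingTestCase after routes have been applied. A heavily hedged sketch: the channel name follows the standard channels test-client convention, and the payload schema (stream/payload keys) is an assumption based on channels_api conventions, not something shown in this excerpt:

# Inside a test method of ResourceBindingTestCase:
response = self._send_and_consume('websocket.receive', {
    'text': json.dumps({
        'stream': 'testmodel',
        'payload': {'action': 'create', 'data': {'name': 'example'}},
    }),
})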
jakartaresearch/earth-vision
earthvision/datasets/spacenet7.py
SpaceNet7.download
python
def download(self):
    if self.data_mode not in self.resources.keys():
        raise ValueError("Unrecognized data_mode")

    # Fetch the tarball for the selected split into the dataset root.
    downloader(self.resources[self.data_mode], self.root)
Download dataset and extract it
https://github.com/jakartaresearch/earth-vision/blob/c285ee82afc7e19d77ad50f35352d14dafce1aed/earthvision/datasets/spacenet7.py#L80-L85
from logging import root import os import sys import shutil import posixpath import numpy as np import pandas as pd import torch import multiprocessing import skimage from torch.utils.data import Dataset from torchvision.transforms import Resize from PIL import Image from .utils import downloader, _load_img from .spacenet7_utils import map_wrapper, make_geojsons_and_masks class SpaceNet7(Dataset): resources = { 'train': 's3://spacenet-dataset/spacenet/SN7_buildings/tarballs/SN7_buildings_train.tar.gz', 'test': 's3://spacenet-dataset/spacenet/SN7_buildings/tarballs/SN7_buildings_test_public.tar.gz'} def __init__(self, root: str, download: bool = False, data_mode: str = 'train'): self.root = root self.data_mode = data_mode self.filename = self.resources.get(data_mode, 'NULL').split('/')[-1] self.dataset_path = os.path.join(root, self.filename) data_mode_folder = {'train': 'train', 'test': 'test_public'} self.folder_name = data_mode_folder.get(data_mode, 'NULL') if not os.path.exists(self.root): os.makedirs(self.root) if download: if self._check_exists(self.dataset_path): raise ValueError("Raw data already exists.") else: self.download() if not self._check_exists(os.path.join(self.root, self.folder_name)): self.extract_file() else: print("Data already extracted.") if self.data_mode == 'train': aois = sorted([f for f in os.listdir(os.path.join(self.root, 'train')) if os.path.isdir(os.path.join(self.root, 'train', f))]) aois_without_mask = [] for aoi in aois: mask_dir = os.path.join(self.root, 'train', aoi, 'masks/') if not self._check_exists(mask_dir): aois_without_mask.append(aoi) if aois_without_mask: print('Generating masks...') self.generate_mask(aois_without_mask) self.img_labels = self.get_path_and_label() def _check_exists(self, obj) -> bool: if os.path.exists(obj): return True else: return False
MIT License
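download() is normally driven through the constructor rather than called directly. A sketch; note that the underlying downloader pulls multi-gigabyte tarballs from S3, so network access, disk space and whatever AWS tooling the downloader relies on are assumed:

from earthvision.datasets.spacenet7 import SpaceNet7

# Downloads SN7_buildings_train.tar.gz into ./spacenet7 (unless already present),
# extracts it, and generates masks for any training AOIs that lack them.
dataset = SpaceNet7(root='./spacenet7', download=True, data_mode='train')
print(len(dataset.img_labels))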
crash-override404/linepy-modified
akad/ShopService.py
Iface.placePurchaseOrderForFreeProduct
python
def placePurchaseOrderForFreeProduct(self, purchaseOrder):
    pass
Parameters: - purchaseOrder
https://github.com/crash-override404/linepy-modified/blob/5bc06174457dfeba9bb2a23187be9b0f2e09dee6/akad/ShopService.py#L469-L475
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException from thrift.protocol.TProtocol import TProtocolException from thrift.TRecursive import fix_spec import sys import logging from .ttypes import * from thrift.Thrift import TProcessor from thrift.transport import TTransport all_structs = [] class Iface(object): def buyCoinProduct(self, paymentReservation): pass def buyFreeProduct(self, receiverMid, productId, messageTemplate, language, country, packageId): pass def buyMustbuyProduct(self, receiverMid, productId, messageTemplate, language, country, packageId, serialNumber): pass def checkCanReceivePresent(self, recipientMid, packageId, language, country): pass def getActivePurchases(self, start, size, language, country): pass def getActivePurchaseVersions(self, start, size, language, country): pass def getCoinProducts(self, appStoreCode, country, language): pass def getCoinProductsByPgCode(self, appStoreCode, pgCode, country, language): pass def getCoinPurchaseHistory(self, request): pass def getCoinUseAndRefundHistory(self, request): pass def getDownloads(self, start, size, language, country): pass def getEventPackages(self, start, size, language, country): pass def getNewlyReleasedPackages(self, start, size, language, country): pass def getPopularPackages(self, start, size, language, country): pass def getPresentsReceived(self, start, size, language, country): pass def getPresentsSent(self, start, size, language, country): pass def getProductList(self, productIdList, language, country): pass def getProductListWithCarrier(self, productIdList, language, country, carrierCode): pass def getProductWithCarrier(self, packageID, language, country, carrierCode): pass def getPurchaseHistory(self, start, size, language, country): pass def getTotalBalance(self, appStoreCode): pass def notifyDownloaded(self, packageId, language): pass def reserveCoinPurchase(self, request): pass def reservePayment(self, paymentReservation): pass def canReceivePresent(self, shopId, productId, locale, recipientMid): pass def getAutoSuggestionShowcase(self, autoSuggestionShowcaseRequest): pass def getOldSticonMapping(self, req): pass def getOwnedProductSummaries(self, shopId, offset, limit, locale): pass def getOwnedProducts(self, shopId, offset, limit, locale): pass def getProductByVersion(self, shopId, productId, productVersion, locale): pass def getProductV2(self, request): pass def getProductValidationScheme(self, shopId, productId, productVersion): pass def getProduct(self, shopId, productId, locale): pass def getProductsByAuthor(self, productListByAuthorRequest): pass def getPurchasedProducts(self, shopId, offset, limit, locale): pass def getReceivedPresents(self, shopId, offset, limit, locale): pass def getRecommendOa(self, req): pass def getRecommendationForUser(self, shopId, offset, limit, locale): pass def getRecommendationList(self, getRecommendationRequest): pass def getSentPresents(self, shopId, offset, limit, locale): pass def getShowcaseV3(self, showcaseRequest): pass def getSuggestDictionarySetting(self, req): pass def getSuggestResourcesV2(self, req): pass def getUpdates(self, shopId, locale): pass def notifyProductEvent(self, shopId, productId, productVersion, productEvent): pass
BSD 3-Clause New or Revised License
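placePurchaseOrderForFreeProduct is only an interface stub here; the generated Thrift client (or a user subclass) supplies the behaviour. A purely illustrative subclass, with a made-up purchase-order payload:

from akad.ShopService import Iface

class LoggingShopService(Iface):
    def placePurchaseOrderForFreeProduct(self, purchaseOrder):
        # Hypothetical implementation: just record the order.
        print("placing purchase order:", purchaseOrder)

LoggingShopService().placePurchaseOrderForFreeProduct({"productId": "1380280"})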
eleme/ruskit
ruskit/cmds/manage.py
delete
python
def delete(args):
    nodes = [ClusterNode.from_uri(n) for n in args.nodes]
    cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster))

    echo("Deleting...")
    for node in nodes:
        cluster.delete_node(node)

    # Block until the cluster has finished forgetting the removed nodes.
    cluster.wait()
Delete nodes from the cluster
https://github.com/eleme/ruskit/blob/2e8c5a3f6a65b8aeb07012b4e2c8ba324d887c3b/ruskit/cmds/manage.py#L63-L72
import datetime import redis import pprint from ruskit import cli from ..cluster import Cluster, ClusterNode from ..utils import echo from ..distribute import print_cluster, gen_distribution from ..utils import timeout_argument from ..health import HealthCheckManager @cli.command @cli.argument("cluster") @timeout_argument def info(args): cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster)) dis = [] for n in cluster.masters: slaves = ','.join([s["addr"] for s in n.slaves(n.name)]) msg = "{} {}:{} {} {}".format(n.name, n.host, n.port, len(n.slots), slaves) dis.append(msg) echo("\n".join(dis)) echo("Masters:", len(cluster.masters)) echo("Instances:", len(cluster.nodes)) echo("Slots:", sum(len(n.slots) for n in cluster.masters)) @cli.command @cli.argument("cluster") @timeout_argument def slowlog(args): cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster)) slow_logs = cluster.get_slow_logs() for master, logs in slow_logs.iteritems(): echo("Node: ", "%s:%s" % (master.host, master.port)) for log in logs: time = datetime.datetime.fromtimestamp(log['start_time']) echo( "\t", time, "%s%s" % (log['duration'], "μs"), repr(log['command']) ) @cli.command @cli.argument("cluster") @timeout_argument def fix(args): cluster = Cluster.from_node(ClusterNode.from_uri(args.cluster)) cluster.fix_open_slots() cluster.fill_slots() @cli.command @cli.argument("cluster") @cli.argument("nodes", nargs='+') @timeout_argument
MIT License
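A minimal usage sketch of the same operation via ruskit's Cluster API, mirroring the `delete` handler above; the node addresses are hypothetical and assume the ruskit package is installed and a Redis Cluster is reachable.

from ruskit.cluster import Cluster, ClusterNode

# Any node already in the cluster can be used to discover the topology.
cluster = Cluster.from_node(ClusterNode.from_uri("127.0.0.1:7000"))

# Remove one node: migrate its slots away, then wait for the cluster to settle.
node = ClusterNode.from_uri("127.0.0.1:7006")
cluster.delete_node(node)
cluster.wait()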
mediafactory/yats
modules/graph/cluster.py
unique
python
def unique(list):
    unique = []
    [unique.append(x) for x in list if x not in unique]
    return unique
Returns a copy of the list without duplicates.
https://github.com/mediafactory/yats/blob/741a4c6b6fa4c691aa85fe0c48b2af0cae822037/modules/graph/cluster.py#L16-L20
from types import FunctionType, LambdaType


def sorted(list, cmp=None, reversed=False):
    list = [x for x in list]
    list.sort(cmp)
    if reversed:
        list.reverse()
    return list
MIT License
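A short behavior sketch for `unique` above: the first occurrence of each element wins and input order is preserved. The one-liner below is an equivalent, more idiomatic construction in Python 3.7+ (plain dicts preserve insertion order); it is not part of the yats source.

items = ["a", "b", "a", "c", "b"]
deduped = list(dict.fromkeys(items))  # same result as unique(items)
assert deduped == ["a", "b", "c"]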
vertexproject/synapse
synapse/lib/thishost.py
get
python
def get(prop):
    return hostinfo.get(prop)
Retrieve a property from the hostinfo dictionary.

Example:

    import synapse.lib.thishost as s_thishost

    if s_thishost.get('platform') == 'windows':
        dostuff()
https://github.com/vertexproject/synapse/blob/a9d62ffacd9cc236ac52f92a734deef55c66ecf3/synapse/lib/thishost.py#L10-L24
import ctypes
import socket

import synapse.lib.thisplat as s_thisplat

hostinfo = s_thisplat.initHostInfo()
hostinfo['ptrsize'] = ctypes.sizeof(ctypes.c_void_p)
hostinfo['hostname'] = socket.gethostname()
Apache License 2.0
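A hedged usage sketch for `synapse.lib.thishost.get` above; it assumes synapse is installed, and the printed values are illustrative only. `hostinfo` is populated once at import time (see the module context), so lookups are plain dict reads.

import synapse.lib.thishost as s_thishost

print(s_thishost.get('hostname'))   # machine hostname, e.g. 'buildbox01'
print(s_thishost.get('ptrsize'))    # pointer size in bytes, e.g. 8
print(s_thishost.get('missing'))    # unknown keys return None (dict.get semantics)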
davidmcclure/textplot
textplot/text.py
Text.tokenize
python
def tokenize(self):
    self.tokens = []
    self.terms = OrderedDict()

    for token in utils.tokenize(self.text):
        if token['unstemmed'] in self.stopwords:
            self.tokens.append(None)
        else:
            self.tokens.append(token)
            offsets = self.terms.setdefault(token['stemmed'], [])
            offsets.append(token['offset'])
Tokenize the text.
https://github.com/davidmcclure/textplot/blob/889b949a637d99097ecec44ed4bfee53b1964dee/textplot/text.py#L72-L95
import os import re import matplotlib.pyplot as plt import textplot.utils as utils import numpy as np import pkgutil from nltk.stem import PorterStemmer from sklearn.neighbors import KernelDensity from collections import OrderedDict, Counter from scipy.spatial import distance from scipy import ndimage from functools import lru_cache class Text: @classmethod def from_file(cls, path): with open(path, 'r', errors='replace') as f: return cls(f.read()) def __init__(self, text, stopwords=None): self.text = text self.load_stopwords(stopwords) self.tokenize() def load_stopwords(self, path): if path: with open(path) as f: self.stopwords = set(f.read().splitlines()) else: self.stopwords = set( pkgutil .get_data('textplot', 'data/stopwords.txt') .decode('utf8') .splitlines() )
MIT License
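A minimal sketch of how `Text.tokenize` is exercised in practice; it assumes textplot and its NLTK dependencies are installed, and the sample sentence is hypothetical. Constructing a Text runs tokenize(), filling `tokens` (with None placeholders for stopwords) and the per-stem offset index in `terms`.

from textplot.text import Text

t = Text("The quick brown fox jumps over the lazy dog. The fox sleeps.")
print(len(t.tokens))        # one entry per token; stopwords appear as None
print(list(t.terms)[:3])    # stemmed terms in order of first appearance
print(t.terms.get('fox'))   # token offsets at which the stem 'fox' occurs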
carla-simulator/ros-bridge
carla_manual_control/src/carla_manual_control/carla_manual_control.py
HUD.gnss_updated
python
def gnss_updated(self, data):
    self.latitude = data.latitude
    self.longitude = data.longitude
    self.update_info_text()
Callback on GNSS position updates.
https://github.com/carla-simulator/ros-bridge/blob/dac9e729b70a3db9da665c1fdb843e96e7e25d04/carla_manual_control/src/carla_manual_control/carla_manual_control.py#L396-L402
from __future__ import print_function import datetime import math from threading import Thread import numpy from transforms3d.euler import quat2euler try: import pygame from pygame.locals import KMOD_CTRL from pygame.locals import KMOD_SHIFT from pygame.locals import K_COMMA from pygame.locals import K_DOWN from pygame.locals import K_ESCAPE from pygame.locals import K_F1 from pygame.locals import K_LEFT from pygame.locals import K_PERIOD from pygame.locals import K_RIGHT from pygame.locals import K_SLASH from pygame.locals import K_SPACE from pygame.locals import K_UP from pygame.locals import K_a from pygame.locals import K_d from pygame.locals import K_h from pygame.locals import K_m from pygame.locals import K_p from pygame.locals import K_q from pygame.locals import K_s from pygame.locals import K_w from pygame.locals import K_b except ImportError: raise RuntimeError('cannot import pygame, make sure pygame package is installed') import ros_compatibility as roscomp from ros_compatibility.node import CompatibleNode from ros_compatibility.qos import QoSProfile, DurabilityPolicy from carla_msgs.msg import CarlaStatus from carla_msgs.msg import CarlaEgoVehicleInfo from carla_msgs.msg import CarlaEgoVehicleStatus from carla_msgs.msg import CarlaEgoVehicleControl from carla_msgs.msg import CarlaLaneInvasionEvent from carla_msgs.msg import CarlaCollisionEvent from nav_msgs.msg import Odometry from sensor_msgs.msg import Image from sensor_msgs.msg import NavSatFix from std_msgs.msg import Bool class ManualControl(CompatibleNode): def __init__(self, resolution): super(ManualControl, self).__init__("ManualControl") self._surface = None self.role_name = self.get_param("role_name", "ego_vehicle") self.hud = HUD(self.role_name, resolution['width'], resolution['height'], self) self.controller = KeyboardControl(self.role_name, self.hud, self) self.image_subscriber = self.new_subscription( Image, "/carla/{}/rgb_view/image".format(self.role_name), self.on_view_image, qos_profile=10) self.collision_subscriber = self.new_subscription( CarlaCollisionEvent, "/carla/{}/collision".format(self.role_name), self.on_collision, qos_profile=10) self.lane_invasion_subscriber = self.new_subscription( CarlaLaneInvasionEvent, "/carla/{}/lane_invasion".format(self.role_name), self.on_lane_invasion, qos_profile=10) def on_collision(self, data): intensity = math.sqrt(data.normal_impulse.x**2 + data.normal_impulse.y**2 + data.normal_impulse.z**2) self.hud.notification('Collision with {} (impulse {})'.format( data.other_actor_id, intensity)) def on_lane_invasion(self, data): text = [] for marking in data.crossed_lane_markings: if marking is CarlaLaneInvasionEvent.LANE_MARKING_OTHER: text.append("Other") elif marking is CarlaLaneInvasionEvent.LANE_MARKING_BROKEN: text.append("Broken") elif marking is CarlaLaneInvasionEvent.LANE_MARKING_SOLID: text.append("Solid") else: text.append("Unknown ") self.hud.notification('Crossed line %s' % ' and '.join(text)) def on_view_image(self, image): array = numpy.frombuffer(image.data, dtype=numpy.dtype("uint8")) array = numpy.reshape(array, (image.height, image.width, 4)) array = array[:, :, :3] array = array[:, :, ::-1] self._surface = pygame.surfarray.make_surface(array.swapaxes(0, 1)) def render(self, game_clock, display): do_quit = self.controller.parse_events(game_clock) if do_quit: return self.hud.tick(game_clock) if self._surface is not None: display.blit(self._surface, (0, 0)) self.hud.render(display) class KeyboardControl(object): def __init__(self, role_name, hud, node): 
self.role_name = role_name self.hud = hud self.node = node self._autopilot_enabled = False self._control = CarlaEgoVehicleControl() self._steer_cache = 0.0 fast_qos = QoSProfile(depth=10) fast_latched_qos = QoSProfile(depth=10, durability=DurabilityPolicy.TRANSIENT_LOCAL) self.vehicle_control_manual_override_publisher = self.node.new_publisher( Bool, "/carla/{}/vehicle_control_manual_override".format(self.role_name), qos_profile=fast_latched_qos) self.vehicle_control_manual_override = False self.auto_pilot_enable_publisher = self.node.new_publisher( Bool, "/carla/{}/enable_autopilot".format(self.role_name), qos_profile=fast_qos) self.vehicle_control_publisher = self.node.new_publisher( CarlaEgoVehicleControl, "/carla/{}/vehicle_control_cmd_manual".format(self.role_name), qos_profile=fast_qos) self.carla_status_subscriber = self.node.new_subscription( CarlaStatus, "/carla/status", self._on_new_carla_frame, qos_profile=10) self.set_autopilot(self._autopilot_enabled) self.set_vehicle_control_manual_override( self.vehicle_control_manual_override) def set_vehicle_control_manual_override(self, enable): self.hud.notification('Set vehicle control manual override to: {}'.format(enable)) self.vehicle_control_manual_override_publisher.publish((Bool(data=enable))) def set_autopilot(self, enable): self.auto_pilot_enable_publisher.publish(Bool(data=enable)) def parse_events(self, clock): for event in pygame.event.get(): if event.type == pygame.QUIT: return True elif event.type == pygame.KEYUP: if self._is_quit_shortcut(event.key): return True elif event.key == K_F1: self.hud.toggle_info() elif event.key == K_h or (event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT): self.hud.help.toggle() elif event.key == K_b: self.vehicle_control_manual_override = not self.vehicle_control_manual_override self.set_vehicle_control_manual_override(self.vehicle_control_manual_override) if event.key == K_q: self._control.gear = 1 if self._control.reverse else -1 elif event.key == K_m: self._control.manual_gear_shift = not self._control.manual_gear_shift self.hud.notification( '%s Transmission' % ('Manual' if self._control.manual_gear_shift else 'Automatic')) elif self._control.manual_gear_shift and event.key == K_COMMA: self._control.gear = max(-1, self._control.gear - 1) elif self._control.manual_gear_shift and event.key == K_PERIOD: self._control.gear = self._control.gear + 1 elif event.key == K_p: self._autopilot_enabled = not self._autopilot_enabled self.set_autopilot(self._autopilot_enabled) self.hud.notification('Autopilot %s' % ('On' if self._autopilot_enabled else 'Off')) if not self._autopilot_enabled and self.vehicle_control_manual_override: self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time()) self._control.reverse = self._control.gear < 0 def _on_new_carla_frame(self, data): if not self._autopilot_enabled and self.vehicle_control_manual_override: try: self.vehicle_control_publisher.publish(self._control) except Exception as error: self.node.logwarn("Could not send vehicle control: {}".format(error)) def _parse_vehicle_keys(self, keys, milliseconds): self._control.throttle = 1.0 if keys[K_UP] or keys[K_w] else 0.0 steer_increment = 5e-4 * milliseconds if keys[K_LEFT] or keys[K_a]: self._steer_cache -= steer_increment elif keys[K_RIGHT] or keys[K_d]: self._steer_cache += steer_increment else: self._steer_cache = 0.0 self._steer_cache = min(0.7, max(-0.7, self._steer_cache)) self._control.steer = round(self._steer_cache, 1) self._control.brake = 1.0 if keys[K_DOWN] or keys[K_s] else 0.0 
self._control.hand_brake = bool(keys[K_SPACE]) @staticmethod def _is_quit_shortcut(key): return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL) class HUD(object): def __init__(self, role_name, width, height, node): self.role_name = role_name self.dim = (width, height) self.node = node font = pygame.font.Font(pygame.font.get_default_font(), 20) fonts = [x for x in pygame.font.get_fonts() if 'mono' in x] default_font = 'ubuntumono' mono = default_font if default_font in fonts else fonts[0] mono = pygame.font.match_font(mono) self._font_mono = pygame.font.Font(mono, 14) self._notifications = FadingText(font, (width, 40), (0, height - 40)) self.help = HelpText(pygame.font.Font(mono, 24), width, height) self._show_info = True self._info_text = [] self.vehicle_status = CarlaEgoVehicleStatus() self.vehicle_status_subscriber = node.new_subscription( CarlaEgoVehicleStatus, "/carla/{}/vehicle_status".format(self.role_name), self.vehicle_status_updated, qos_profile=10) self.vehicle_info = CarlaEgoVehicleInfo() self.vehicle_info_subscriber = node.new_subscription( CarlaEgoVehicleInfo, "/carla/{}/vehicle_info".format(self.role_name), self.vehicle_info_updated, qos_profile=QoSProfile(depth=10, durability=DurabilityPolicy.TRANSIENT_LOCAL)) self.x, self.y, self.z = 0, 0, 0 self.yaw = 0 self.latitude = 0 self.longitude = 0 self.manual_control = False self.gnss_subscriber = node.new_subscription( NavSatFix, "/carla/{}/gnss".format(self.role_name), self.gnss_updated, qos_profile=10) self.odometry_subscriber = node.new_subscription( Odometry, "/carla/{}/odometry".format(self.role_name), self.odometry_updated, qos_profile=10 ) self.manual_control_subscriber = node.new_subscription( Bool, "/carla/{}/vehicle_control_manual_override".format(self.role_name), self.manual_control_override_updated, qos_profile=10) self.carla_status = CarlaStatus() self.status_subscriber = node.new_subscription( CarlaStatus, "/carla/status", self.carla_status_updated, qos_profile=10) def tick(self, clock): self._notifications.tick(clock) def carla_status_updated(self, data): self.carla_status = data self.update_info_text() def manual_control_override_updated(self, data): self.manual_control = data.data self.update_info_text() def vehicle_status_updated(self, vehicle_status): self.vehicle_status = vehicle_status self.update_info_text() def vehicle_info_updated(self, vehicle_info): self.vehicle_info = vehicle_info self.update_info_text()
MIT License
dokklib/dokklib-db
dokklib_db/table.py
Table._strip_prefixes
python
def _strip_prefixes(cls, item: Dict[str, Any]) -> ItemResult:
    item_copy = copy.deepcopy(item)
    for k, v in item_copy.items():
        if isinstance(v, str):
            item_copy[k] = cls._remove_entity_prefix(v)
    return item_copy
Strip entity prefixes from a DB item.
https://github.com/dokklib/dokklib-db/blob/96d43fc39a6f6411591d2c0ee5e99b4314938e53/dokklib_db/table.py#L90-L96
import copy import re from contextlib import contextmanager from typing import Any, Dict, Iterable, Iterator, List, Mapping, NamedTuple, Optional, Tuple, Type, Union, cast import boto3 import boto3.dynamodb.conditions as cond import botocore.client import botocore.exceptions as botoex import dokklib_db.errors as err from dokklib_db.index import GlobalIndex, GlobalSecondaryIndex, PrimaryGlobalIndex from dokklib_db.keys import PartitionKey, PrefixSortKey, PrimaryKey, SortKey from dokklib_db.op_args import Attributes, DeleteArg, GetArg, InsertArg, OpArg, PutArg, QueryArg, UpdateArg from dokklib_db.serializer import Serializer ItemResult = Mapping[str, Any] class BatchGetResult(NamedTuple): items: List[ItemResult] unprocessed_keys: List[PrimaryKey] class Table: @staticmethod def _get_error_code(error: botoex.ClientError) -> str: db_error = error.response.get('Error', {}) return cast(str, db_error.get('Code', 'None')) @staticmethod def _remove_entity_prefix(string: str) -> str: pattern = r'^[A-Z0-9_]+#(.+)$' match = re.match(pattern, string) if match: return match.group(1) else: return string @classmethod @contextmanager def _dispatch_transaction_error(cls, op_args: List[OpArg]) -> Iterator[None]: try: yield None except botoex.ClientError as e: code = cls._get_error_code(e) if code == 'TransactionCanceledException': raise err.TransactionCanceledException(op_args, str(e), e.response, e.operation_name) else: raise cls._get_exception(e) @classmethod @contextmanager def _dispatch_error(cls) -> Iterator[None]: try: yield None except botoex.ClientError as e: raise cls._get_exception(e) @classmethod def _get_exception(cls, error: botoex.ClientError) -> err.ClientError: code = cls._get_error_code(error) try: ex_class = cast(Type[err.ClientError], getattr(err, code)) except AttributeError: ex_class = err.ClientError return ex_class(str(error), error.response, error.operation_name) @classmethod
Apache License 2.0
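A self-contained sketch reproducing the prefix-stripping behavior of `Table._strip_prefixes` above, using the same regex shown in the module context; the item keys and values are made up for illustration.

import copy
import re

def remove_entity_prefix(string):
    # Mirrors Table._remove_entity_prefix: drop a leading 'ENTITY#' tag.
    match = re.match(r'^[A-Z0-9_]+#(.+)$', string)
    return match.group(1) if match else string

item = {'PK': 'USER#alice', 'SK': 'PROJECT#demo', 'created': 1700000000}
stripped = {k: remove_entity_prefix(v) if isinstance(v, str) else v
            for k, v in copy.deepcopy(item).items()}
assert stripped == {'PK': 'alice', 'SK': 'demo', 'created': 1700000000}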
open-mmlab/mmediting
mmedit/models/common/gca_module.py
GCAModule.process_unknown_mask
python
def process_unknown_mask(self, unknown, img_feat, softmax_scale):
    n, _, h, w = img_feat.shape
    if unknown is not None:
        unknown = unknown.clone()
        unknown = F.interpolate(
            unknown, scale_factor=1 / self.rate, mode=self.interpolation)
        unknown_mean = unknown.mean(dim=[2, 3])
        known_mean = 1 - unknown_mean
        unknown_scale = torch.clamp(
            torch.sqrt(unknown_mean / known_mean), 0.1, 10).to(img_feat)
        known_scale = torch.clamp(
            torch.sqrt(known_mean / unknown_mean), 0.1, 10).to(img_feat)
        softmax_scale = torch.cat([unknown_scale, known_scale], dim=1)
    else:
        unknown = torch.ones((n, 1, h, w)).to(img_feat)
        softmax_scale = torch.FloatTensor(
            [softmax_scale, softmax_scale]).view(1, 2).repeat(n, 1).to(img_feat)
    return unknown, softmax_scale
Process unknown mask.

Args:
    unknown (Tensor, optional): Unknown area map generated by trimap of
        shape (N, 1, ori_h, ori_w).
    img_feat (Tensor): The interpolated image feature map of shape
        (N, img_c, img_h, img_w).
    softmax_scale (float, optional): The softmax scale of the attention
        if unknown area is not provided in forward. Default: 1.

Returns:
    tuple: 2-tuple of
        ``Tensor``: Interpolated unknown area map of shape
            (N, img_h*img_w, img_h, img_w).
        ``Tensor``: Softmax scale tensor of known and unknown area of
            shape (N, 2).
https://github.com/open-mmlab/mmediting/blob/2afa00d8cf13582ec9011e5029a07814db6707d8/mmedit/models/common/gca_module.py#L274-L313
import torch import torch.nn as nn from mmcv.cnn import ConvModule, constant_init, xavier_init from torch.nn import functional as F class GCAModule(nn.Module): def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, rate=2, pad_args=dict(mode='reflect'), interpolation='nearest', penalty=-1e4, eps=1e-4): super().__init__() self.kernel_size = kernel_size self.stride = stride self.rate = rate self.pad_args = pad_args self.interpolation = interpolation self.penalty = penalty self.eps = eps self.guidance_conv = nn.Conv2d(in_channels, in_channels // 2, 1) self.out_conv = ConvModule( out_channels, out_channels, 1, norm_cfg=dict(type='BN'), act_cfg=None) self.init_weights() def init_weights(self): xavier_init(self.guidance_conv, distribution='uniform') xavier_init(self.out_conv.conv, distribution='uniform') constant_init(self.out_conv.norm, 1e-3) def forward(self, img_feat, alpha_feat, unknown=None, softmax_scale=1.): if alpha_feat.shape[2:4] != img_feat.shape[2:4]: raise ValueError( 'image feature size does not align with alpha feature size: ' f'image feature size {img_feat.shape[2:4]}, ' f'alpha feature size {alpha_feat.shape[2:4]}') if unknown is not None and unknown.shape[2:4] != img_feat.shape[2:4]: raise ValueError( 'image feature size does not align with unknown mask size: ' f'image feature size {img_feat.shape[2:4]}, ' f'unknown mask size {unknown.shape[2:4]}') img_feat = self.guidance_conv(img_feat) img_feat = F.interpolate( img_feat, scale_factor=1 / self.rate, mode=self.interpolation) unknown, softmax_scale = self.process_unknown_mask( unknown, img_feat, softmax_scale) img_ps, alpha_ps, unknown_ps = self.extract_feature_maps_patches( img_feat, alpha_feat, unknown) self_mask = self.get_self_correlation_mask(img_feat) img_groups = torch.split(img_feat, 1, dim=0) img_ps_groups = torch.split(img_ps, 1, dim=0) alpha_ps_groups = torch.split(alpha_ps, 1, dim=0) unknown_ps_groups = torch.split(unknown_ps, 1, dim=0) scale_groups = torch.split(softmax_scale, 1, dim=0) groups = (img_groups, img_ps_groups, alpha_ps_groups, unknown_ps_groups, scale_groups) out = [] for img_i, img_ps_i, alpha_ps_i, unknown_ps_i, scale_i in zip(*groups): similarity_map = self.compute_similarity_map(img_i, img_ps_i) gca_score = self.compute_guided_attention_score( similarity_map, unknown_ps_i, scale_i, self_mask) out_i = self.propagate_alpha_feature(gca_score, alpha_ps_i) out.append(out_i) out = torch.cat(out, dim=0) out.reshape_as(alpha_feat) out = self.out_conv(out) + alpha_feat return out def extract_feature_maps_patches(self, img_feat, alpha_feat, unknown): img_ks = self.kernel_size img_ps = self.extract_patches(img_feat, img_ks, self.stride) alpha_ps = self.extract_patches(alpha_feat, self.rate * 2, self.rate) unknown_ps = self.extract_patches(unknown, img_ks, self.stride) unknown_ps = unknown_ps.squeeze(dim=2) unknown_ps = unknown_ps.mean(dim=[2, 3], keepdim=True) return img_ps, alpha_ps, unknown_ps def compute_similarity_map(self, img_feat, img_ps): img_ps = img_ps[0] escape_NaN = torch.FloatTensor([self.eps]).to(img_feat) img_ps_normed = img_ps / torch.max(self.l2_norm(img_ps), escape_NaN) img_feat = self.pad(img_feat, self.kernel_size, self.stride) similarity_map = F.conv2d(img_feat, img_ps_normed) return similarity_map def compute_guided_attention_score(self, similarity_map, unknown_ps, scale, self_mask): unknown_scale, known_scale = scale[0] out = similarity_map * ( unknown_scale * unknown_ps.gt(0.).float() + known_scale * unknown_ps.le(0.).float()) out = out + self_mask * unknown_ps gca_score 
= F.softmax(out, dim=1) return gca_score def propagate_alpha_feature(self, gca_score, alpha_ps): alpha_ps = alpha_ps[0] if self.rate == 1: gca_score = self.pad(gca_score, kernel_size=2, stride=1) alpha_ps = alpha_ps.permute(1, 0, 2, 3) out = F.conv2d(gca_score, alpha_ps) / 4. else: out = F.conv_transpose2d( gca_score, alpha_ps, stride=self.rate, padding=1) / 4. return out
Apache License 2.0
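A standalone sketch of the scale computation performed by `process_unknown_mask` when an unknown mask is supplied (the else branch simply broadcasts a constant scale). It assumes PyTorch is installed; the 8x8 mask is illustrative. Samples with more unknown area get a larger unknown_scale and a smaller known_scale, both clamped to [0.1, 10] as in the method above.

import torch

unknown = torch.zeros(1, 1, 8, 8)
unknown[..., :2, :] = 1.0                      # 25% of the map marked unknown

unknown_mean = unknown.mean(dim=[2, 3])        # shape (N, 1)
known_mean = 1 - unknown_mean
unknown_scale = torch.clamp(torch.sqrt(unknown_mean / known_mean), 0.1, 10)
known_scale = torch.clamp(torch.sqrt(known_mean / unknown_mean), 0.1, 10)
softmax_scale = torch.cat([unknown_scale, known_scale], dim=1)   # shape (N, 2)
print(softmax_scale)   # tensor([[0.5774, 1.7321]]) for a 25% unknown mask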
hyde/hyde
hyde/ext/plugins/meta.py
Group.set_expando
python
def set_expando(self, key, value):
    if key == "groups":
        self.groups = [Group(group, parent=self) for group in value]
    else:
        return super(Group, self).set_expando(key, value)
If the key is groups, creates group objects instead of regular expando objects.
https://github.com/hyde/hyde/blob/7f415402cc3e007a746eb2b5bc102281fdb415bd/hyde/ext/plugins/meta.py#L611-L619
from collections import namedtuple from functools import partial from operator import attrgetter import re import sys from hyde._compat import basestring, filter, iteritems, str from hyde.exceptions import HydeException from hyde.model import Expando from hyde.plugin import Plugin from hyde.site import Node, Resource from hyde.util import add_method, add_property, pairwalk from fswrap import File, Folder import yaml class Metadata(Expando): def __init__(self, data, parent=None): super(Metadata, self).__init__({}) if parent: self.update(parent.__dict__) if data: self.update(data) def update(self, data): if isinstance(data, basestring): super(Metadata, self).update(yaml.load(data)) else: super(Metadata, self).update(data) class MetaPlugin(Plugin): def __init__(self, site): super(MetaPlugin, self).__init__(site) self.yaml_finder = re.compile( r"^\s*(?:---|===)\s*\n((?:.|\n)+?)\n\s*(?:---|===)\s*\n*", re.MULTILINE) def begin_site(self): config = self.site.config metadata = config.meta if hasattr(config, 'meta') else {} self.site.meta = Metadata(metadata) self.nodemeta = 'nodemeta.yaml' if hasattr(self.site.meta, 'nodemeta'): self.nodemeta = self.site.meta.nodemeta for node in self.site.content.walk(): self.__read_node__(node) for resource in node.resources: if not hasattr(resource, 'meta'): resource.meta = Metadata({}, node.meta) if resource.source_file.is_text and not resource.simple_copy: self.__read_resource__( resource, resource.source_file.read_all()) def __read_resource__(self, resource, text): self.logger.debug( "Trying to load metadata from resource [%s]" % resource) match = re.match(self.yaml_finder, text) if not match: self.logger.debug("No metadata found in resource [%s]" % resource) data = {} else: text = text[match.end():] data = match.group(1) if not hasattr(resource, 'meta') or not resource.meta: if not hasattr(resource.node, 'meta'): resource.node.meta = Metadata({}) resource.meta = Metadata(data, resource.node.meta) else: resource.meta.update(data) self.__update_standard_attributes__(resource) self.logger.debug("Successfully loaded metadata from resource [%s]" % resource) return text or ' ' def __update_standard_attributes__(self, obj): if not hasattr(obj, 'meta'): return standard_attributes = ['is_processable', 'uses_template'] for attr in standard_attributes: if hasattr(obj.meta, attr): setattr(obj, attr, getattr(obj.meta, attr)) def __read_node__(self, node): nodemeta = node.get_resource(self.nodemeta) parent_meta = node.parent.meta if node.parent else self.site.meta if nodemeta: nodemeta.is_processable = False metadata = nodemeta.source_file.read_all() if hasattr(node, 'meta') and node.meta: node.meta.update(metadata) else: node.meta = Metadata(metadata, parent=parent_meta) else: node.meta = Metadata({}, parent=parent_meta) self.__update_standard_attributes__(node) def begin_node(self, node): self.__read_node__(node) def begin_text_resource(self, resource, text): return self.__read_resource__(resource, text) class AutoExtendPlugin(Plugin): def __init__(self, site): super(AutoExtendPlugin, self).__init__(site) def begin_text_resource(self, resource, text): if not resource.uses_template: return text layout = None block = None try: layout = resource.meta.extends except AttributeError: pass try: block = resource.meta.default_block except AttributeError: pass if layout: self.logger.debug("Autoextending %s with %s" % ( resource.relative_path, layout)) extends_pattern = self.template.patterns['extends'] if not re.search(extends_pattern, text): extended_text = 
self.template.get_extends_statement(layout) extended_text += '\n' if block: extended_text += ('%s\n%s\n%s' % (self.t_block_open_tag(block), text, self.t_block_close_tag(block))) else: extended_text += text return extended_text return text class Tag(Expando): def __init__(self, name): self.name = name self.resources = [] def __repr__(self): return self.name def __str__(self): return self.name def get_tagger_sort_method(site): config = site.config content = site.content walker = 'walk_resources' sorter = None try: sorter = attrgetter('tagger.sorter')(config) walker = walker + '_sorted_by_%s' % sorter except AttributeError: pass try: walker = getattr(content, walker) except AttributeError: HydeException.reraise( "Cannot find the sorter: %s" % sorter, sys.exc_info()) return walker def walk_resources_tagged_with(node, tag): tags = set(str(tag).split('+')) walker = get_tagger_sort_method(node.site) for resource in walker(): try: taglist = set(attrgetter("meta.tags")(resource)) except AttributeError: continue if tags <= taglist: yield resource class TaggerPlugin(Plugin): def __init__(self, site): super(TaggerPlugin, self).__init__(site) def begin_site(self): self.logger.debug("Adding tags from metadata") tags = {} add_method(Node, 'walk_resources_tagged_with', walk_resources_tagged_with) walker = get_tagger_sort_method(self.site) for resource in walker(): self._process_tags_in_resource(resource, tags) self._process_tag_metadata(tags) self.site.tagger = Expando(dict(tags=tags)) self._generate_archives() def _process_tag_metadata(self, tags): try: tag_meta = self.site.config.tagger.tags.to_dict() except AttributeError: tag_meta = {} for tagname, meta in iteritems(tag_meta): if 'resources' in meta: del(meta['resources']) if 'name' in meta: del(meta['name']) if tagname in tags: tags[tagname].update(meta) def _process_tags_in_resource(self, resource, tags): try: taglist = attrgetter("meta.tags")(resource) except AttributeError: return for tagname in taglist: if tagname not in tags: tag = Tag(tagname) tags[tagname] = tag tag.resources.append(resource) add_method(Node, 'walk_resources_tagged_with_%s' % tagname, walk_resources_tagged_with, tag=tag) else: tags[tagname].resources.append(resource) if not hasattr(resource, 'tags'): setattr(resource, 'tags', []) resource.tags.append(tags[tagname]) def _generate_archives(self): archive_config = None try: archive_config = attrgetter("tagger.archives")(self.site.config) except AttributeError: return self.logger.debug("Generating archives for tags") for name, config in iteritems(archive_config.to_dict()): self._create_tag_archive(config) def _create_tag_archive(self, config): if 'template' not in config: raise HydeException( "No Template specified in tagger configuration.") content = self.site.content.source_folder source = Folder(config.get('source', '')) target = content.child_folder(config.get('target', 'tags')) if not target.exists: target.make() meta = config.get('meta', {}) meta_text = u'' if meta: import yaml meta_text = yaml.dump(meta, default_flow_style=False) extension = config.get('extension', 'html') template = config['template'] archive_text = u""" --- extends: false %(meta)s --- {%% set tag = site.tagger.tags['%(tag)s'] %%} {%% set source = site.content.node_from_relative_path('%(node)s') %%} {%% set walker = source['walk_resources_tagged_with_%(tag)s'] %%} {%% extends "%(template)s" %%} """ for tagname, tag in iteritems(self.site.tagger.tags.to_dict()): tag_data = { "tag": tagname, "node": source.name, "template": template, "meta": meta_text } text 
= archive_text % tag_data archive_file = File(target.child("%s.%s" % (tagname, extension))) archive_file.delete() archive_file.write(text.strip()) self.site.content.add_resource(archive_file) def filter_method(item, settings=None): all_match = True default_filters = {} filters = {} if hasattr(settings, 'filters'): filters.update(default_filters) filters.update(settings.filters.__dict__) for field, value in filters.items(): try: res = attrgetter(field)(item) except: res = None if res != value: all_match = False break return all_match def attributes_checker(item, attributes=None): try: attrgetter(*attributes)(item) return True except AttributeError: return False def sort_method(node, settings=None): attr = 'name' if settings and hasattr(settings, 'attr') and settings.attr: attr = settings.attr reverse = False if settings and hasattr(settings, 'reverse'): reverse = settings.reverse if not isinstance(attr, list): attr = [attr] filter_ = partial(filter_method, settings=settings) excluder_ = partial(attributes_checker, attributes=attr) resources = filter(lambda x: excluder_(x) and filter_(x), node.walk_resources()) return sorted(resources, key=attrgetter(*attr), reverse=reverse) class SorterPlugin(Plugin): def __init__(self, site): super(SorterPlugin, self).__init__(site) def begin_site(self): config = self.site.config if not hasattr(config, 'sorter'): return for name, settings in config.sorter.__dict__.items(): sort_method_name = 'walk_resources_sorted_by_%s' % name self.logger.debug("Adding sort methods for [%s]" % name) add_method(Node, sort_method_name, sort_method, settings=settings) match_method_name = 'is_%s' % name add_method(Resource, match_method_name, filter_method, settings) prev_att = 'prev_by_%s' % name next_att = 'next_by_%s' % name setattr(Resource, prev_att, None) setattr(Resource, next_att, None) walker = getattr(self.site.content, sort_method_name, self.site.content.walk_resources) first, last = None, None for prev, next in pairwalk(walker()): if not first: first = prev last = next setattr(prev, next_att, next) setattr(next, prev_att, prev) try: circular = settings.circular except AttributeError: circular = False if circular and first: setattr(first, prev_att, last) setattr(last, next_att, first) Grouper = namedtuple('Grouper', 'group resources') class Group(Expando): def __init__(self, grouping, parent=None): self.name = 'groups' self.parent = parent self.root = self self.root = parent.root if parent else self self.groups = [] self.sorter = getattr(grouping, 'sorter', None) if hasattr(parent, 'sorter'): self.sorter = parent.sorter super(Group, self).__init__(grouping) add_method(Node, 'walk_%s_groups' % self.name, Group.walk_groups_in_node, group=self) add_method(Node, 'walk_resources_grouped_by_%s' % self.name, Group.walk_resources, group=self) add_property(Resource, '%s_group' % self.name, Group.get_resource_group, group=self) add_method(Resource, 'walk_%s_groups' % self.name, Group.walk_resource_groups, group=self)
MIT License
nsls-ii/pyxrf
pyxrf/gui_module/tab_wd_plots_rgb_maps.py
PlotRgbMaps.slot_update_ranges
python
def slot_update_ranges(self):
    range_table, limit_table, _ = self.gpc.get_rgb_maps_info_table()
    self.rgb_selection.set_ranges_and_limits(range_table=range_table, limit_table=limit_table)
Update only ranges and selections for the emission lines
https://github.com/nsls-ii/pyxrf/blob/0aa4e175f541edfaa8f71daf54b54a07e4ab2b04/pyxrf/gui_module/tab_wd_plots_rgb_maps.py#L179-L182
from qtpy.QtWidgets import QWidget, QVBoxLayout, QHBoxLayout, QComboBox, QCheckBox, QSpacerItem from qtpy.QtCore import Signal, Slot from .useful_widgets import set_tooltip, global_gui_variables from matplotlib.backends.backend_qt5agg import ( FigureCanvasQTAgg as FigureCanvas, NavigationToolbar2QT as NavigationToolbar, ) from .wd_rgb_selection import RgbSelectionWidget import logging logger = logging.getLogger(__name__) class PlotRgbMaps(QWidget): signal_rgb_maps_dataset_selection_changed = Signal() signal_rgb_maps_norm_changed = Signal() signal_redraw_rgb_maps = Signal() def __init__(self, *, gpc, gui_vars): super().__init__() self._enable_plot_updates = True self._changes_exist = False self._enable_events = False self.gpc = gpc self.gui_vars = gui_vars self.combo_select_dataset = QComboBox() self.combo_select_dataset.setSizeAdjustPolicy(QComboBox.AdjustToContents) self.combo_normalization = QComboBox() self.cb_interpolate = QCheckBox("Interpolate") self.cb_interpolate.setChecked(self.gpc.get_rgb_maps_grid_interpolate()) self.cb_interpolate.toggled.connect(self.cb_interpolate_toggled) self.cb_quantitative = QCheckBox("Quantitative") self.cb_quantitative.setChecked(self.gpc.get_maps_quant_norm_enabled()) self.cb_quantitative.toggled.connect(self.cb_quantitative_toggled) self.combo_pixels_positions = QComboBox() self._pix_pos_values = ["Pixels", "Positions"] self.combo_pixels_positions.addItems(self._pix_pos_values) self.combo_pixels_positions.setCurrentIndex(self._pix_pos_values.index(self.gpc.get_maps_pixel_or_pos())) self.combo_pixels_positions.currentIndexChanged.connect(self.combo_pixels_positions_current_index_changed) self.mpl_canvas = FigureCanvas(self.gpc.img_model_rgb.fig) self.mpl_toolbar = NavigationToolbar(self.mpl_canvas, self) sp_retain = self.mpl_canvas.sizePolicy() sp_retain.setRetainSizeWhenHidden(True) self.mpl_canvas.setSizePolicy(sp_retain) self.rgb_selection = RgbSelectionWidget() self.slot_update_dataset_info() self.rgb_selection.signal_update_map_selections.connect(self._update_map_selections) self.widgets_enable_events(True) vbox = QVBoxLayout() hbox = QHBoxLayout() hbox.addWidget(self.combo_select_dataset) hbox.addWidget(self.combo_normalization) hbox.addStretch(1) hbox.addWidget(self.cb_quantitative) hbox.addWidget(self.cb_interpolate) hbox.addWidget(self.combo_pixels_positions) vbox.addLayout(hbox) vbox.addWidget(self.mpl_toolbar) vbox.addWidget(self.mpl_canvas) hbox = QHBoxLayout() hbox.addSpacerItem(QSpacerItem(0, 0)) hbox.addWidget(self.rgb_selection) hbox.addSpacerItem(QSpacerItem(0, 0)) vbox.addLayout(hbox) self.setLayout(vbox) self._set_tooltips() def _set_tooltips(self): set_tooltip(self.combo_select_dataset, "Select <b>dataset</b>.") set_tooltip(self.combo_normalization, "Select <b>scaler</b> for normalization of displayed XRF maps.") set_tooltip(self.cb_interpolate, "Interpolate coordinates to <b>uniform grid</b>.") set_tooltip( self.cb_quantitative, "Normalize the displayed XRF maps using loaded <b>Quantitative Calibration</b> data.", ) set_tooltip( self.combo_pixels_positions, "Switch axes units between <b>pixels</b> and <b>positional units</b>." 
) set_tooltip( self.rgb_selection, "Select XRF Maps displayed in <b>Red</b>, <b>Green</b> and " "<b>Blue</b> colors and adjust the range of <b>intensity</b> for each " "displayed map.", ) def widgets_enable_events(self, status): if status: if not self._enable_events: self.combo_select_dataset.currentIndexChanged.connect( self.combo_select_dataset_current_index_changed ) self.combo_normalization.currentIndexChanged.connect( self.combo_normalization_current_index_changed ) self._enable_events = True else: if self._enable_events: self.combo_select_dataset.currentIndexChanged.disconnect( self.combo_select_dataset_current_index_changed ) self.combo_normalization.currentIndexChanged.disconnect( self.combo_normalization_current_index_changed ) self._enable_events = False def update_widget_state(self, condition=None): if condition == "tooltips": self._set_tooltips() self.mpl_toolbar.setVisible(self.gui_vars["show_matplotlib_toolbar"]) state_compute = global_gui_variables["gui_state"]["running_computations"] self.mpl_canvas.setVisible(not state_compute) def combo_select_dataset_current_index_changed(self, index): self.gpc.set_rgb_maps_selected_dataset(index + 1) self._update_dataset() self.signal_rgb_maps_dataset_selection_changed.emit() @Slot() def combo_select_dataset_update_current_index(self): index = self.gpc.get_rgb_maps_selected_dataset() self.combo_select_dataset.setCurrentIndex(index - 1) def combo_normalization_current_index_changed(self, index): self.gpc.set_rgb_maps_scaler_index(index) self.slot_update_ranges() self.signal_rgb_maps_norm_changed.emit() def combo_pixels_positions_current_index_changed(self, index): self.gpc.set_rgb_maps_pixel_or_pos(self._pix_pos_values[index]) def cb_interpolate_toggled(self, state): self.gpc.set_rgb_maps_grid_interpolate(state) def cb_quantitative_toggled(self, state): self.gpc.set_rgb_maps_quant_norm_enabled(state) self.slot_update_ranges() self.signal_rgb_maps_norm_changed.emit() @Slot() def slot_update_dataset_info(self): self._update_dataset_list() self._update_dataset() self.cb_quantitative.setChecked(self.gpc.get_rgb_maps_quant_norm_enabled()) def _update_dataset(self): self._update_scalers() range_table, limit_table, rgb_dict = self.gpc.get_rgb_maps_info_table() self.rgb_selection.set_ranges_and_limits( range_table=range_table, limit_table=limit_table, rgb_dict=rgb_dict ) @Slot()
BSD 3-Clause New or Revised License
docusign/docusign-python-client
docusign_esign/models/address_information_input.py
AddressInformationInput.receive_in_response
python
def receive_in_response(self, receive_in_response):
    self._receive_in_response = receive_in_response
Sets the receive_in_response of this AddressInformationInput. When set to **true**, the information needs to be returned in the response. # noqa: E501 :param receive_in_response: The receive_in_response of this AddressInformationInput. # noqa: E501 :type: str
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/address_information_input.py#L118-L127
import pprint import re import six from docusign_esign.client.configuration import Configuration class AddressInformationInput(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'address_information': 'AddressInformation', 'display_level_code': 'str', 'receive_in_response': 'str' } attribute_map = { 'address_information': 'addressInformation', 'display_level_code': 'displayLevelCode', 'receive_in_response': 'receiveInResponse' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._address_information = None self._display_level_code = None self._receive_in_response = None self.discriminator = None setattr(self, "_{}".format('address_information'), kwargs.get('address_information', None)) setattr(self, "_{}".format('display_level_code'), kwargs.get('display_level_code', None)) setattr(self, "_{}".format('receive_in_response'), kwargs.get('receive_in_response', None)) @property def address_information(self): return self._address_information @address_information.setter def address_information(self, address_information): self._address_information = address_information @property def display_level_code(self): return self._display_level_code @display_level_code.setter def display_level_code(self, display_level_code): self._display_level_code = display_level_code @property def receive_in_response(self): return self._receive_in_response @receive_in_response.setter
MIT License
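A minimal usage sketch for the `receive_in_response` setter above; it assumes the docusign_esign package is installed. The swagger type for this field is str, so the flag is passed as the string "true" rather than a Python boolean.

from docusign_esign.models.address_information_input import AddressInformationInput

info = AddressInformationInput()
info.receive_in_response = "true"   # stored on the private attribute by the setter
print(info.receive_in_response)     # -> 'true'; mapped to 'receiveInResponse' in the JSON body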