repository_name        stringlengths (7 to 107)
function_path          stringlengths (4 to 190)
function_identifier    stringlengths (1 to 236)
language               stringclasses (1 value)
function               stringlengths (9 to 647k)
docstring              stringlengths (5 to 488k)
function_url           stringlengths (71 to 285)
context                stringlengths (0 to 2.51M)
license                stringclasses (5 values)
666dzy666/micronet
micronet/compression/quantization/wbwtab/quantize.py
BinaryActivation.backward
python
def backward(self, grad_output):
    (input,) = self.saved_tensors
    grad_input = grad_output.clone()
    grad_input[input.ge(1.0)] = 0
    grad_input[input.le(-1.0)] = 0
    return grad_input
    # ******************soft_ste*****************
    # Unreachable alternative (soft STE) kept in the source after the return above:
    # size = input.size()
    # zeros = torch.zeros(size).cuda()
    # grad = torch.max(zeros, 1 - torch.abs(input))
    # grad_input = grad_output * grad
https://github.com/666dzy666/micronet/blob/c31cdd28ed72376e861704addf718ac485919621/micronet/compression/quantization/wbwtab/quantize.py#L22-L36
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function


class BinaryActivation(Function):
    @staticmethod
    def forward(self, input):
        self.save_for_backward(input)
        output = torch.sign(input)
        output[output == 0] = 1
        return output

    @staticmethod
MIT License
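A minimal usage sketch for the straight-through-estimator activation in the row above. It assumes the micronet repo is importable from its package path; the input tensor and the printed checks are illustrative.

import torch
from micronet.compression.quantization.wbwtab.quantize import BinaryActivation

x = torch.randn(4, 8, requires_grad=True)

# Forward pass: sign(x) with zeros mapped to +1 (see BinaryActivation.forward).
y = BinaryActivation.apply(x)
y.sum().backward()

print(y.unique())              # expected: tensor([-1., 1.])
print(x.grad[x.abs() >= 1.0])  # all zeros: the hard STE in backward clamps these gradients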
onshape-public/onshape-clients
python/onshape_client/oas/models/bt_parameter_spec_reference_table1520.py
BTParameterSpecReferenceTable1520.__init__
python
def __init__(
    self,
    _check_type=True,
    _from_server=False,
    _path_to_item=(),
    _configuration=None,
    **kwargs
):
    self._data_store = {}
    self._check_type = _check_type
    self._from_server = _from_server
    self._path_to_item = _path_to_item
    self._configuration = _configuration

    constant_args = {
        "_check_type": _check_type,
        "_path_to_item": _path_to_item,
        "_from_server": _from_server,
        "_configuration": _configuration,
    }
    required_args = {}
    required_arg_names = list(required_args.keys())
    for required_arg_name in required_arg_names:
        if required_args[required_arg_name] is nulltype.Null:
            del required_args[required_arg_name]
    model_args = {}
    model_args.update(required_args)
    model_args.update(kwargs)
    composed_info = validate_get_composed_info(constant_args, model_args, self)
    self._composed_instances = composed_info[0]
    self._var_name_to_model_instances = composed_info[1]
    self._additional_properties_model_instances = composed_info[2]
    unused_args = composed_info[3]

    for var_name, var_value in required_args.items():
        setattr(self, var_name, var_value)
    for var_name, var_value in six.iteritems(kwargs):
        if (
            var_name in unused_args
            and self._configuration is not None
            and self._configuration.discard_unknown_keys
            and not self._additional_properties_model_instances
        ):
            continue
        setattr(self, var_name, var_value)
bt_parameter_spec_reference_table1520.BTParameterSpecReferenceTable1520 - a model defined in OpenAPI

Keyword Args:
    _check_type (bool): if True, values for parameters in openapi_types
        will be type checked and a TypeError will be raised if the wrong
        type is input. Defaults to True
    _path_to_item (tuple/list): This is a list of keys or values to
        drill down to the model in received_data when deserializing a response
    _from_server (bool): True if the data is from the server,
        False if the data is from the client (default)
    _configuration (Configuration): the instance to use when
        deserializing a file_type parameter. If passed, type conversion
        is attempted. If omitted no type conversion is done.
    bt_type (str): [optional]
    additional_localized_strings (int): [optional]
    column_name (str): [optional]
    default_value (btm_parameter1.BTMParameter1): [optional]
    icon_uri (str): [optional]
    localizable_name (str): [optional]
    localized_name (str): [optional]
    parameter_id (str): [optional]
    parameter_name (str): [optional]
    strings_to_localize ([str]): [optional]
    ui_hint (str): [optional]
    ui_hints ([str]): [optional]
    visibility_condition (bt_parameter_visibility_condition177.BTParameterVisibilityCondition177): [optional]
https://github.com/onshape-public/onshape-clients/blob/20843a00c628e516e7219e17a23ec4ef2bf9f16f/python/onshape_client/oas/models/bt_parameter_spec_reference_table1520.py#L178-L255
from __future__ import absolute_import import re import sys import six import nulltype from onshape_client.oas.model_utils import ( ModelComposed, ModelNormal, ModelSimple, date, datetime, file_type, int, none_type, str, validate_get_composed_info, ) try: from onshape_client.oas.models import bt_configured_feature_column_info1014_all_of except ImportError: bt_configured_feature_column_info1014_all_of = sys.modules[ "onshape_client.oas.models.bt_configured_feature_column_info1014_all_of" ] try: from onshape_client.oas.models import bt_parameter_spec_reference_blob1367 except ImportError: bt_parameter_spec_reference_blob1367 = sys.modules[ "onshape_client.oas.models.bt_parameter_spec_reference_blob1367" ] try: from onshape_client.oas.models import bt_parameter_visibility_condition177 except ImportError: bt_parameter_visibility_condition177 = sys.modules[ "onshape_client.oas.models.bt_parameter_visibility_condition177" ] try: from onshape_client.oas.models import btm_parameter1 except ImportError: btm_parameter1 = sys.modules["onshape_client.oas.models.btm_parameter1"] class BTParameterSpecReferenceTable1520(ModelComposed): allowed_values = { ("ui_hints",): { "OPPOSITE_DIRECTION": "OPPOSITE_DIRECTION", "ALWAYS_HIDDEN": "ALWAYS_HIDDEN", "SHOW_CREATE_SELECTION": "SHOW_CREATE_SELECTION", "CONTROL_VISIBILITY": "CONTROL_VISIBILITY", "NO_PREVIEW_PROVIDED": "NO_PREVIEW_PROVIDED", "REMEMBER_PREVIOUS_VALUE": "REMEMBER_PREVIOUS_VALUE", "DISPLAY_SHORT": "DISPLAY_SHORT", "ALLOW_FEATURE_SELECTION": "ALLOW_FEATURE_SELECTION", "MATE_CONNECTOR_AXIS_TYPE": "MATE_CONNECTOR_AXIS_TYPE", "PRIMARY_AXIS": "PRIMARY_AXIS", "SHOW_EXPRESSION": "SHOW_EXPRESSION", "OPPOSITE_DIRECTION_CIRCULAR": "OPPOSITE_DIRECTION_CIRCULAR", "SHOW_LABEL": "SHOW_LABEL", "HORIZONTAL_ENUM": "HORIZONTAL_ENUM", "UNCONFIGURABLE": "UNCONFIGURABLE", "MATCH_LAST_ARRAY_ITEM": "MATCH_LAST_ARRAY_ITEM", "COLLAPSE_ARRAY_ITEMS": "COLLAPSE_ARRAY_ITEMS", "INITIAL_FOCUS_ON_EDIT": "INITIAL_FOCUS_ON_EDIT", "INITIAL_FOCUS": "INITIAL_FOCUS", "DISPLAY_CURRENT_VALUE_ONLY": "DISPLAY_CURRENT_VALUE_ONLY", "READ_ONLY": "READ_ONLY", "PREVENT_CREATING_NEW_MATE_CONNECTORS": "PREVENT_CREATING_NEW_MATE_CONNECTORS", "FIRST_IN_ROW": "FIRST_IN_ROW", "ALLOW_QUERY_ORDER": "ALLOW_QUERY_ORDER", "PREVENT_ARRAY_REORDER": "PREVENT_ARRAY_REORDER", "UNKNOWN": "UNKNOWN", }, } validations = {} additional_properties_type = None @staticmethod def openapi_types(): return { "bt_type": (str,), "additional_localized_strings": (int,), "column_name": (str,), "default_value": (btm_parameter1.BTMParameter1,), "icon_uri": (str,), "localizable_name": (str,), "localized_name": (str,), "parameter_id": (str,), "parameter_name": (str,), "strings_to_localize": ([str],), "ui_hint": (str,), "ui_hints": ([str],), "visibility_condition": ( bt_parameter_visibility_condition177.BTParameterVisibilityCondition177, ), } @staticmethod def discriminator(): return None attribute_map = { "bt_type": "btType", "additional_localized_strings": "additionalLocalizedStrings", "column_name": "columnName", "default_value": "defaultValue", "icon_uri": "iconUri", "localizable_name": "localizableName", "localized_name": "localizedName", "parameter_id": "parameterId", "parameter_name": "parameterName", "strings_to_localize": "stringsToLocalize", "ui_hint": "uiHint", "ui_hints": "uiHints", "visibility_condition": "visibilityCondition", } required_properties = set( [ "_data_store", "_check_type", "_from_server", "_path_to_item", "_configuration", "_composed_instances", "_var_name_to_model_instances", 
"_additional_properties_model_instances", ] )
MIT License
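A hedged instantiation sketch for the generated OpenAPI model in the row above; it only uses keyword arguments listed in the docstring, and all values are illustrative rather than taken from a real Onshape document.

from onshape_client.oas.models import bt_parameter_spec_reference_table1520

# Build the model with a few of its optional keyword arguments.
spec = bt_parameter_spec_reference_table1520.BTParameterSpecReferenceTable1520(
    parameter_id="booleanParam",     # illustrative value
    parameter_name="My parameter",   # illustrative value
    column_name="Configuration A",   # illustrative value
    ui_hints=["READ_ONLY"],          # one of the allowed_values in the class context
)
print(spec.parameter_id)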
bpython/curtsies
curtsies/formatstringarray.py
FSArray.height
python
def height(self) -> int:
    return len(self.rows)
The number of rows
https://github.com/bpython/curtsies/blob/a6019a45eb9a9897f68b1a404245076cbd74492c/curtsies/formatstringarray.py#L109-L111
import itertools import sys import logging from .formatstring import fmtstr from .formatstring import normalize_slice from .formatstring import FmtStr from typing import ( Any, Union, List, Sequence, overload, Tuple, cast, no_type_check, ) logger = logging.getLogger(__name__) def slicesize(s: slice) -> int: return int((s.stop - s.start) / (s.step if s.step else 1)) class FSArray(Sequence): def __init__( self, num_rows: int, num_columns: int, *args: Any, **kwargs: Any ) -> None: self.saved_args, self.saved_kwargs = args, kwargs self.rows: List[FmtStr] = [fmtstr("", *args, **kwargs) for _ in range(num_rows)] self.num_columns = num_columns @overload def __getitem__(self, slicetuple: int) -> FmtStr: pass @overload def __getitem__(self, slicetuple: slice) -> List[FmtStr]: pass @overload def __getitem__( self, slicetuple: Tuple[Union[slice, int], Union[slice, int]] ) -> List[FmtStr]: pass def __getitem__( self, slicetuple: Union[int, slice, Tuple[Union[int, slice], Union[int, slice]]] ) -> Union[FmtStr, List[FmtStr]]: if isinstance(slicetuple, int): if slicetuple < 0: slicetuple = len(self.rows) - slicetuple if slicetuple < 0 or slicetuple >= len(self.rows): raise IndexError("out of bounds") return self.rows[slicetuple] if isinstance(slicetuple, slice): rowslice = normalize_slice(len(self.rows), slicetuple) return self.rows[rowslice] ( row_slice_or_int, col_slice_or_int, ) = slicetuple rowslice = normalize_slice(len(self.rows), row_slice_or_int) colslice = normalize_slice(self.num_columns, col_slice_or_int) return [fs[colslice] for fs in self.rows[rowslice]] def __len__(self) -> int: return len(self.rows) @property def shape(self) -> Tuple[int, int]: return len(self.rows), self.num_columns @property
MIT License
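A small sketch of the FSArray.height property from the row above, assuming curtsies is installed; the dimensions are illustrative.

from curtsies.formatstringarray import FSArray

arr = FSArray(3, 10)   # 3 rows, 10 columns of empty FmtStrs (see __init__ in the context)
print(arr.height)      # 3, i.e. len(arr.rows)
print(arr.shape)       # (3, 10)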
yelp/paasta
paasta_tools/smartstack_tools.py
MesosSmartstackEnvoyReplicationChecker.get_allowed_locations_and_hosts
python
def get_allowed_locations_and_hosts(
    self, instance_config: LongRunningServiceConfig
) -> Dict[str, Sequence[DiscoveredHost]]:
    discover_location_type = marathon_tools.load_service_namespace_config(
        service=instance_config.service,
        namespace=instance_config.get_nerve_namespace(),
        soa_dir=instance_config.soa_dir,
    ).get_discover()
    attribute_to_slaves = mesos_tools.get_mesos_slaves_grouped_by_attribute(
        slaves=self._mesos_slaves, attribute=discover_location_type
    )
    ret: Dict[str, Sequence[DiscoveredHost]] = {}
    for attr, slaves in attribute_to_slaves.items():
        ret[attr] = [
            DiscoveredHost(
                hostname=slave["hostname"], pool=slave["attributes"]["pool"]
            )
            for slave in slaves
        ]
    return ret
Returns a dict of locations and lists of corresponding mesos slaves
where deployment of the instance is allowed.

:param instance_config: An instance of MarathonServiceConfig
:returns: A dict {"uswest1-prod": [DiscoveredHost(), DiscoveredHost(), ...]}
https://github.com/yelp/paasta/blob/bc1716253bbe003cec01bd02016010910c2b039c/paasta_tools/smartstack_tools.py#L688-L713
import abc import collections import csv import logging import socket from typing import Any from typing import cast from typing import Collection from typing import DefaultDict from typing import Dict from typing import Iterable from typing import List from typing import MutableMapping from typing import NamedTuple from typing import Optional from typing import Sequence from typing import Tuple from typing import TypeVar from typing import Union import requests from kubernetes.client import V1Node from kubernetes.client import V1Pod from mypy_extensions import TypedDict from paasta_tools import envoy_tools from paasta_tools import kubernetes_tools from paasta_tools import marathon_tools from paasta_tools import mesos_tools from paasta_tools.long_running_service_tools import LongRunningServiceConfig from paasta_tools.mesos.exceptions import NoSlavesAvailableError from paasta_tools.monitoring_tools import ReplicationChecker from paasta_tools.utils import compose_job_id from paasta_tools.utils import DEFAULT_SOA_DIR from paasta_tools.utils import DeployBlacklist from paasta_tools.utils import get_user_agent from paasta_tools.utils import SystemPaastaConfig class HaproxyBackend(TypedDict, total=False): check_code: str check_duration: str check_status: str lastchg: str pxname: str svname: str status: str log = logging.getLogger(__name__) def retrieve_haproxy_csv( synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, scope: str ) -> Iterable[Dict[str, str]]: synapse_uri = synapse_haproxy_url_format.format( host=synapse_host, port=synapse_port, scope=scope ) haproxy_request = requests.Session() haproxy_request.headers.update({"User-Agent": get_user_agent()}) haproxy_request.mount("http://", requests.adapters.HTTPAdapter(max_retries=3)) haproxy_request.mount("https://", requests.adapters.HTTPAdapter(max_retries=3)) haproxy_response = haproxy_request.get(synapse_uri, timeout=1) haproxy_data = haproxy_response.text reader = csv.DictReader(haproxy_data.splitlines()) return reader def get_backends( service: str, synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str ) -> List[HaproxyBackend]: if service: services = [service] else: services = None return get_multiple_backends( services, synapse_host=synapse_host, synapse_port=synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, ) def get_multiple_backends( services: Optional[Collection[str]], synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, ) -> List[HaproxyBackend]: if services is not None and len(services) == 1: (scope,) = services else: scope = "" reader = retrieve_haproxy_csv( synapse_host, synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, scope=scope, ) backends = [] for line in reader: line["pxname"] = line.pop("# pxname") line.pop("") ha_slave, ha_service = line["svname"], line["pxname"] if (services is None or ha_service in services) and ha_slave not in ( "FRONTEND", "BACKEND", ): backends.append(cast(HaproxyBackend, line)) return backends def load_smartstack_info_for_service( service: str, namespace: str, blacklist: DeployBlacklist, system_paasta_config: SystemPaastaConfig, soa_dir: str = DEFAULT_SOA_DIR, ) -> Dict[str, Dict[str, int]]: service_namespace_config = marathon_tools.load_service_namespace_config( service=service, namespace=namespace, soa_dir=soa_dir ) discover_location_type = service_namespace_config.get_discover() return get_smartstack_replication_for_attribute( attribute=discover_location_type, service=service, namespace=namespace, 
blacklist=blacklist, system_paasta_config=system_paasta_config, ) def get_smartstack_replication_for_attribute( attribute: str, service: str, namespace: str, blacklist: DeployBlacklist, system_paasta_config: SystemPaastaConfig, ) -> Dict[str, Dict[str, int]]: replication_info = {} filtered_slaves = mesos_tools.get_all_slaves_for_blacklist_whitelist( blacklist=blacklist, whitelist=None ) if not filtered_slaves: raise NoSlavesAvailableError attribute_slave_dict = mesos_tools.get_mesos_slaves_grouped_by_attribute( slaves=filtered_slaves, attribute=attribute ) full_name = compose_job_id(service, namespace) for value, hosts in attribute_slave_dict.items(): synapse_host = hosts[0]["hostname"] repl_info = get_replication_for_services( synapse_host=synapse_host, synapse_port=system_paasta_config.get_synapse_port(), synapse_haproxy_url_format=system_paasta_config.get_synapse_haproxy_url_format(), services=[full_name], ) replication_info[value] = repl_info return replication_info def get_replication_for_all_services( synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str ) -> Dict[str, int]: backends = get_multiple_backends( services=None, synapse_host=synapse_host, synapse_port=synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, ) return collections.Counter([b["pxname"] for b in backends if backend_is_up(b)]) def get_replication_for_services( synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, services: Collection[str], ) -> Dict[str, int]: backends = get_multiple_backends( services=services, synapse_host=synapse_host, synapse_port=synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, ) counter = collections.Counter([b["pxname"] for b in backends if backend_is_up(b)]) return {sn: counter[sn] for sn in services} def backend_is_up(backend: HaproxyBackend) -> bool: return str(backend["status"]).startswith("UP") def ip_port_hostname_from_svname(svname: str) -> Tuple[str, int, str]: parts = set(svname.split("_")) ip_ports = {part for part in parts if len(part.split(":")) == 2} hostname = parts.difference(ip_ports).pop() ip, port = ip_ports.pop().split(":") return ip, int(port), hostname def get_registered_marathon_tasks( synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, service: str, marathon_tasks: Iterable[marathon_tools.MarathonTask], ) -> List[marathon_tools.MarathonTask]: backends = get_multiple_backends( [service], synapse_host=synapse_host, synapse_port=synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, ) healthy_tasks = [] for backend, task in match_backends_and_tasks(backends, marathon_tasks): if ( backend is not None and task is not None and backend["status"].startswith("UP") ): healthy_tasks.append(task) return healthy_tasks def are_services_up_on_ip_port( synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, services: Collection[str], host_ip: str, host_port: int, ) -> bool: backends = get_multiple_backends( services, synapse_host=synapse_host, synapse_port=synapse_port, synapse_haproxy_url_format=synapse_haproxy_url_format, ) backends_by_ip_port: DefaultDict[ Tuple[str, int], List[HaproxyBackend] ] = collections.defaultdict(list) for backend in backends: ip, port, _ = ip_port_hostname_from_svname(backend["svname"]) backends_by_ip_port[ip, port].append(backend) backends_on_ip = backends_by_ip_port[host_ip, host_port] services_with_atleast_one_backend_up = {service: False for service in services} for service in services: for be in backends_on_ip: if be["pxname"] == service 
and backend_is_up(be): services_with_atleast_one_backend_up[service] = True return all(services_with_atleast_one_backend_up.values()) def match_backends_and_tasks( backends: Iterable[HaproxyBackend], tasks: Iterable[marathon_tools.MarathonTask] ) -> List[Tuple[Optional[HaproxyBackend], Optional[marathon_tools.MarathonTask]]]: backends_by_ip_port: DefaultDict[ Tuple[str, int], List[HaproxyBackend] ] = collections.defaultdict(list) backend_task_pairs = [] for backend in backends: ip, port, _ = ip_port_hostname_from_svname(backend["svname"]) backends_by_ip_port[ip, port].append(backend) for task in tasks: ip = socket.gethostbyname(task.host) for port in task.ports: for backend in backends_by_ip_port.pop((ip, port), [None]): backend_task_pairs.append((backend, task)) for backends in backends_by_ip_port.values(): for backend in backends: backend_task_pairs.append((backend, None)) return backend_task_pairs def match_backends_and_pods( backends: Iterable[HaproxyBackend], pods: Iterable[V1Pod] ) -> List[Tuple[Optional[HaproxyBackend], Optional[V1Pod]]]: backends_by_ip: DefaultDict[str, List[HaproxyBackend]] = collections.defaultdict( list ) backend_pod_pairs = [] for backend in backends: ip, port, _ = ip_port_hostname_from_svname(backend["svname"]) backends_by_ip[ip].append(backend) for pod in pods: ip = pod.status.pod_ip for backend in backends_by_ip.pop(ip, [None]): backend_pod_pairs.append((backend, pod)) for backends in backends_by_ip.values(): for backend in backends: backend_pod_pairs.append((backend, None)) return backend_pod_pairs _MesosSlaveDict = TypeVar( "_MesosSlaveDict", bound=Dict ) class DiscoveredHost(NamedTuple): hostname: str pool: str class ServiceDiscoveryProvider(abc.ABC): NAME = "..." @abc.abstractmethod def get_replication_for_all_services(self, hostname: str) -> Dict[str, int]: ... 
class SmartstackServiceDiscovery(ServiceDiscoveryProvider): NAME = "Smartstack" def __init__(self, system_paasta_config: SystemPaastaConfig) -> None: self._synapse_port = system_paasta_config.get_synapse_port() self._synapse_haproxy_url_format = ( system_paasta_config.get_synapse_haproxy_url_format() ) def get_replication_for_all_services(self, hostname: str) -> Dict[str, int]: return get_replication_for_all_services( synapse_host=hostname, synapse_port=self._synapse_port, synapse_haproxy_url_format=self._synapse_haproxy_url_format, ) class EnvoyServiceDiscovery(ServiceDiscoveryProvider): NAME = "Envoy" def __init__(self, system_paasta_config: SystemPaastaConfig) -> None: self._envoy_admin_port = system_paasta_config.get_envoy_admin_port() self._envoy_admin_endpoint_format = ( system_paasta_config.get_envoy_admin_endpoint_format() ) def get_replication_for_all_services(self, hostname: str) -> Dict[str, int]: return envoy_tools.get_replication_for_all_services( envoy_host=hostname, envoy_admin_port=self._envoy_admin_port, envoy_admin_endpoint_format=self._envoy_admin_endpoint_format, ) def get_service_discovery_providers( system_paasta_config: SystemPaastaConfig, ) -> List[ServiceDiscoveryProvider]: providers: List[ServiceDiscoveryProvider] = [] for name, _ in system_paasta_config.get_service_discovery_providers().items(): if name == "smartstack": providers.append(SmartstackServiceDiscovery(system_paasta_config)) elif name == "envoy": providers.append(EnvoyServiceDiscovery(system_paasta_config)) else: log.warn("unknown provider") return providers class BaseReplicationChecker(ReplicationChecker): def __init__( self, system_paasta_config: SystemPaastaConfig, service_discovery_providers: Iterable[ServiceDiscoveryProvider], ) -> None: self._system_paasta_config = system_paasta_config self._cache: Dict[Tuple[str, str], Dict[str, int]] = {} self._service_discovery_providers = service_discovery_providers @abc.abstractmethod def get_allowed_locations_and_hosts( self, instance_config: LongRunningServiceConfig ) -> Dict[str, Sequence[DiscoveredHost]]: ... 
def get_replication_for_instance( self, instance_config: LongRunningServiceConfig ) -> Dict[str, Dict[str, Dict[str, int]]]: replication_infos = {} for provider in self._service_discovery_providers: replication_info = {} attribute_host_dict = self.get_allowed_locations_and_hosts(instance_config) instance_pool = instance_config.get_pool() for location, hosts in attribute_host_dict.items(): hostnames = self.get_hostnames_in_pool(hosts, instance_pool) for hostname in hostnames: try: replication_info[location] = self._get_replication_info( location, hostname, instance_config, provider ) break except Exception as e: log.warn( f"Error while getting replication info for {location} from {hostname}: {e}" ) if hostname == hostnames[-1]: raise replication_infos[provider.NAME] = replication_info return replication_infos def get_first_host_in_pool(self, hosts: Sequence[DiscoveredHost], pool: str) -> str: for host in hosts: if host.pool == pool: return host.hostname return hosts[0].hostname def get_hostnames_in_pool( self, hosts: Sequence[DiscoveredHost], pool: str ) -> Sequence[str]: hostnames = [] for host in hosts: if host.pool == pool: hostnames.append(host.hostname) if len(hostnames) == 0: hostnames.append(hosts[0].hostname) return hostnames def _get_replication_info( self, location: str, hostname: str, instance_config: LongRunningServiceConfig, provider: ServiceDiscoveryProvider, ) -> Dict[str, int]: full_name = compose_job_id(instance_config.service, instance_config.instance) key = (location, provider.NAME) replication_info = self._cache.get(key) if replication_info is None: replication_info = provider.get_replication_for_all_services(hostname) self._cache[key] = replication_info return {full_name: replication_info[full_name]} class MesosSmartstackEnvoyReplicationChecker(BaseReplicationChecker): def __init__( self, mesos_slaves: List[_MesosSlaveDict], system_paasta_config: SystemPaastaConfig, ) -> None: self._mesos_slaves = mesos_slaves super().__init__( system_paasta_config=system_paasta_config, service_discovery_providers=get_service_discovery_providers( system_paasta_config ), )
Apache License 2.0
sevagas/macro_pack
src/modules/payload_builder_factory.py
PayloadBuilderFactory._handleOfficeFormats
python
def _handleOfficeFormats(self, mpSession):
    if MSTypes.XL in mpSession.outputFileType:
        generator = ExcelGenerator(mpSession)
    elif MSTypes.WD in mpSession.outputFileType:
        generator = WordGenerator(mpSession)
    elif MSTypes.PPT in mpSession.outputFileType:
        generator = PowerPointGenerator(mpSession)
    elif MSTypes.MPP == mpSession.outputFileType:
        generator = MSProjectGenerator(mpSession)
    elif MSTypes.VSD in mpSession.outputFileType:
        generator = VisioGenerator(mpSession)
    elif MSTypes.ACC in mpSession.outputFileType:
        generator = AccessGenerator(mpSession)
    return generator
Handle MS Office output formats generation
https://github.com/sevagas/macro_pack/blob/38dd98076ac98fe92145f484e58ae6d2a871d5df/src/modules/payload_builder_factory.py#L31-L48
import sys
from modules.excel_gen import ExcelGenerator
from modules.word_gen import WordGenerator
from modules.ppt_gen import PowerPointGenerator
from modules.msproject_gen import MSProjectGenerator
from modules.vba_gen import VBAGenerator
from modules.vbs_gen import VBSGenerator
from modules.hta_gen import HTAGenerator
from modules.sct_gen import SCTGenerator
from modules.wsf_gen import WSFGenerator
from modules.visio_gen import VisioGenerator
from modules.access_gen import AccessGenerator
from modules.scf_gen import SCFGenerator
from modules.xsl_gen import XSLGenerator
from modules.url_gen import UrlShortcutGenerator
from modules.glk_gen import GlkGenerator
from modules.lnk_gen import LNKGenerator
from modules.settingsms_gen import SettingsShortcutGenerator
from modules.libraryms_gen import LibraryShortcutGenerator
from modules.inf_gen import InfGenerator
from modules.csproj_gen import CsProjGenerator
from modules.iqy_gen import IqyGenerator
from common.utils import MSTypes


class PayloadBuilderFactory():
Apache License 2.0
siviltaram/persona-dialogue-generation
parlai/agents/mlb_vqa/mlb_vqa.py
VqaDictionaryAgent.add_to_ans_dict
python
def add_to_ans_dict(self, token):
    self.ansfreq[token] += 1
    if token not in self.ans2ind:
        index = len(self.ans2ind)
        self.ans2ind[token] = index
        self.ind2ans[index] = token
Builds dictionary from the list of provided tokens. Only adds words contained in self.embedding_words, if not None.
https://github.com/siviltaram/persona-dialogue-generation/blob/3cc800ffe3c5a8d16ed26522cda839acfab8d417/parlai/agents/mlb_vqa/mlb_vqa.py#L158-L166
from parlai.core.agents import Agent import torch.nn as nn import torch import os from collections import defaultdict, Counter import numpy as np import re from .mlb_modules import MlbAtt, MlbNoAtt def escape(s): return s.replace('\n', '\\n').replace('\t', '\\t').replace('\r', '\\r') def unescape(s): return s.replace('\\n', '\n').replace('\\t', '\t').replace('\\r', '\r') class VqaDictionaryAgent(Agent): @staticmethod def add_cmdline_args(argparser): dictionary = argparser.add_argument_group('Dictionary Arguments') dictionary.add_argument( '--dict-file', help='if set, the dictionary will automatically save to this path' + ' during shutdown') dictionary.add_argument( '--dict-initpath', help='path to a saved dictionary to load tokens / counts from to ' + 'seed the dictionary with initial tokens and/or frequencies') dictionary.add_argument( '--dict-maxexs', default=300000, type=int, help='max number of examples to build dict on') dictionary.add_argument('-smp', '--samplingans', type='bool', default=True) dictionary.add_argument('--nans', type=int, default=2000) dictionary.add_argument('--maxlength', type=int, default=16) dictionary.add_argument('--minwcount', type=int, default=0) dictionary.add_argument('--nlp', default='mcb') def __init__(self, opt, shared=None): super(VqaDictionaryAgent, self).__init__(opt) self.id = 'VqaDictionary' self.null_token = '__NULL__' self.unk_token = '__UNK__' if shared: self.freq = shared.get('freq', {}) self.tok2ind = shared.get('tok2ind', {}) self.ind2tok = shared.get('ind2tok', {}) self.ans2ind = shared.get('ans2ind', {}) self.ind2ans = shared.get('ind2ans', {}) else: self.freq = defaultdict(int) self.ansfreq = defaultdict(int) self.ans2ques = defaultdict(list) self.tok2ind = {} self.ind2tok = {} self.ans2ind = {} self.ind2ans = {} if self.null_token: self.tok2ind[self.null_token] = 0 self.ind2tok[0] = self.null_token if self.unk_token: index = len(self.tok2ind) self.tok2ind[self.unk_token] = index self.ind2tok[index] = self.unk_token if opt.get('dict_file') and os.path.isfile(opt['dict_file']): self.load(opt['dict_file']) if not shared: if self.null_token: self.freq[self.null_token] = 1000000002 if self.unk_token: self.freq[self.unk_token] = 1000000000 if opt.get('dict_file'): self.save_path = opt['dict_file'] def __len__(self): return len(self.tok2ind) def add_to_ques_dict(self, tokens): for token in tokens: self.freq[token] += 1 if token not in self.tok2ind: index = len(self.tok2ind) self.tok2ind[token] = index self.ind2tok[index] = token
MIT License
kartverket/midgard
midgard/parsers/_parser_rinex.py
RinexParser.time_of_last_obs
python
def time_of_last_obs(self) -> RinexHeader:
    return RinexHeader(
        marker="TIME OF LAST OBS",
        fields={
            "year": (0, 6),
            "month": (6, 12),
            "day": (12, 18),
            "hour": (18, 24),
            "minute": (24, 30),
            "second": (30, 43),
            "time_sys": (48, 51),
        },
        parser=self.parse_time_of_last_obs,
    )
Parser definition for RINEX header label 'TIME OF LAST OBS'

Example:
    ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8
      2016    03    01    23    59   59.0000000     GPS         TIME OF LAST OBS
https://github.com/kartverket/midgard/blob/faf8963c9e0e49255c90a60ba5671277912777fd/midgard/parsers/_parser_rinex.py#L661-L680
from datetime import datetime import functools import itertools import pathlib from typing import Any, Callable, cast, Dict, List, NamedTuple, Optional, Tuple, Union import numpy as np from midgard.dev import exceptions from midgard.dev import log from midgard.parsers._parser import Parser _FieldDef = Dict[str, Tuple[int, int]] _FieldStr = Dict[str, str] _FieldVal = Dict[str, Any] _FieldCache = List[_FieldStr] class RinexHeader(NamedTuple): marker: str fields: _FieldDef parser: Callable[[_FieldStr], _FieldVal] def parser_cache( func: Callable[["RinexParser", _FieldStr, _FieldCache], _FieldVal], ) -> Callable[["RinexParser", _FieldStr], _FieldVal]: func.cache = list() @functools.wraps(func) def wrapper_parser_cache(self: "RinexParser", fields: _FieldStr) -> _FieldVal: value = func(self, fields, func.cache) func.cache.append(fields) return value return wrapper_parser_cache class RinexParser(Parser): name = "Rinex" def __init__( self, file_path: Union[str, pathlib.Path], encoding: Optional[str] = None, logger=print, sampling_rate: Optional[int] = None, strict: bool = False, ) -> None: super().__init__(file_path, encoding) self.meta["__kwargs__"] = dict( file_path=file_path, encoding=encoding, sampling_rate=sampling_rate, strict=strict ) self.header: Dict[str, Any] = dict() self.samling_rate = sampling_rate self.error = cast(Callable[[str], None], self._raise_error if strict else log.warn) def _raise_error(self, text: str) -> None: raise exceptions.ParserError(text) def get_rinex_version_type(self) -> Dict[str, str]: header_def = self.rinex_version__type with open(self.file_path, mode="r", encoding=self.file_encoding) as fid: for line in fid: marker = line[60:80].strip() if marker != header_def.marker: self.error(f"Wrong marker {marker!r} before version information") continue return {k: line[slice(*v)].strip() for k, v in header_def.fields.items()} raise exceptions.ParserError(f"No information about Rinex version found in {self.file_path}") @property def mandatory_headers(self) -> Tuple[RinexHeader, ...]: raise NotImplementedError @property def optional_headers(self) -> Tuple[RinexHeader, ...]: raise NotImplementedError def read_data(self) -> None: with open(self.file_path, mode="r", encoding=self.file_encoding) as fid: self.read_header(fid) self.read_epochs(fid) self.structure_data() def read_header(self, fid) -> None: headers = {h.marker: h for h in self.mandatory_headers + self.optional_headers} mandatory_markers = {h.marker for h in self.mandatory_headers} for line in fid: marker = line[60:80].strip() if marker == "END OF HEADER": break if marker in headers: header_def = headers[marker] fields = {k: line[slice(*v)].strip() for k, v in header_def.fields.items()} self.header.update(header_def.parser(fields)) if marker in mandatory_markers: mandatory_markers.remove(marker) else: self.error(f"Unknown {self.name} header {marker!r}") for marker in mandatory_markers: self.error(f"Mandatory {self.name} header {marker!r} not found") def read_epochs(self, fid) -> None: sampling_rate = 400 prev_epoch = datetime.min for epoch_line in fid: num_data_lines, epoch_info = self.parse_epoch_line(epoch_line) data_lines = itertools.islice(fid, num_data_lines) if sampling_rate is not None: epoch = epoch_info["epoch"] if (epoch - prev_epoch).total_seconds() < sampling_rate: for line in data_lines: pass continue prev_epoch = epoch data_info = self.parse_data_lines(data_lines, epoch_info) def parse_epoch_line(self, line): raise NotImplementedError def parse_data_lines(self, lines, epoch_info): raise 
NotImplementedError def structure_data(self) -> None: for system, sys_data in self.data.items(): self.data[system] = {k: np.array(v) for k, v in sys_data.items()} def parse_approx_position(self, fields: _FieldStr) -> _FieldVal: pos = np.array((float(fields["pos_x"]), float(fields["pos_y"]), float(fields["pos_z"]))) return dict(pos=pos) def parse_comment(self, fields: _FieldStr) -> _FieldVal: comment = self.header.setdefault("comment", []) comment.append(fields["comment"]) return dict(comment=comment) def parse_float(self, fields: _FieldStr) -> _FieldVal: return {k: float(v) for k, v in fields.items() if v} def parse_glonass_code_phase_bias(self, fields: _FieldStr) -> _FieldVal: glonass_bias = self.header.setdefault("glonass_bias", {}) for field in fields.values(): if field: type_, bias = field.split()[0:2] glonass_bias.update({type_: float(bias)}) return glonass_bias def parse_glonass_slot(self, fields: _FieldStr) -> _FieldVal: glonass_slot = self.header.setdefault("glonass_slot", {}) if "num_satellite" in fields: num_sat = fields["num_satellite"] del fields["num_satellite"] for field in fields.values(): if field: slot, freq = field.split()[0:2] glonass_slot.update({slot: int(freq)}) return glonass_slot def parse_integer(self, fields: _FieldStr) -> _FieldVal: return {k: int(v) for k, v in fields.items() if v} def parse_leap_seconds(self, fields: _FieldStr) -> _FieldVal: return dict(leap_seconds={k: v for k, v in fields.items() if v}) @parser_cache def parse_phase_shift(self, fields: _FieldStr, cache: _FieldCache) -> _FieldVal: phase_shift = self.header.setdefault("phase_shift", {}) if fields["sat_sys"]: sat_sys = fields["sat_sys"] obs_type = fields["obs_type"] phase_shift.setdefault(sat_sys, {}).update({obs_type: {}}) else: sat_sys, obs_type = next((c["sat_sys"], c["obs_type"]) for c in cache[::-1] if c["sat_sys"]) if fields["correction"]: phase_shift[sat_sys][obs_type].update( {"corr": float(fields["correction"]), "sat": fields["satellites"].split()} ) return phase_shift if fields["satellites"]: phase_shift[sat_sys][obs_type]["sat"].extend(fields["satellites"].split()) return phase_shift def parse_scale_factor(self, fields: _FieldStr) -> _FieldVal: return NotImplementedError def parse_string(self, fields: _FieldStr) -> _FieldVal: return {k: v for k, v in fields.items() if v} def parse_sys_dcbs_applied(self, fields: _FieldStr) -> _FieldVal: return fields @parser_cache def parse_sys_obs_types(self, fields: _FieldStr, cache: _FieldCache) -> _FieldVal: satellite_sys = fields["satellite_sys"] prev_idx = -1 while not satellite_sys: satellite_sys = cache[prev_idx]["satellite_sys"] prev_idx -= 1 obs_types = self.header.get("obs_types", dict()) obs_list = obs_types.setdefault(satellite_sys, list()) for field in sorted([f for f in fields if f.startswith("type_")]): if fields[field]: obs_list.append(fields[field]) return dict(obs_types=obs_types) def parse_sys_pcvs_applied(self, fields: _FieldStr) -> _FieldVal: return fields def parse_time_of_first_obs(self, fields: _FieldStr) -> _FieldVal: return fields def parse_time_of_last_obs(self, fields: _FieldStr) -> _FieldVal: return fields @property def rinex_version__type(self) -> RinexHeader: return RinexHeader( marker="RINEX VERSION / TYPE", fields={"rinex_version": (0, 20), "file_type": (20, 21), "sat_sys": (40, 41)}, parser=self.parse_string, ) @property def pgm__run_by__date(self) -> RinexHeader: return RinexHeader( marker="PGM / RUN BY / DATE", fields={"program": (0, 20), "run_by": (20, 40), "file_created": (40, 60)}, parser=self.parse_string, ) 
@property def comment(self) -> RinexHeader: return RinexHeader(marker="COMMENT", fields={"comment": (0, 60)}, parser=self.parse_comment) @property def marker_name(self) -> RinexHeader: return RinexHeader(marker="MARKER NAME", fields={"marker_name": (0, 60)}, parser=self.parse_string) @property def marker_number(self) -> RinexHeader: return RinexHeader(marker="MARKER NUMBER", fields={"marker_number": (0, 20)}, parser=self.parse_string) @property def marker_type(self) -> RinexHeader: return RinexHeader(marker="MARKER TYPE", fields={"marker_type": (0, 20)}, parser=self.parse_string) @property def observer__agency(self) -> RinexHeader: return RinexHeader( marker="OBSERVER / AGENCY", fields={"observer": (0, 20), "agency": (20, 60)}, parser=self.parse_string ) @property def rec_num__type__vers(self) -> RinexHeader: return RinexHeader( marker="REC # / TYPE / VERS", fields={"receiver_number": (0, 20), "receiver_type": (20, 40), "receiver_version": (40, 60)}, parser=self.parse_string, ) @property def ant_num__type(self) -> RinexHeader: return RinexHeader( marker="ANT # / TYPE", fields={"antenna_number": (0, 20), "antenna_type": (20, 40)}, parser=self.parse_string, ) @property def approx_position_xyz(self) -> RinexHeader: return RinexHeader( marker="APPROX POSITION XYZ", fields={"pos_x": (0, 14), "pos_y": (14, 28), "pos_z": (28, 42)}, parser=self.parse_approx_position, ) @property def antenna__delta_hen(self) -> RinexHeader: return RinexHeader( marker="ANTENNA: DELTA H/E/N", fields={"antenna_height": (0, 14), "antenna_east": (14, 28), "antenna_north": (28, 42)}, parser=self.parse_float, ) @property def antenna__delta_xyz(self) -> RinexHeader: return RinexHeader( marker="ANTENNA: DELTA X/Y/Z", fields={"ant_vehicle_x": (0, 14), "ant_vehicle_y": (14, 28), "ant_vehicle_z": (28, 42)}, parser=self.parse_float, ) @property def sys__num__obs_types(self) -> RinexHeader: return RinexHeader( marker="SYS / # / OBS TYPES", fields={ "satellite_sys": (0, 1), "num_obstypes": (3, 6), "type_01": (7, 10), "type_02": (11, 14), "type_03": (15, 18), "type_04": (19, 22), "type_05": (23, 26), "type_06": (27, 30), "type_07": (31, 34), "type_08": (35, 38), "type_09": (39, 42), "type_10": (43, 46), "type_11": (47, 50), "type_12": (51, 54), "type_13": (55, 58), }, parser=self.parse_sys_obs_types, ) @property def signal_strength_unit(self) -> RinexHeader: return RinexHeader( marker="SIGNAL STRENGTH UNIT", fields={"signal_strength_unit": (0, 20)}, parser=self.parse_string ) @property def interval(self) -> RinexHeader: return RinexHeader(marker="INTERVAL", fields={"interval": (0, 10)}, parser=self.parse_float) @property def time_of_first_obs(self) -> RinexHeader: return RinexHeader( marker="TIME OF FIRST OBS", fields={ "year": (0, 6), "month": (6, 12), "day": (12, 18), "hour": (18, 24), "minute": (24, 30), "second": (30, 43), "time_sys": (48, 51), }, parser=self.parse_time_of_first_obs, ) @property
MIT License
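A short sketch of how the fixed-column field definition above maps onto the example header line; the sample line below is padded according to the (start, stop) slices in the fields dict, so its spacing is reconstructed rather than copied from a real RINEX file. The slicing mirrors what read_header does in the context above.

# Field slices from the TIME OF LAST OBS header definition above.
fields = {
    "year": (0, 6), "month": (6, 12), "day": (12, 18),
    "hour": (18, 24), "minute": (24, 30), "second": (30, 43),
    "time_sys": (48, 51),
}

# Sample header line padded to the RINEX fixed-column layout (reconstructed).
line = "  2016    03    01    23    59   59.0000000     GPS         TIME OF LAST OBS"

parsed = {name: line[slice(*span)].strip() for name, span in fields.items()}
print(parsed)
# {'year': '2016', 'month': '03', 'day': '01', 'hour': '23',
#  'minute': '59', 'second': '59.0000000', 'time_sys': 'GPS'}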
rucio/rucio
lib/rucio/api/config.py
sections
python
def sections(issuer=None, vo='def'):
    kwargs = {'issuer': issuer}
    if not permission.has_permission(issuer=issuer, vo=vo, action='config_sections', kwargs=kwargs):
        raise exception.AccessDenied('%s cannot retrieve sections' % issuer)
    return config.sections()
Return a list of the sections available.

:param issuer: The issuer account.
:param vo: The VO to act on.
:returns: ['section_name', ...]
https://github.com/rucio/rucio/blob/6a6092798bb8220dec07328d0e3f7f42d1b931cd/lib/rucio/api/config.py#L27-L39
from rucio.api import permission
from rucio.common import exception
from rucio.core import config
Apache License 2.0
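A hedged usage sketch for the permission-gated wrapper above; 'root' is an illustrative issuer account and the call assumes a configured Rucio server-side environment.

from rucio.api import config as config_api
from rucio.common import exception

try:
    # Lists the configuration sections the issuer is allowed to see.
    for section in config_api.sections(issuer='root', vo='def'):
        print(section)
except exception.AccessDenied as error:
    print('not allowed: %s' % error)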
protothis/python-synology
src/synology_dsm/api/surveillance_station/__init__.py
SynoSurveillanceStation.get_camera_live_view_path
python
def get_camera_live_view_path(self, camera_id, video_format=None):
    if video_format:
        return getattr(self._cameras_by_id[camera_id].live_view, video_format)
    return self._cameras_by_id[camera_id].live_view
Return camera live view path matching camera_id.

Args:
    camera_id: ID of the camera we want to get the live view path.
    video_format: mjpeg_http | multicast | mxpeg_http | rtsp_http | rtsp.
https://github.com/protothis/python-synology/blob/645b818be2013231ac126c6962d2f9092a5c3aae/src/synology_dsm/api/surveillance_station/__init__.py#L64-L73
from .camera import SynoCamera from .const import MOTION_DETECTION_BY_SURVEILLANCE from .const import MOTION_DETECTION_DISABLED class SynoSurveillanceStation: API_KEY = "SYNO.SurveillanceStation.*" INFO_API_KEY = "SYNO.SurveillanceStation.Info" CAMERA_API_KEY = "SYNO.SurveillanceStation.Camera" CAMERA_EVENT_API_KEY = "SYNO.SurveillanceStation.Camera.Event" HOME_MODE_API_KEY = "SYNO.SurveillanceStation.HomeMode" SNAPSHOT_API_KEY = "SYNO.SurveillanceStation.SnapShot" def __init__(self, dsm): self._dsm = dsm self._cameras_by_id = {} def update(self): self._cameras_by_id = {} list_data = self._dsm.get(self.CAMERA_API_KEY, "List", max_version=7)["data"] for camera_data in list_data["cameras"]: if camera_data["id"] in self._cameras_by_id: self._cameras_by_id[camera_data["id"]].update(camera_data) else: self._cameras_by_id[camera_data["id"]] = SynoCamera(camera_data) for camera_id in self._cameras_by_id: self._cameras_by_id[camera_id].update_motion_detection( self._dsm.get( self.CAMERA_EVENT_API_KEY, "MotionEnum", {"camId": camera_id} )["data"] ) if not self._cameras_by_id: return live_view_datas = self._dsm.get( self.CAMERA_API_KEY, "GetLiveViewPath", {"idList": ",".join(str(k) for k in self._cameras_by_id)}, )["data"] for live_view_data in live_view_datas: self._cameras_by_id[live_view_data["id"]].live_view.update(live_view_data) def get_info(self): return self._dsm.get(self.INFO_API_KEY, "GetInfo") def get_all_cameras(self): return self._cameras_by_id.values() def get_camera(self, camera_id): return self._cameras_by_id[camera_id]
MIT License
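A sketch of fetching a camera's RTSP live-view path with the class above. The SynologyDSM constructor signature, the host, credentials, and the camera ID are all assumptions for illustration; the class context only requires that the dsm object expose a .get() method.

from synology_dsm import SynologyDSM  # assumed top-level client of this package
from synology_dsm.api.surveillance_station import SynoSurveillanceStation

# Connection details are illustrative.
dsm = SynologyDSM("192.168.1.10", "5001", "user", "password")

surveillance = SynoSurveillanceStation(dsm)
surveillance.update()          # fills the internal camera mapping (see context above)

camera_id = 1                  # illustrative camera ID
# "rtsp" is one of the formats listed in the docstring above.
print(surveillance.get_camera_live_view_path(camera_id, "rtsp"))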
aerospike/aerospike-client-python
aerospike_helpers/operations/hll_operations.py
hll_fold
python
def hll_fold(bin_name, index_bit_count):
    op_dict = {
        OP_KEY: aerospike.OP_HLL_FOLD,
        BIN_KEY: bin_name,
        INDEX_BIT_COUNT_KEY: index_bit_count
    }

    return op_dict
Creates a hll_fold operation to be used with operate, or operate_ordered.

Server folds index_bit_count to the specified value.
This can only be applied when minhash bit count on the HLL bin is 0.
Server does not return a value.

Args:
    bin_name (str): The name of the bin to be operated on.
    index_bit_count: number of index bits. Must be between 4 and 16 inclusive.
https://github.com/aerospike/aerospike-client-python/blob/59fa0d36aa899a164282643fe49b27d12aaf323f/aerospike_helpers/operations/hll_operations.py#L202-L219
import aerospike

OP_KEY = "op"
BIN_KEY = "bin"
HLL_POLICY_KEY = "hll_policy"
INDEX_BIT_COUNT_KEY = "index_bit_count"
MH_BIT_COUNT_KEY = "mh_bit_count"
VALUE_LIST_KEY = "value_list"


def hll_add(bin_name, values, index_bit_count=None, mh_bit_count=None, policy=None):
    op_dict = {
        OP_KEY: aerospike.OP_HLL_ADD,
        BIN_KEY: bin_name,
        VALUE_LIST_KEY: values,
        INDEX_BIT_COUNT_KEY: -1 if index_bit_count is None else index_bit_count,
        MH_BIT_COUNT_KEY: -1 if mh_bit_count is None else mh_bit_count
    }
    if policy:
        op_dict[HLL_POLICY_KEY] = policy

    return op_dict


def hll_describe(bin_name):
    op_dict = {
        OP_KEY: aerospike.OP_HLL_DESCRIBE,
        BIN_KEY: bin_name,
    }

    return op_dict
Apache License 2.0
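A sketch of sending hll_fold alongside the other HLL helpers from the row above via client.operate(); the host, namespace/set/key, bin name, and values are illustrative, and a reachable Aerospike server is assumed.

import aerospike
from aerospike_helpers.operations import hll_operations

client = aerospike.client({"hosts": [("127.0.0.1", 3000)]}).connect()
key = ("test", "demo", "hll_example")

ops = [
    hll_operations.hll_add("hll_bin", ["a", "b", "c"], index_bit_count=12),
    hll_operations.hll_fold("hll_bin", 10),   # fold 12 index bits down to 10
    hll_operations.hll_describe("hll_bin"),
]
_, _, bins = client.operate(key, ops)
print(bins)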
ant4g0nist/lisa.py
lisa.py
FileExtract.push_offset_and_seek
python
def push_offset_and_seek(self, offset):
    self.offsets.append(self.file.tell())
    self.file.seek(offset, 0)
Push the current file offset and seek to "offset"
https://github.com/ant4g0nist/lisa.py/blob/2d1f81a1b1286852aac96f56f2807ea462e9f728/lisa.py#L587-L590
import io import os import re import abc import cmd import sys import lldb import stat import uuid import fcntl import shlex import string import struct import fnmatch import pathlib import sqlite3 import termios import capstone import optparse import platform import functools import subprocess BLK = "\033[30m" BLU = "\033[34m" CYN = "\033[36m" GRN = "\033[32m" MAG = "\033[35m" RED = "\033[31m" RST = "\033[0m" YEL = "\033[33m" WHT = "\033[37m" BASE00 = '#657b83' VERTICAL_LINE = "\u2502" HORIZONTAL_LINE = "\u2500" __prompt__ = f"'(lisa:>) '" MINIMUM_RECURSION_LENGTH = 300 NO_CHANGE = 0 CHANGE_TO_EXPLOITABLE = 1 CHANGE_TO_NOT_EXPLOITABLE = 2 CPU_TYPE_I386 = 7 CPU_ARCH_ABI64 = 0x1000000 CPU_TYPE_X86_64 = CPU_TYPE_I386 | CPU_ARCH_ABI64 CPU_TYPE_ARM = 12 CPU_TYPE_ARM64 = CPU_TYPE_ARM | CPU_ARCH_ABI64 dlog = lambda msg: print(f"{GRN}{msg}{RST}") warnlog = lambda msg: print(f"{YEL}{msg}{RST}") errlog = lambda msg: print(f"{RED}{msg}{RST}") _, tty_columns = struct.unpack("hh", fcntl.ioctl(1, termios.TIOCGWINSZ, "1234")) def contextTitle(m=None): line_color= YEL msg_color = GRN if not m: print(f"{line_color}{HORIZONTAL_LINE * (tty_columns)} {line_color}{RST}") return trail_len = int((tty_columns - len(m) - 4)/2) title = f"{line_color}{'-'*(trail_len)}{GRN}[ {RED}{m} {GRN}]{line_color}{'-'*(trail_len)}{RST}" print(title) def get_host_pagesize(): host_machine = get_host_machine() target_arch = get_target_triple().split('-')[0] page_size = 0 if host_machine == target_arch: page_size = run_shell_command('getconf PAGE_SIZE').stdout.rstrip() elif host_machine=="arm64" and target_arch=="x86_64": page_size = run_shell_command('arch -x86_64 getconf PAGE_SIZE').stdout.rstrip() else: errlog("get_host_pagesize failed") return -1 return int(page_size) def get_host_machine(): return platform.machine() def get_host_arch(): if get_host_machine() == "arm64": return CPU_TYPE_ARM64 elif get_host_machine() == "x86_64": return CPU_TYPE_X86_64 def cpu_to_string(cpu): if cpu == CPU_TYPE_X86_64: return "x86_64" elif cpu == CPU_TYPE_ARM64: return "arm64" def get_target_triple(): return lldb.debugger.GetSelectedTarget().triple def get_target_arch(): arch = lldb.debugger.GetSelectedTarget().triple.split('-')[0] if arch == "arm64" or arch=="arm64e": return AARCH64() elif arch == "x86_64": return X8664() else: errlog(f"Architecture {arch} not supported") def load_manual(): global inst_map inst_map = {} archs = ["arm", "x86_64"] for arch in archs: path = pathlib.Path(__file__).parent.absolute() dbpath = os.path.join(path, "archs", arch + ".sql") con = sqlite3.connect(":memory:") con.text_factory = str con.executescript(open(dbpath).read()) cur = con.cursor() cur.execute("SELECT mnem, description FROM instructions") con.commit() rows = cur.fetchall() for row in rows: inst = row[0] lines = row[1].replace("\r\n", "\n").split("\n") inst_map[inst] = lines con.close() for (inst, data) in inst_map.items(): data = data[0] if data[0:3] == "-R:": ref = data[3:] if ref in inst_map: inst_map[inst] = inst_map[ref] dlog(f"Manual loaded for architecture: {arch}") return True def run_shell_command(command, shell=True): return subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell) def make_run_command(command): def runCommand(debugger, input, exe_ctx, result, _): command.result = result command.context = exe_ctx splitInput = command.lex(input) options = command.options() if len(options) == 0: if "--" not in splitInput: splitInput.insert(0, "--") parser = option_parser_for_command(command) (options, args) = 
parser.parse_args(splitInput) if len(args) > len(command.args()): overhead = len(args) - len(command.args()) head = args[: overhead + 1] args = [" ".join(head)] + args[-overhead:] if validate_args_for_command(args, command): command.run(args, options) runCommand.__doc__ = help_for_command(command) return runCommand def load_command(module, command, filename): func = make_run_command(command) name = command.name() key = filename + "_" + name helpText = ( command.description().strip().splitlines()[0] ) module._loadedFunctions[key] = func functionName = "__" + key lldb.debugger.HandleCommand( "script " + functionName + " = sys.modules['" + module.__name__ + "']._loadedFunctions['" + key + "']" ) lldb.debugger.HandleCommand( 'command script add --help "{help}" --function {function} {name}'.format( help=helpText.replace('"', '\\"'), function=functionName, name=name, ) ) def validate_args_for_command(args, command): if len(args) < len(command.args()): defaultArgs = [arg.default for arg in command.args()] defaultArgsToAppend = defaultArgs[len(args) :] index = len(args) for defaultArg in defaultArgsToAppend: if defaultArg: arg = command.args()[index] print("Whoops! You are missing the <" + arg.argName + "> argument.") print("\nUsage: " + usage_for_command(command)) return index += 1 args.extend(defaultArgsToAppend) return True def option_parser_for_command(command): parser = optparse.OptionParser() for argument in command.options(): if argument.boolean: parser.add_option( argument.shortName, argument.longName, dest=argument.argName, help=argument.help, action=("store_false" if argument.default else "store_true"), ) else: parser.add_option( argument.shortName, argument.longName, dest=argument.argName, help=argument.help, default=argument.default, ) return parser def help_for_command(command): help = command.description() argSyntax = "" optionSyntax = "" if command.args(): help += "\n\nArguments:" for arg in command.args(): help += "\n <" + arg.argName + ">; " if arg.argType: help += "Type: " + arg.argType + "; " help += arg.help argSyntax += " <" + arg.argName + ">" if command.options(): help += "\n\nOptions:" for option in command.options(): if option.longName and option.shortName: optionFlag = option.longName + "/" + option.shortName elif option.longName: optionFlag = option.longName else: optionFlag = option.shortName help += "\n " + optionFlag + " " if not option.boolean: help += "<" + option.argName + ">; Type: " + option.argType help += "; " + option.help optionSyntax += " [{name}{arg}]".format( name=(option.longName or option.shortName), arg=("" if option.boolean else ("=" + option.argName)), ) help += "\n\nSyntax: " + command.name() + optionSyntax + argSyntax help += "\n\nThis command is implemented as %s" % ( command.__class__.__name__, ) return help def usage_for_command(command): usage = command.name() for arg in command.args(): if arg.default: usage += " [" + arg.argName + "]" else: usage += " " + arg.argName return usage class CommandArgument: def __init__( self, short="", long="", arg="", type="", help="", default="", boolean=False): self.shortName = short self.longName = long self.argName = arg self.argType = type self.help = help self.default = default self.boolean = boolean class LLDBCommand: def name(self): return None def options(self): return [] def args(self): return [] def description(self): return "" def lex(self, commandLine): return shlex.split(commandLine) def run(self, arguments, option): pass colormap = [ 0x000000, 0x560000, 0x640000, 0x750000, 0x870000, 0x9b0000, 
0xb00000, 0xc60000, 0xdd0000, 0xf50000, 0xff0f0f, 0xff2828, 0xff4343, 0xff5e5e, 0xff7979, 0xfe9595, 0x4c1600, 0x561900, 0x641e00, 0x752300, 0x872800, 0x9b2e00, 0xb03400, 0xc63b00, 0xdd4200, 0xf54900, 0xff570f, 0xff6928, 0xff7b43, 0xff8e5e, 0xffa179, 0xfeb595, 0x4c3900, 0x564000, 0x644b00, 0x755700, 0x876500, 0x9b7400, 0xb08400, 0xc69400, 0xdda600, 0xf5b800, 0xffc30f, 0xffc928, 0xffd043, 0xffd65e, 0xffdd79, 0xfee495, 0x4c4c00, 0x565600, 0x646400, 0x757500, 0x878700, 0x9b9b00, 0xb0b000, 0xc6c600, 0xdddd00, 0xf5f500, 0xffff0f, 0xffff28, 0xffff43, 0xffff5e, 0xffff79, 0xfffe95, 0x324c00, 0x395600, 0x426400, 0x4e7500, 0x5a8700, 0x679b00, 0x75b000, 0x84c600, 0x93dd00, 0xa3f500, 0xafff0f, 0xb7ff28, 0xc0ff43, 0xc9ff5e, 0xd2ff79, 0xdbfe95, 0x1f4c00, 0x235600, 0x296400, 0x307500, 0x388700, 0x409b00, 0x49b000, 0x52c600, 0x5cdd00, 0x66f500, 0x73ff0f, 0x82ff28, 0x91ff43, 0xa1ff5e, 0xb1ff79, 0xc1fe95, 0x004c00, 0x005600, 0x006400, 0x007500, 0x008700, 0x009b00, 0x00b000, 0x00c600, 0x00dd00, 0x00f500, 0x0fff0f, 0x28ff28, 0x43ff43, 0x5eff5e, 0x79ff79, 0x95fe95, 0x004c19, 0x00561c, 0x006421, 0x007527, 0x00872d, 0x009b33, 0x00b03a, 0x00c642, 0x00dd49, 0x00f551, 0x0fff5f, 0x28ff70, 0x43ff81, 0x5eff93, 0x79ffa6, 0x95feb8, 0x004c4c, 0x005656, 0x006464, 0x007575, 0x008787, 0x009b9b, 0x00b0b0, 0x00c6c6, 0x00dddd, 0x00f5f5, 0x0ffffe, 0x28fffe, 0x43fffe, 0x5efffe, 0x79ffff, 0x95fffe, 0x00394c, 0x004056, 0x004b64, 0x005775, 0x006587, 0x00749b, 0x0084b0, 0x0094c6, 0x00a6dd, 0x00b8f5, 0x0fc3ff, 0x28c9ff, 0x43d0ff, 0x5ed6ff, 0x79ddff, 0x95e4fe, 0x00264c, 0x002b56, 0x003264, 0x003a75, 0x004387, 0x004d9b, 0x0058b0, 0x0063c6, 0x006edd, 0x007af5, 0x0f87ff, 0x2893ff, 0x43a1ff, 0x5eaeff, 0x79bcff, 0x95cafe, 0x00134c, 0x001556, 0x001964, 0x001d75, 0x002187, 0x00269b, 0x002cb0, 0x0031c6, 0x0037dd, 0x003df5, 0x0f4bff, 0x285eff, 0x4372ff, 0x5e86ff, 0x799aff, 0x95b0fe, 0x19004c, 0x1c0056, 0x210064, 0x270075, 0x2d0087, 0x33009b, 0x3a00b0, 0x4200c6, 0x4900dd, 0x5100f5, 0x5f0fff, 0x7028ff, 0x8143ff, 0x935eff, 0xa679ff, 0xb895fe, 0x33004c, 0x390056, 0x420064, 0x4e0075, 0x5a0087, 0x67009b, 0x7500b0, 0x8400c6, 0x9300dd, 0xa300f5, 0xaf0fff, 0xb728ff, 0xc043ff, 0xc95eff, 0xd279ff, 0xdb95fe, 0x4c004c, 0x560056, 0x640064, 0x750075, 0x870087, 0x9b009b, 0xb000b0, 0xc600c6, 0xdd00dd, 0xf500f5, 0xfe0fff, 0xfe28ff, 0xfe43ff, 0xfe5eff, 0xfe79ff, 0xfe95fe, 0x4c0032, 0x560039, 0x640042, 0x75004e, 0x87005a, 0x9b0067, 0xb00075, 0xc60084, 0xdd0093, 0xf500a3, 0xff0faf, 0xff28b7, 0xff43c0, 0xff5ec9, 0xff79d2, 0xffffff, ] def expand(v): return ( ((v)>>16 & 0xFF), ((v)>>8 & 0xFF), ((v)>>0 & 0xFF) ) def format_offset(offset): return '0x%5x%03x' % (offset >> 12, offset & 0xFFF) def visual_hexdump(buffer, start=0, end=None, columns=64): count = (end or -1) - start read = 0 while read != count: if end == None: to_read = io.DEFAULT_BUFFER_SIZE else: to_read = min(count - read, io.DEFAULT_BUFFER_SIZE) buf = buffer[read:read+to_read] for i in range(0, len(buf), columns*2): offset = start + read + i print(format_offset(offset), end=' ') for j in range(0, columns): if i + j >= len(buf): break elif i + j + columns >= len(buf): print('\x1B[0m\x1B[38;2;%d;%d;%dm▀' % expand(colormap[buf[i + j]]), end='') else: print('\x1B[38;2;%d;%d;%dm\x1B[48;2;%d;%d;%dm▀' % (expand(colormap[buf[i + j]]) + expand(colormap[buf[i + j + columns]])), end='') print('\x1B[m') read += len(buf) HEADER = '┌────────────────┬─────────────────────────┬─────────────────────────┬────────┬────────┐' FOOTER = 
RST+'└────────────────┴─────────────────────────┴─────────────────────────┴────────┴────────┘' LINE_FORMATTER = '│' + '' + '{:016x}' + '│ {}' + '│{}' + '│' + RST def hexmod(b: int) -> str: return hex(b)[2:].rjust(2, '0') def colored(b: int) -> (str, str): ch = chr(b) hx = hexmod(b) if '\x00' == ch: return RST + hx , RST + "." elif ch in string.ascii_letters + string.digits + string.punctuation: return CYN + hx, CYN + ch elif ch in string.whitespace: return GRN + hx, ' ' if ' ' == ch else GRN + '_' return YEL + hx, YEL + '.' def hexdump(buf, address): cache = {hexmod(b): colored(b) for b in range(256)} cache[' '] = (' ', ' ') print(HEADER) cur = 0 row = address line = buf[cur:cur+16] while line: line_hex = line.hex().ljust(32) hexbytes = '' printable = '' for i in range(0, len(line_hex), 2): hbyte, abyte = cache[line_hex[i:i+2]] hexbytes += hbyte + ' ' if i != 14 else hbyte + ' ┊ ' printable += abyte if i != 14 else abyte + '┊' print(LINE_FORMATTER.format(row, hexbytes, printable)) row += 0x10 cur += 0x10 line = buf[cur:cur+16] print(FOOTER) def swap_unpack_char(): if struct.pack('H', 1).startswith("\x00"): return '<' return '>' def dump_hex_bytes(addr, s, bytes_per_line=8): i = 0 line = '' for ch in s: if (i % bytes_per_line) == 0: if line: print(line) line = '%#8.8x: ' % (addr + i) line += "%02x " % ch i += 1 print(line) def dump_hex_byte_string_diff(addr, a, b, bytes_per_line=16): i = 0 line = '' a_len = len(a) b_len = len(b) if a_len < b_len: max_len = b_len else: max_len = a_len tty_colors = TerminalColors(True) for i in range(max_len): ch = None if i < a_len: ch_a = a[i] ch = ch_a else: ch_a = None if i < b_len: ch_b = b[i] if not ch: ch = ch_b else: ch_b = None mismatch = ch_a != ch_b if (i % bytes_per_line) == 0: if line: print(line) line = '%#8.8x: ' % (addr + i) if mismatch: line += RED line += "%02X " % ord(ch) if mismatch: line += RST i += 1 print(line) def evaluateInputExpression(expression, printErrors=True): frame = ( lldb.debugger.GetSelectedTarget() .GetProcess() .GetSelectedThread() .GetSelectedFrame() ) options = lldb.SBExpressionOptions() options.SetTrapExceptions(False) value = frame.EvaluateExpression(expression, options) error = value.GetError() if printErrors and error.Fail(): errlog(error) return value class FileExtract: def __init__(self, f, b='='): self.file = f self.byte_order = b self.offsets = list() def set_byte_order(self, b): if b == 'big': self.byte_order = '>' elif b == 'little': self.byte_order = '<' elif b == 'swap': self.byte_order = swap_unpack_char() elif b == 'native': self.byte_order = '=' elif b == '<' or b == '>' or b == '@' or b == '=': self.byte_order = b else: print("error: invalid byte order specified: '%s'" % b) def is_in_memory(self): return False def seek(self, offset, whence=0): if self.file: return self.file.seek(offset, whence) raise ValueError def tell(self): if self.file: return self.file.tell() raise ValueError def read_size(self, byte_size): s = self.file.read(byte_size) if len(s) != byte_size: return None return s
Apache License 2.0
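The visual_hexdump routine in the context above packs two byte rows into each terminal line by drawing '▀' with a truecolor foreground for the top row and a truecolor background for the bottom row. Below is a minimal standalone sketch of that trick; the helper names are illustrative and not part of the original file.

def rgb(color):
    # Unpack a 0xRRGGBB integer into an (r, g, b) tuple, like expand() above.
    return (color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF

def half_block(top, bottom):
    # The foreground colours the upper half of '▀', the background colours the
    # lower half, so one character cell shows two vertically stacked byte colours.
    return '\x1b[38;2;%d;%d;%dm\x1b[48;2;%d;%d;%dm▀\x1b[0m' % (rgb(top) + rgb(bottom))

print(half_block(0xff0000, 0x0000ff))  # red over blue in a single cell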
daboross/screeps-starter-python
src/defs/classes/misc_obj.py
RoomObject.__init__
python
def __init__(self, effects: _Effect, pos: RoomPosition, room: Room) -> None:
    self.effects = effects
    self.pos = pos
    self.room = room
WARNING: This constructor is purely for type completion, and does not exist in the game.
https://github.com/daboross/screeps-starter-python/blob/fe93bcd4eb3e757f0977627ce04f296c15fb51e6/src/defs/classes/misc_obj.py#L41-L47
from typing import Optional, Type, Union, Dict, List from .memory import _Memory from .room import Room, RoomPosition from .structures import Structure from .creep import Creep class _Effect: def __init__(self, effect: int, level: Optional[int], ticksRemaining: int): self.effect = effect self.level = level self.ticksRemaining = ticksRemaining _Effect = List[_Effect] class RoomObject:
MIT License
forseti-security/forseti-security
google/cloud/forseti/scanner/audit/resource_rules_engine.py
ResourceRulesEngine.build_rule_book
python
def build_rule_book(self, global_configs=None):
    self.rule_book = ResourceRuleBook(self._load_rule_definitions())
Build ResourceRuleBook from the rules definition file.

Args:
    global_configs (dict): Global configurations.
https://github.com/forseti-security/forseti-security/blob/de5d0f4d047c293a2a72545a76c3783980865551/google/cloud/forseti/scanner/audit/resource_rules_engine.py#L51-L57
from builtins import object import collections from google.cloud.forseti.common.util import logger from google.cloud.forseti.scanner.audit import base_rules_engine from google.cloud.forseti.scanner.audit import errors from google.cloud.forseti.services import utils LOGGER = logger.get_logger(__name__) _SUPPORTED_MODES = {'required'} RuleViolation = collections.namedtuple( 'RuleViolation', ['resource_id', 'resource_name', 'resource_type', 'full_name', 'rule_index', 'rule_name', 'violation_type', 'violation_data', 'resource_data'] ) class ResourceRulesEngine(base_rules_engine.BaseRulesEngine): def __init__(self, rules_file_path, snapshot_timestamp=None): super(ResourceRulesEngine, self).__init__( rules_file_path=rules_file_path) self.rule_book = None
Apache License 2.0
xilinx/pyxir
python/pyxir/contrib/target/components/DPUCZDX8G/ultra96_op_support.py
upsampling_op_support
python
def upsampling_op_support(X, bXs, tXs):
    method = X.attrs["method"]
    return method == "nearest_neighbor"
Check whether we can execute the provided Upsampling2D operator on the Ultra96 target
https://github.com/xilinx/pyxir/blob/bef661d6d77adcdbd2cf4163f2cf3a1d31d40406/python/pyxir/contrib/target/components/DPUCZDX8G/ultra96_op_support.py#L359-L366
import math import logging import pyxir logger = logging.getLogger("pyxir") @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "BatchNorm") def batchnorm_op_support(X, bXs, tXs): axis = X.attrs["axis"] channels = X.shapes[axis] return channels >= 1 and channels <= 2560 @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "BiasAdd") def biasadd_op_support(X, bXs, tXs): axis = X.attrs["axis"] channels = X.shapes[axis] return channels >= 1 and channels <= 2560 @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Cast") def cast_op_support(X, bXs, tXs): dtype = X.attrs["dtype"] return dtype == "float32" @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Concat") def concat_op_support(X, bXs, tXs): axis = X.attrs["axis"] channels = X.shapes[axis] return channels >= 1 and channels <= 2560 @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Convolution") def conv2d_op_support(X, bXs, tXs): data_layout = X.attrs["data_layout"] kernel_h, kernel_w = X.attrs["kernel_size"] stride_h, stride_w = X.attrs["strides"] dilation_h, dilation_w = X.attrs["dilation"] padding_h, padding_w = ( X.attrs["padding"][data_layout.index("H")], X.attrs["padding"][data_layout.index("W")], ) padding_h_top, padding_h_bot = padding_h padding_w_left, padding_w_right = padding_w ch_in, ch_out = X.attrs["channels"] groups = X.attrs["groups"] return ( kernel_h >= 1 and kernel_h <= 16 and kernel_w >= 1 and kernel_w <= 16 and stride_h >= 1 and stride_h <= 4 and stride_w >= 1 and stride_w <= 4 and padding_h_top >= 0 and padding_h_top <= kernel_h - 1 and padding_h_bot >= 0 and padding_h_bot <= kernel_h - 1 and padding_w_left >= 0 and padding_w_left <= kernel_w - 1 and padding_w_right >= 0 and padding_w_right <= kernel_w - 1 and ch_in >= 1 and ch_in <= 2560 and ch_out >= 1 and ch_out <= 2560 and dilation_h * ch_in <= 2560 and (dilation_h == 1 or stride_h == 1) and dilation_w * ch_in <= 2560 and (dilation_w == 1 or stride_w == 1) ) @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Conv2DTranspose") def conv2d_transpose_op_support(X, bXs, tXs): data_layout = X.attrs["data_layout"] kernel_h, kernel_w = X.attrs["kernel_size"] stride_h, stride_w = X.attrs["strides"] dilation_h, dilation_w = X.attrs["dilation"] padding_h, padding_w = ( X.attrs["padding"][data_layout.index("H")], X.attrs["padding"][data_layout.index("W")], ) padding_h_top, padding_h_bot = padding_h padding_w_left, padding_w_right = padding_w ch_in, ch_out = X.attrs["channels"] groups = X.attrs["groups"] return ( kernel_h >= 1 and kernel_h <= 16 and kernel_w >= 1 and kernel_w <= 16 and stride_w * ch_out >= 1 and stride_w * ch_out <= 2560 and stride_h >= 1 and padding_h_top >= 0 and padding_h_top <= kernel_h - 1 and padding_h_bot >= 0 and padding_h_bot <= kernel_h - 1 and padding_w_left >= 0 and padding_w_left <= kernel_w - 1 and padding_w_right >= 0 and padding_w_right <= kernel_w - 1 and ch_in >= 1 and ch_in <= 2560 and ch_out >= 1 and ch_out <= 2560 and dilation_h * ch_in <= 2560 and (dilation_h == 1 or stride_h == 1) and dilation_w * ch_in <= 2560 and (dilation_w == 1 or stride_w == 1) ) @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "DPU") def DPUCZDX8G_op_support(X, bXs, tXs): return True @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Eltwise") def eltwise_op_support(X, bXs, tXs): return True @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Pad") def pad_op_support(X, bXs, tXs): if len(tXs) == 1 and tXs[0].type[0] in ["Pooling", "Convolution"]: t_data_layout = tXs[0].attrs["data_layout"] t_type = tXs[0].type[0] padding_h, padding_w 
= ( X.attrs["padding"][t_data_layout.index("H")], X.attrs["padding"][t_data_layout.index("W")], ) padding_h_top, padding_h_bot = padding_h padding_w_left, padding_w_right = padding_w if t_type == "Pooling": return ( padding_h_top >= 0 and padding_h_top <= 4 and padding_h_bot >= 0 and padding_h_bot <= 4 and padding_w_left >= 0 and padding_w_left <= 4 and padding_w_right >= 0 and padding_w_right <= 4 ) elif t_type == "Convolution": t_kernel_h, t_kernel_w = tXs[0].attrs["kernel_size"] return ( padding_h_top >= 0 and padding_h_top <= t_kernel_h - 1 and padding_h_bot >= 0 and padding_h_bot <= t_kernel_h - 1 and padding_w_left >= 0 and padding_w_left <= t_kernel_w - 1 and padding_w_right >= 0 and padding_w_right <= t_kernel_w - 1 ) return False return False @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Pooling") def pooling_op_support(X, bXs, tXs): data_layout = X.attrs["data_layout"] kernel_h, kernel_w = X.attrs["kernel_size"] stride_h, stride_w = X.attrs["strides"] padding_h, padding_w = ( X.attrs["padding"][data_layout.index("H")], X.attrs["padding"][data_layout.index("W")], ) padding_h_top, padding_h_bot = padding_h padding_w_left, padding_w_right = padding_w channels = X.shapes[data_layout.index("C")] return ( kernel_h >= 1 and kernel_h <= 8 and kernel_w >= 1 and kernel_w <= 8 and stride_h >= 1 and stride_h <= 4 and stride_w >= 1 and stride_w <= 4 and padding_h_top >= 0 and padding_h_top <= 4 and padding_h_bot >= 0 and padding_h_bot <= 4 and padding_w_left >= 0 and padding_w_left <= 4 and padding_w_right >= 0 and padding_w_right <= 4 and channels >= 1 and channels <= 2560 ) @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Mean") def mean_op_support(X, bXs, tXs): axes = X.attrs["axes"] keepdims = X.attrs["keepdims"] return len(axes) == 2 and keepdims @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "LeakyReLU") def leaky_relu_op_support(X, bXs, tXs): alpha = X.attrs["alpha"] return math.isclose(alpha, 0.1, rel_tol=1e-5) @pyxir.register_op_support_check('DPUCZDX8G-ultra96', 'ReLU') def relu_op_support(X, bXs, tXs): assert len(bXs) == 1 bX = bXs[0] return bX.type[0] in set( [ "Convolution", "Conv2DTranspose", "Eltwise", "BatchNorm", "BiasAdd", "Scale", "Pooling", ] ) @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "ReLU6") def relu6_op_support(X, bXs, tXs): return True @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Scale") def scale_op_support(X, bXs, tXs): axis = X.attrs["axis"] channels = X.shapes[axis] return channels > 1 and channels <= 2560 @pyxir.register_op_support_check("DPUCZDX8G-ultra96", "Upsampling2D")
Apache License 2.0
llnl/merlin
merlin/spec/specification.py
MerlinSpec.get_task_queues
python
def get_task_queues(self):
    from merlin.config.configfile import CONFIG

    steps = self.get_study_steps()
    queues = {}
    for step in steps:
        if "task_queue" in step.run and CONFIG.celery.omit_queue_tag:
            queues[step.name] = step.run["task_queue"]
        elif "task_queue" in step.run:
            queues[step.name] = CONFIG.celery.queue_tag + step.run["task_queue"]
    return queues
Returns a dictionary of steps and their corresponding task queues.
https://github.com/llnl/merlin/blob/b03640006e156059b801fe2179f28760fb38029a/merlin/spec/specification.py#L330-L341
import logging import os import shlex from io import StringIO import yaml from maestrowf.datastructures import YAMLSpecification from merlin.spec import all_keys, defaults LOG = logging.getLogger(__name__) class MerlinSpec(YAMLSpecification): def __init__(self): super(MerlinSpec, self).__init__() @property def yaml_sections(self): return { "description": self.description, "batch": self.batch, "env": self.environment, "study": self.study, "global.parameters": self.globals, "merlin": self.merlin, } @property def sections(self): return { "description": self.description, "batch": self.batch, "environment": self.environment, "study": self.study, "globals": self.globals, "merlin": self.merlin, } @classmethod def load_specification(cls, filepath, suppress_warning=True): spec = super(MerlinSpec, cls).load_specification(filepath) with open(filepath, "r") as f: spec.merlin = MerlinSpec.load_merlin_block(f) spec.specroot = os.path.dirname(spec.path) spec.process_spec_defaults() if not suppress_warning: spec.warn_unrecognized_keys() return spec @classmethod def load_spec_from_string(cls, string): spec = super(MerlinSpec, cls).load_specification_from_stream(StringIO(string)) spec.merlin = MerlinSpec.load_merlin_block(StringIO(string)) spec.specroot = None spec.process_spec_defaults() return spec @staticmethod def load_merlin_block(stream): try: merlin_block = yaml.safe_load(stream)["merlin"] except KeyError: merlin_block = {} warning_msg: str = ( "Workflow specification missing \n " "encouraged 'merlin' section! Run 'merlin example' for examples.\n" "Using default configuration with no sampling." ) LOG.warning(warning_msg) return merlin_block def process_spec_defaults(self): for name, section in self.sections.items(): if section is None: setattr(self, name, {}) MerlinSpec.fill_missing_defaults(self.batch, defaults.BATCH["batch"]) MerlinSpec.fill_missing_defaults(self.environment, defaults.ENV["env"]) MerlinSpec.fill_missing_defaults( self.globals, defaults.PARAMETER["global.parameters"] ) defaults.STUDY_STEP_RUN["shell"] = self.batch["shell"] for step in self.study: MerlinSpec.fill_missing_defaults(step["run"], defaults.STUDY_STEP_RUN) MerlinSpec.fill_missing_defaults(self.merlin, defaults.MERLIN["merlin"]) if self.merlin["resources"]["workers"] is None: self.merlin["resources"]["workers"] = {"default_worker": defaults.WORKER} else: for worker, vals in self.merlin["resources"]["workers"].items(): MerlinSpec.fill_missing_defaults(vals, defaults.WORKER) if self.merlin["samples"] is not None: MerlinSpec.fill_missing_defaults(self.merlin["samples"], defaults.SAMPLES) @staticmethod def fill_missing_defaults(object_to_update, default_dict): def recurse(result, defaults): if not isinstance(defaults, dict): return for key, val in defaults.items(): if (key not in result) or ( (result[key] is None) and (defaults[key] is not None) ): result[key] = val else: recurse(result[key], val) recurse(object_to_update, default_dict) def warn_unrecognized_keys(self): MerlinSpec.check_section("description", self.description, all_keys.DESCRIPTION) MerlinSpec.check_section("batch", self.batch, all_keys.BATCH) MerlinSpec.check_section("env", self.environment, all_keys.ENV) for param, contents in self.globals.items(): MerlinSpec.check_section("global.parameters", contents, all_keys.PARAMETER) for step in self.study: MerlinSpec.check_section(step["name"], step, all_keys.STUDY_STEP) MerlinSpec.check_section( step["name"] + ".run", step["run"], all_keys.STUDY_STEP_RUN ) MerlinSpec.check_section("merlin", self.merlin, 
all_keys.MERLIN) MerlinSpec.check_section( "merlin.resources", self.merlin["resources"], all_keys.MERLIN_RESOURCES ) for worker, contents in self.merlin["resources"]["workers"].items(): MerlinSpec.check_section( "merlin.resources.workers " + worker, contents, all_keys.WORKER ) if self.merlin["samples"]: MerlinSpec.check_section( "merlin.samples", self.merlin["samples"], all_keys.SAMPLES ) @staticmethod def check_section(section_name, section, all_keys): diff = set(section.keys()).difference(all_keys) for extra in diff: LOG.warn( f"Unrecognized key '{extra}' found in spec section '{section_name}'." ) def dump(self): tab = 3 * " " result = self._dict_to_yaml(self.yaml_sections, "", [], tab) while "\n\n\n" in result: result = result.replace("\n\n\n", "\n\n") try: yaml.safe_load(result) except BaseException as e: raise ValueError(f"Error parsing provenance spec:\n{e}") return result def _dict_to_yaml(self, obj, string, key_stack, tab, newline=True): if obj is None: return "" lvl = len(key_stack) - 1 if isinstance(obj, str): return self._process_string(obj, lvl, tab) elif isinstance(obj, bool): return str(obj).lower() elif not (isinstance(obj, list) or isinstance(obj, dict)): return obj else: return self._process_dict_or_list(obj, string, key_stack, lvl, tab) def _process_string(self, obj, lvl, tab): split = obj.splitlines() if len(split) > 1: obj = "|\n" + tab * (lvl + 1) + ("\n" + tab * (lvl + 1)).join(split) return obj def _process_dict_or_list(self, obj, string, key_stack, lvl, tab): from copy import deepcopy list_offset = 2 * " " if isinstance(obj, list): n = len(obj) use_hyphens = key_stack[-1] in ["paths", "sources", "git", "study"] if not use_hyphens: string += "[" else: string += "\n" for i, elem in enumerate(obj): key_stack = deepcopy(key_stack) key_stack.append("elem") if use_hyphens: string += ( (lvl + 1) * tab + "- " + str(self._dict_to_yaml(elem, "", key_stack, tab)) + "\n" ) else: string += str( self._dict_to_yaml(elem, "", key_stack, tab, newline=(i != 0)) ) if n > 1 and i != len(obj) - 1: string += ", " key_stack.pop() if not use_hyphens: string += "]" else: if len(key_stack) > 0 and key_stack[-1] != "elem": string += "\n" i = 0 for k, v in obj.items(): key_stack = deepcopy(key_stack) key_stack.append(k) if len(key_stack) > 1 and key_stack[-2] == "elem" and i == 0: string += "" elif "elem" in key_stack: string += list_offset + (tab * lvl) else: string += tab * (lvl + 1) string += ( str(k) + ": " + str(self._dict_to_yaml(v, "", key_stack, tab)) + "\n" ) key_stack.pop() i += 1 return string
MIT License
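As a rough illustration of the naming rule in get_task_queues above: each step's task_queue is returned as-is when omit_queue_tag is set, otherwise it is prefixed with queue_tag. The CONFIG stand-in and step values below are made up for the sketch and are not Merlin's real configuration.

from types import SimpleNamespace

celery = SimpleNamespace(queue_tag="[merlin]_", omit_queue_tag=False)
steps = {"build": "build_queue", "train": "train_queue"}  # hypothetical step -> task_queue

queues = {
    name: queue if celery.omit_queue_tag else celery.queue_tag + queue
    for name, queue in steps.items()
}
print(queues)  # {'build': '[merlin]_build_queue', 'train': '[merlin]_train_queue'}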
google/google-apputils
google/apputils/humanize.py
NaturalSortKey
python
def NaturalSortKey(data):
    segments = DIGIT_SPLITTER(data)
    for i, value in enumerate(segments):
        if value.isdigit():
            segments[i] = int(value)
    return segments
Key function for "natural sort" ordering.

This key function results in a natural sort. For example:
- ['1', '3', '20'] (not ['1', '20', '3']).
- ['Model 9', 'Model 70 SE', 'Model 70 SE2']
  (not ['Model 70 SE', 'Model 70 SE2', 'Model 9']).

Usage:
    new_list = sorted(old_list, key=humanize.NaturalSortKey)
or
    list_sort_in_place.sort(key=humanize.NaturalSortKey)

Based on code by Steven Bazyl <sbazyl@google.com>.

Args:
    data: str, The key being compared in a sort.

Returns:
    A list which is comparable to other lists for the purpose of sorting.
https://github.com/google/google-apputils/blob/ce33c1bea34da107bf7d336798bd688bd87a42f5/google/apputils/humanize.py#L437-L462
import datetime import math import re SIBILANT_ENDINGS = frozenset(['sh', 'ss', 'tch', 'ax', 'ix', 'ex']) DIGIT_SPLITTER = re.compile(r'\d+|\D+').findall SPECIAL_PLURALS = { 'index': 'indices', 'matrix': 'matrices', 'vertex': 'vertices', } VOWELS = frozenset('AEIOUaeiou') def Commas(value): if value < 0: sign = '-' value = -value else: sign = '' result = [] while value >= 1000: result.append('%03d' % (value % 1000)) value /= 1000 result.append('%d' % value) return sign + ','.join(reversed(result)) def Plural(quantity, singular, plural=None): return '%d %s' % (quantity, PluralWord(quantity, singular, plural)) def PluralWord(quantity, singular, plural=None): if quantity == 1: return singular if plural: return plural if singular in SPECIAL_PLURALS: return SPECIAL_PLURALS[singular] for ending in SIBILANT_ENDINGS: if singular.endswith(ending): return '%ses' % singular if singular.endswith('o') and singular[-2:-1] not in VOWELS: return '%ses' % singular if singular.endswith('y') and singular[-2:-1] not in VOWELS: return '%sies' % singular[:-1] return '%ss' % singular def WordSeries(words, conjunction='and'): num_words = len(words) if num_words == 0: return '' elif num_words == 1: return words[0] elif num_words == 2: return (' %s ' % conjunction).join(words) else: return '%s, %s %s' % (', '.join(words[:-1]), conjunction, words[-1]) def AddIndefiniteArticle(noun): if not noun: raise ValueError('argument must be a word: {!r}'.format(noun)) if noun[0] in VOWELS: return 'an ' + noun else: return 'a ' + noun def DecimalPrefix(quantity, unit, precision=1, min_scale=0, max_scale=None): return _Prefix(quantity, unit, precision, DecimalScale, min_scale=min_scale, max_scale=max_scale) def BinaryPrefix(quantity, unit, precision=1): return _Prefix(quantity, unit, precision, BinaryScale) def _Prefix(quantity, unit, precision, scale_callable, **args): separator = ' ' if unit else '' if not quantity: return '0%s%s' % (separator, unit) if quantity in [float('inf'), float('-inf')] or math.isnan(quantity): return '%f%s%s' % (quantity, separator, unit) scaled_quantity, scaled_unit = scale_callable(quantity, unit, **args) if scaled_unit: separator = ' ' print_pattern = '%%.%df%%s%%s' % max(0, (precision - int( math.log(abs(scaled_quantity), 10)) - 1)) return print_pattern % (scaled_quantity, separator, scaled_unit) DECIMAL_PREFIXES = ('y', 'z', 'a', 'f', 'p', 'n', u'µ', 'm', '', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') DECIMAL_MIN_SCALE = -8 DECIMAL_MAX_SCALE = 8 def DecimalScale(quantity, unit, min_scale=0, max_scale=None): if min_scale is None or min_scale < DECIMAL_MIN_SCALE: min_scale = DECIMAL_MIN_SCALE if max_scale is None or max_scale > DECIMAL_MAX_SCALE: max_scale = DECIMAL_MAX_SCALE powers = DECIMAL_PREFIXES[ min_scale - DECIMAL_MIN_SCALE:max_scale - DECIMAL_MIN_SCALE + 1] return _Scale(quantity, unit, 1000, powers, min_scale) def BinaryScale(quantity, unit): return _Scale(quantity, unit, 1024, ('Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')) def _Scale(quantity, unit, multiplier, prefixes=None, min_scale=None): if (not prefixes or not quantity or math.isnan(quantity) or quantity in [float('inf'), float('-inf')]): return float(quantity), unit if min_scale is None: min_scale = 0 prefixes = ('',) + tuple(prefixes) value, prefix = quantity, '' for power, prefix in enumerate(prefixes, min_scale): value = float(quantity) * multiplier ** -power if abs(value) < multiplier: break return value, prefix + unit FRACTIONS = { 3: (None, u'⅓', u'⅔', None), 5: (None, u'⅕', u'⅖', u'⅗', u'⅘', None), 8: (None, u'⅛', u'¼', 
u'⅜', u'½', u'⅝', u'¾', u'⅞', None), } FRACTION_ROUND_DOWN = 1.0 / (max(FRACTIONS.keys()) * 2.0) FRACTION_ROUND_UP = 1.0 - FRACTION_ROUND_DOWN def PrettyFraction(number, spacer=''): if number < -FRACTION_ROUND_DOWN: return u'-%s' % PrettyFraction(-number) number = abs(number) rounded = int(number) fract = number - rounded if fract >= FRACTION_ROUND_UP: return str(rounded + 1) errors_fractions = [] for denominator, fraction_elements in FRACTIONS.items(): numerator = int(round(denominator * fract)) error = abs(fract - (float(numerator) / float(denominator))) errors_fractions.append((error, fraction_elements[numerator])) unused_error, fraction_text = min(errors_fractions) if rounded and fraction_text: return u'%d%s%s' % (rounded, spacer, fraction_text) if rounded: return str(rounded) if fraction_text: return fraction_text return u'0' def Duration(duration, separator=' '): try: delta = datetime.timedelta(seconds=duration) except OverflowError: return '>=' + TimeDelta(datetime.timedelta.max) return TimeDelta(delta, separator=separator) def TimeDelta(delta, separator=' '): parts = [] seconds = delta.seconds if delta.days: parts.append('%dd' % delta.days) if seconds >= 3600: parts.append('%dh' % (seconds // 3600)) seconds %= 3600 if seconds >= 60: parts.append('%dm' % (seconds // 60)) seconds %= 60 seconds += delta.microseconds / 1e6 if seconds or not parts: parts.append('%gs' % seconds) return separator.join(parts)
Apache License 2.0
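A small self-contained demonstration of the key function above, re-declaring the same DIGIT_SPLITTER regex so the snippet runs on its own:

import re

DIGIT_SPLITTER = re.compile(r'\d+|\D+').findall

def natural_sort_key(data):
    segments = DIGIT_SPLITTER(data)
    for i, value in enumerate(segments):
        if value.isdigit():
            segments[i] = int(value)  # digit runs compare numerically
    return segments

names = ['Model 70 SE', 'Model 9', 'Model 70 SE2']
print(sorted(names))                        # ['Model 70 SE', 'Model 70 SE2', 'Model 9']
print(sorted(names, key=natural_sort_key))  # ['Model 9', 'Model 70 SE', 'Model 70 SE2']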
google/containerregistry
client/v2_2/docker_digest_.py
SHA256
python
def SHA256(content, prefix='sha256:'):
    return prefix + hashlib.sha256(content).hexdigest()
Return 'sha256:' + hex(sha256(content)).
https://github.com/google/containerregistry/blob/8a11dc8c53003ecf5b72ffaf035ba280109356ac/client/v2_2/docker_digest_.py#L25-L27
from __future__ import absolute_import from __future__ import division from __future__ import print_function import hashlib
Apache License 2.0
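Usage is a one-liner; the expected output below is the well-known SHA-256 of the empty byte string, shown only to illustrate the 'sha256:<hex>' shape of the result. The helper is re-declared so the snippet stands alone.

import hashlib

def sha256_digest(content, prefix='sha256:'):
    # Same logic as SHA256() above, re-declared so this snippet is standalone.
    return prefix + hashlib.sha256(content).hexdigest()

print(sha256_digest(b''))
# sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855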
machine-learning-exchange/mlx
api/server/swagger_server/models/api_notebook.py
ApiNotebook.created_at
python
def created_at(self) -> datetime:
    return self._created_at
Gets the created_at of this ApiNotebook.

:return: The created_at of this ApiNotebook.
:rtype: datetime
https://github.com/machine-learning-exchange/mlx/blob/be1503c45538dac1a8188560fbec4a07b2a367bf/api/server/swagger_server/models/api_notebook.py#L122-L129
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from swagger_server.models.api_asset import ApiAsset from swagger_server.models.api_metadata import ApiMetadata from swagger_server.models.api_parameter import ApiParameter from swagger_server import util class ApiNotebook(ApiAsset): def __init__(self, id: str=None, created_at: datetime=None, name: str=None, description: str=None, featured: bool=None, publish_approved: bool=None, related_assets: List[str]=None, filter_categories: Dict[str, str]=None, url: str=None, metadata: ApiMetadata=None, parameters: List[ApiParameter]=None): self.swagger_types = { 'id': str, 'created_at': datetime, 'name': str, 'description': str, 'featured': bool, 'publish_approved': bool, 'related_assets': List[str], 'filter_categories': Dict[str, str], 'url': str, 'metadata': ApiMetadata, 'parameters': List[ApiParameter] } self.attribute_map = { 'id': 'id', 'created_at': 'created_at', 'name': 'name', 'description': 'description', 'featured': 'featured', 'publish_approved': 'publish_approved', 'related_assets': 'related_assets', 'filter_categories': 'filter_categories', 'url': 'url', 'metadata': 'metadata', 'parameters': 'parameters' } self._id = id self._created_at = created_at self._name = name self._description = description self._featured = featured self._publish_approved = publish_approved self._related_assets = related_assets self._filter_categories = filter_categories self._url = url self._metadata = metadata self._parameters = parameters @classmethod def from_dict(cls, dikt) -> 'ApiNotebook': return util.deserialize_model(dikt, cls) @property def id(self) -> str: return self._id @id.setter def id(self, id: str): self._id = id @property
Apache License 2.0
hyperledger/indy-plenum
plenum/server/consensus/view_change_storages.py
ViewChangeVotesForNode.view_change
python
def view_change(self) -> Optional[ViewChange]:
    return self._view_change
Returns received view change
https://github.com/hyperledger/indy-plenum/blob/406afdeca1630be688f803a3cba15115faa20e2b/plenum/server/consensus/view_change_storages.py#L38-L42
from _sha256 import sha256 from collections import defaultdict from typing import Optional, List, Tuple from common.serializers.json_serializer import JsonSerializer from plenum.common.messages.internal_messages import VoteForViewChange from plenum.common.messages.node_messages import ViewChange, ViewChangeAck, NewView from plenum.common.types import f from plenum.server.consensus.utils import replica_name_to_node_name def view_change_digest(msg: ViewChange) -> str: msg_as_dict = msg._asdict() serialized = JsonSerializer().dumps(msg_as_dict) return sha256(serialized).hexdigest() class ViewChangeVotesForNode: def __init__(self, quorums): self._view_change = None self._digest = None self._quorums = quorums self._acks = defaultdict(set) @property def digest(self) -> Optional[str]: return self._digest @property
Apache License 2.0
pymir3/pymir3
mir3/lib/mir/features.py
energy
python
def energy(A):
    return numpy.sum(A**2, 0)
Energy of each frame
https://github.com/pymir3/pymir3/blob/c1bcca66a5ef1ff0ebd6373e3820e72dee6b0b70/mir3/lib/mir/features.py#L17-L19
import numpy def flatness(A): return numpy.exp( numpy.mean(numpy.log(numpy.maximum(A, 0.0001)), 0) ) / (numpy.mean(A, 0) + (10**(-6)))
MIT License
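A quick worked example (not from the repository) of what summing squared magnitudes along axis 0 means when frames are stored as columns:

import numpy

# Two frames stored as columns: frame 0 = [1, 2, 3], frame 1 = [0, 2, 0].
A = numpy.array([[1.0, 0.0],
                 [2.0, 2.0],
                 [3.0, 0.0]])

print(numpy.sum(A**2, 0))  # [14.  4.] -> per-frame energies 1+4+9 and 0+4+0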
robinandeer/puzzle
tests/conftest.py
gemini_db_path
python
def gemini_db_path(request):
    hapmap = "tests/fixtures/HapMapFew.db"
    return hapmap
Return the path to the hapmap gemini db
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/tests/conftest.py#L102-L105
import os import shutil import pytest import logging from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from puzzle.models import (Variant, DotDict, Individual) from puzzle.models.sql import BASE from puzzle.models.sql import Case as SqlCase from puzzle.plugins import VcfPlugin, SqlStore from puzzle.utils import (get_cases, get_header) from puzzle.log import configure_stream root_logger = configure_stream() logger = logging.getLogger(__name__) @pytest.fixture def vcf(): db = VcfPlugin() return db @pytest.fixture(scope='function') def puzzle_dir(request, dir_path): db_path = os.path.join(dir_path, 'puzzle_db.sqlite3') logger.debug("db path is: {}".format(db_path)) resource_dir = os.path.join(dir_path, 'resources') logger.debug("resource dir is: {}".format(resource_dir)) logger.debug("Create directory: {0}".format(resource_dir)) os.makedirs(resource_dir) logger.debug('Directory created') logger.debug('Connect to database and create tables') store = SqlStore(db_path) store.set_up(reset=False) return dir_path @pytest.fixture(scope='function') def gemini_path(request): gemini_db = "tests/fixtures/HapMapFew.db" return gemini_db @pytest.fixture(scope='function') def vcf_file(request): hapmap = "tests/fixtures/hapmap.vcf" return hapmap @pytest.fixture(scope='function') def vcf_file_no_ind(request): hapmap = "tests/fixtures/no_ind.vcf" return hapmap @pytest.fixture(scope='function') def vcf_file_sv(request): hapmap = "tests/fixtures/hapmap.sv.vep.vcf.gz" return hapmap @pytest.fixture(scope='function') def compressed_vcf_file(request): hapmap = "tests/fixtures/hapmap.vcf.gz" return hapmap @pytest.fixture(scope='function') def indexed_vcf_file(request): hapmap = "tests/fixtures/hapmap_pos.vcf.gz" return hapmap @pytest.fixture(scope='function') def root_path(request): root = "tests/fixtures/" return root @pytest.fixture(scope='function') def ped_file(request): hapmap = "tests/fixtures/hapmap.ped" return hapmap @pytest.fixture(scope='function')
MIT License
rajammanabrolu/worldgeneration
evennia-engine/evennia/evennia/contrib/turnbattle/tb_range.py
TBRangeTurnHandler.start_turn
python
def start_turn(self, character):
    character.db.combat_actionsleft = ACTIONS_PER_TURN
    character.msg("|wIt's your turn!|n")
    combat_status_message(character)
Readies a character for the start of their turn by replenishing their
available actions and notifying them that their turn has come up.

Args:
    character (obj): Character to be readied.

Notes:
    In this example, characters are given two actions per turn. This allows
    characters to both move and attack in the same turn (or, alternately,
    move twice or attack twice).
https://github.com/rajammanabrolu/worldgeneration/blob/5e97df013399e1a401d0a7ec184c4b9eb3100edd/evennia-engine/evennia/evennia/contrib/turnbattle/tb_range.py#L684-L700
from random import randint from evennia import DefaultCharacter, DefaultObject, Command, default_cmds, DefaultScript from evennia.commands.default.help import CmdHelp TURN_TIMEOUT = 30 ACTIONS_PER_TURN = 2 def roll_init(character): return randint(1, 1000) def get_attack(attacker, defender, attack_type): attack_value = randint(1, 100) if attack_type == "melee": attack_value += 15 if attack_type == "ranged": attack_value -= 15 return attack_value def get_defense(attacker, defender, attack_type): defense_value = 50 return defense_value def get_damage(attacker, defender): damage_value = randint(15, 25) return damage_value def apply_damage(defender, damage): defender.db.hp -= damage if defender.db.hp <= 0: defender.db.hp = 0 def at_defeat(defeated): defeated.location.msg_contents("%s has been defeated!" % defeated) def resolve_attack(attacker, defender, attack_type, attack_value=None, defense_value=None): if not attack_value: attack_value = get_attack(attacker, defender, attack_type) if not defense_value: defense_value = get_defense(attacker, defender, attack_type) if attack_value < defense_value: attacker.location.msg_contents( "%s's %s attack misses %s!" % (attacker, attack_type, defender) ) else: damage_value = get_damage(attacker, defender) attacker.location.msg_contents( "%s hits %s with a %s attack for %i damage!" % (attacker, defender, attack_type, damage_value) ) apply_damage(defender, damage_value) if defender.db.hp <= 0: at_defeat(defender) def get_range(obj1, obj2): if not obj1.db.combat_range: return None if not obj2.db.combat_range: return None if obj1 not in obj2.db.combat_range: return None if obj2 not in obj1.db.combat_range: return None return obj1.db.combat_range[obj2] def distance_inc(mover, target): mover.db.combat_range[target] += 1 target.db.combat_range[mover] = mover.db.combat_range[target] if get_range(mover, target) > 2: target.db.combat_range[mover] = 2 mover.db.combat_range[target] = 2 def approach(mover, target): def distance_dec(mover, target): mover.db.combat_range[target] -= 1 target.db.combat_range[mover] = mover.db.combat_range[target] if get_range(mover, target) <= 0: target.db.combat_range[mover] = 0 mover.db.combat_range = target.db.combat_range for thing in mover.location.contents: if thing != mover and thing != target: thing.db.combat_range[mover] = thing.db.combat_range[target] contents = mover.location.contents for thing in contents: if thing != mover and thing != target: if get_range(mover, thing) > get_range(target, thing): distance_dec(mover, thing) if get_range(mover, thing) < get_range(target, thing): distance_inc(mover, thing) distance_dec(mover, target) def withdraw(mover, target): contents = mover.location.contents for thing in contents: if thing != mover and thing != target: if get_range(mover, thing) >= get_range(target, thing) and get_range( mover, thing ) < get_range(mover, target): distance_inc(mover, thing) if get_range(target, thing) == 0: distance_inc(mover, thing) if get_range(mover, thing) == 0: distance_inc(mover, thing) distance_inc(mover, target) def combat_cleanup(character): for attr in character.attributes.all(): if attr.key[:7] == "combat_": character.attributes.remove(key=attr.key) def is_in_combat(character): return bool(character.db.combat_turnhandler) def is_turn(character): turnhandler = character.db.combat_turnhandler currentchar = turnhandler.db.fighters[turnhandler.db.turn] return bool(character == currentchar) def spend_action(character, actions, action_name=None): if action_name: character.db.combat_lastaction = 
action_name if actions == "all": character.db.combat_actionsleft = 0 else: character.db.combat_actionsleft -= actions if character.db.combat_actionsleft < 0: character.db.combat_actionsleft = 0 character.db.combat_turnhandler.turn_end_check(character) def combat_status_message(fighter): if not fighter.db.max_hp: fighter.db.hp = 100 fighter.db.max_hp = 100 status_msg = "HP Remaining: %i / %i" % (fighter.db.hp, fighter.db.max_hp) if not is_in_combat(fighter): fighter.msg(status_msg) return engaged_obj = [] reach_obj = [] range_obj = [] for thing in fighter.db.combat_range: if thing != fighter: if fighter.db.combat_range[thing] == 0: engaged_obj.append(thing) if fighter.db.combat_range[thing] == 1: reach_obj.append(thing) if fighter.db.combat_range[thing] > 1: range_obj.append(thing) if engaged_obj: status_msg += "|/Engaged targets: %s" % ", ".join(obj.key for obj in engaged_obj) if reach_obj: status_msg += "|/Reach targets: %s" % ", ".join(obj.key for obj in reach_obj) if range_obj: status_msg += "|/Ranged targets: %s" % ", ".join(obj.key for obj in range_obj) fighter.msg(status_msg) class TBRangeTurnHandler(DefaultScript): def at_script_creation(self): self.key = "Combat Turn Handler" self.interval = 5 self.persistent = True self.db.fighters = [] for thing in self.obj.contents: if thing.db.hp: self.db.fighters.append(thing) for fighter in self.db.fighters: self.initialize_for_combat(fighter) self.obj.db.combat_turnhandler = self for thing in self.obj.contents: self.init_range(thing) ordered_by_roll = sorted(self.db.fighters, key=roll_init, reverse=True) self.db.fighters = ordered_by_roll self.obj.msg_contents("Turn order is: %s " % ", ".join(obj.key for obj in self.db.fighters)) self.start_turn(self.db.fighters[0]) self.db.turn = 0 self.db.timer = TURN_TIMEOUT def at_stop(self): for thing in self.obj.contents: combat_cleanup(thing) self.obj.db.combat_turnhandler = None def at_repeat(self): currentchar = self.db.fighters[ self.db.turn ] self.db.timer -= self.interval if self.db.timer <= 0: self.obj.msg_contents("%s's turn timed out!" % currentchar) spend_action( currentchar, "all", action_name="disengage" ) return elif self.db.timer <= 10 and not self.db.timeout_warning_given: currentchar.msg("WARNING: About to time out!") self.db.timeout_warning_given = True def init_range(self, to_init): rangedict = {} objectlist = self.obj.contents for thing in objectlist: if thing == to_init: rangedict.update({thing: 0}) else: if thing.destination or to_init.destination: rangedict.update({thing: 2}) else: rangedict.update({thing: 1}) to_init.db.combat_range = rangedict def join_rangefield(self, to_init, anchor_obj=None, add_distance=0): contents = self.obj.contents contents.remove(to_init) if not anchor_obj: anchor_obj = contents[randint(0, (len(contents) - 1))] to_init.db.combat_range = anchor_obj.db.combat_range for thing in contents: new_objects_range = thing.db.combat_range[anchor_obj] thing.db.combat_range.update({to_init: new_objects_range}) to_init.db.combat_range.update({to_init: 0}) for n in range(add_distance): withdraw(to_init, anchor_obj) def initialize_for_combat(self, character): combat_cleanup(character) character.db.combat_actionsleft = ( 0 ) character.db.combat_turnhandler = ( self ) character.db.combat_lastaction = "null"
MIT License
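For orientation only, here is the shape of the combat_range bookkeeping that the context above maintains for every fighter: a dictionary from each other object in the room to a distance bucket, where 0 means engaged, 1 means reach, and 2 means ranged (distance_inc caps values at 2). The names and values are invented for the sketch.

alice_range = {"bob": 0, "carol": 1, "door": 2}

def bucket(distance):
    # Mirrors how combat_status_message groups targets.
    return "engaged" if distance == 0 else "reach" if distance == 1 else "ranged"

for target, distance in alice_range.items():
    print(target, "->", bucket(distance))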
icfpc2016/icfpc2016-judge
hibiki/app/hibiki/model.py
get_problem_for_admin
python
def get_problem_for_admin(problem_id):
    problem = _db.problems.find_one({'_id': problem_id})
    if not problem:
        raise KeyError('Problem not found: %s' % problem_id)
    return problem
Returns a problem.

Args:
    problem_id: Numeric ID of the problem.

Returns:
    A problem dictionary.

Raises:
    KeyError: If the specified problem was not found.
https://github.com/icfpc2016/icfpc2016-judge/blob/ece7a536fa480f141d0d305b090b1bfe195542f8/hibiki/app/hibiki/model.py#L773-L788
import hashlib import json import logging import os import bson.binary import gflags from passlib.hash import sha256_crypt import pymongo import pymongo.collection import pymongo.errors import ujson from hibiki import misc_util from hibiki import scoring from hibiki import settings from hibiki import storage FLAGS = gflags.FLAGS gflags.DEFINE_string( 'mongodb_url', 'mongodb://localhost', 'MongoDB URL to connect to.') gflags.DEFINE_string( 'mongodb_db', 'icfpcontest2016', 'MongoDB database name.') gflags.DEFINE_bool( 'mongodb_explain', False, 'Log MongoDB query explains.') gflags.DEFINE_string( 'storage_gcs_bucket_name', None, 'Name of the GCS bucket used to storage large blobs.') gflags.DEFINE_bool( 'disable_model_cache_for_testing', False, 'Disables model caching for testing.') _client = None _db = None _cookie_master_secret = None PASSWORDLESS_HASH = '*passwordless*' def connect(): global _client global _db assert not _client, 'connect() called multiple times!' _client = pymongo.MongoClient(FLAGS.mongodb_url) _db = _client[FLAGS.mongodb_db] server_version = tuple(_client.server_info()['version'].split('.')) assert server_version >= (2, 6), ( 'MongoDB server version is old. Please upgrade to 2.6+.') if FLAGS.storage_gcs_bucket_name: storage.connect(FLAGS.storage_gcs_bucket_name) _init_model() def _init_model(): _ensure_cookie_secret() _ensure_indices() _ensure_organizer_users() def _ensure_cookie_secret(): global _cookie_master_secret entry = _db.config.find_one({'_id': 'cookie_master_secret'}) if not entry: tmp_cookie_master_secret = misc_util.generate_random_id(length=32) try: entry = { '_id': 'cookie_master_secret', 'value': tmp_cookie_master_secret, } _db.config.insert_one(entry) except pymongo.errors.DuplicateKeyError: entry = _db.config.find_one({'_id': 'cookie_master_secret'}) assert entry _cookie_master_secret = entry['value'] def _ensure_indices(): _db.users.create_index([ ('create_time', pymongo.ASCENDING), ], background=True) _db.users.create_index([ ('api_key', pymongo.ASCENDING), ], background=True) _db.problems.create_index([ ('public', pymongo.ASCENDING), ('_id', pymongo.ASCENDING), ], background=True) _db.problems.create_index([ ('public', pymongo.ASCENDING), ('publish_time', pymongo.ASCENDING), ], background=True) _db.problems.create_index([ ('publish_time', pymongo.ASCENDING), ], background=True) _db.problem_ranking_snapshots.create_index([ ('problem_id', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.problem_ranking_snapshots.create_index([ ('public', pymongo.ASCENDING), ('problem_id', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.problems.create_index([ ('public', pymongo.ASCENDING), ('publish_time', pymongo.ASCENDING), ], background=True) _db.problem_ranking_snapshots.create_index([ ('problem_id', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.solutions.create_index([ ('problem_id', pymongo.ASCENDING), ('create_time', pymongo.ASCENDING), ], background=True) _db.leaderboard_snapshots.create_index([ ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.leaderboard_snapshots.create_index([ ('public', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.problem_ranking_snapshots.create_index([ ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.leaderboard_snapshots.create_index([ ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.public_contest_snapshots.create_index([ ('snapshot_time', pymongo.ASCENDING), ], 
background=True) _db.problem_ranking_snapshots.create_index([ ('public', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.leaderboard_snapshots.create_index([ ('public', pymongo.ASCENDING), ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.public_contest_snapshots.create_index([ ('snapshot_time', pymongo.ASCENDING), ], background=True) _db.problems.create_index([ ('owner', pymongo.ASCENDING), ], background=True) _db.solutions.create_index([ ('owner', pymongo.ASCENDING), ], background=True) def _ensure_organizer_users(): try: _db.config.insert_one( { '_id': 'user_counter', 'value': 10, }) except pymongo.errors.DuplicateKeyError: return for i in xrange(10): username = '%d' % (i + 1) api_key = '%s-%s' % (username, misc_util.generate_random_id(32)) display_name = 'Contest Organizer Problem Set %s' % chr(ord('A') + i) contact_email = 'organizer%d@example.com' % (i + 1) member_names = 'N/A' remote_host = '127.127.127.%d' % i user = { '_id': username, 'password_hash': PASSWORDLESS_HASH, 'api_key': api_key, 'display_name': display_name, 'contact_email': contact_email, 'member_names': member_names, 'create_time': misc_util.time(), 'register_remote_host': remote_host, 'organizer': True, } _db.users.insert_one(user) def _maybe_explain_query(cursor): if FLAGS.mongodb_explain: logging.info( 'EXPLAIN: %s', json.dumps( cursor.explain()['queryPlanner']['winningPlan'], indent=2)) def _increment_atomic_counter(key): try: entry = _db.config.find_one_and_update( {'_id': key}, { '$setOnInsert': {'_id': key}, '$inc': {'value': 1}, }, upsert=True, return_document=pymongo.collection.ReturnDocument.AFTER) except pymongo.errors.DuplicateKeyError: entry = _db.config.find_one_and_update( {'_id': key}, {'$inc': {'value': 1}}, return_document=pymongo.collection.ReturnDocument.AFTER) return entry['value'] def record_last_api_access_time(username): now = misc_util.time() try: entry = _db.api_last_accesses.find_one_and_update( {'_id': username}, { '$setOnInsert': {'_id': username}, '$set': {'last_access_time': now}, }, upsert=True, return_document=pymongo.collection.ReturnDocument.BEFORE) except pymongo.errors.DuplicateKeyError: entry = _db.api_last_accesses.find_one_and_update( {'_id': username}, {'$set': {'last_access_time': now}}, return_document=pymongo.collection.ReturnDocument.BEFORE) if not entry: return 86400 return now - entry['last_access_time'] def increment_api_rate_limit_counter(username, action): last_window_time = misc_util.align_timestamp( misc_util.time(), FLAGS.contest_start_time, FLAGS.api_rate_limit_window_size) key = '%s:%d:%s' % (action, last_window_time, username) try: entry = _db.api_rate_limits.find_one_and_update( {'_id': key}, { '$setOnInsert': {'_id': key}, '$inc': {'value': 1}, }, upsert=True, return_document=pymongo.collection.ReturnDocument.AFTER) except pymongo.errors.DuplicateKeyError: entry = _db.api_rate_limits.find_one_and_update( {'_id': key}, {'$inc': {'value': 1}}, return_document=pymongo.collection.ReturnDocument.AFTER) return entry['value'] def decrement_web_rate_limit_counter(username): while True: current_time_millis = int(misc_util.time() * 1000) entry = _db.web_rate_limits.find_one({'_id': username}) if not entry: entry = { '_id': username, 'last_access_time_millis': current_time_millis, 'tokens': FLAGS.web_rate_limit_allowed_burst_requests, } try: _db.web_rate_limits.insert_one(entry) except pymongo.errors.DuplicateKeyError: entry = _db.web_rate_limits.find_one({'_id': username}) last_tokens = entry['tokens'] last_access_time_millis 
= entry['last_access_time_millis'] current_time_millis = int(misc_util.time() * 1000) if last_access_time_millis >= current_time_millis: break delta_minutes = (current_time_millis - last_access_time_millis) / 60000.0 new_tokens = min( FLAGS.web_rate_limit_allowed_burst_requests, last_tokens + FLAGS.web_rate_limit_requests_per_minute * delta_minutes) entry = _db.web_rate_limits.find_one_and_update( { '_id': username, 'last_access_time_millis': last_access_time_millis, }, { '$set': { 'last_access_time_millis': current_time_millis, 'tokens': new_tokens, }, }, return_document=pymongo.collection.ReturnDocument.AFTER) if entry: break entry = _db.web_rate_limits.find_one_and_update( { '_id': username, 'tokens': {'$gte': 1}, }, { '$inc': { 'tokens': -1, }, }, return_document=pymongo.collection.ReturnDocument.AFTER) return bool(entry) def get_cookie_master_secret(): assert _cookie_master_secret if FLAGS.disable_model_cache_for_testing: _ensure_cookie_secret() return _cookie_master_secret def perform_health_checks(): _increment_atomic_counter('test') def save_blob(blob, mimetype): if isinstance(blob, unicode): blob = blob.encode('ascii') assert isinstance(blob, str) key = hashlib.sha1(blob).hexdigest() if FLAGS.storage_gcs_bucket_name: storage.save('blobs/%s' % key, blob, mimetype=mimetype) else: try: _db.blobs.update_one( {'_id': key}, {'$setOnInsert': {'_id': key, 'value': bson.binary.Binary(blob)}}, upsert=True) except pymongo.errors.DuplicateKeyError: pass return key def load_blob(key): if FLAGS.storage_gcs_bucket_name: return storage.load('blobs/%s' % key) else: entry = _db.blobs.find_one({'_id': key}) if not entry: raise KeyError('Blob not found: %s' % key) return str(entry['value']) def get_signed_blob_url(key): if not FLAGS.storage_gcs_bucket_name: return None return storage.get_signed_url('blobs/%s' % key) def register_user(display_name, contact_email, member_names, nationalities, languages, source_url, remote_host): team_id = _increment_atomic_counter('user_counter') username = '%d' % team_id password = misc_util.generate_password() password_hash = sha256_crypt.encrypt(password) api_key = '%s-%s' % (username, misc_util.generate_random_id(32)) user = { '_id': username, 'password_hash': password_hash, 'api_key': api_key, 'display_name': display_name, 'contact_email': contact_email, 'member_names': member_names, 'nationalities': nationalities, 'languages': languages, 'source_url': source_url, 'create_time': misc_util.time(), 'register_remote_host': remote_host, 'organizer': False, } _db.users.insert_one(user) return (username, password) def update_user( username, display_name=None, contact_email=None, member_names=None, nationalities=None, languages=None, source_url=None): update = {} if display_name is not None: update['display_name'] = display_name if contact_email is not None: update['contact_email'] = contact_email if member_names is not None: update['member_names'] = member_names if nationalities is not None: update['nationalities'] = nationalities if languages is not None: update['languages'] = languages if source_url is not None: update['source_url'] = source_url if update: _db.users.update_one({'_id': username}, {'$set': update}) def get_user(username): user = _db.users.find_one({'_id': username}) if not user: raise KeyError('User not found: %s' % username) return user def get_user_problems(username): user = _db.users.find_one({'_id': username}) if not user: raise KeyError('User not found: %s' % username) cursor = _db.problems.find( { 'owner': user['_id'] }, projection=('owner', 
'problem_size', 'solution_size', 'public', '_id')) problems = list(cursor) enhance_problems_for_admin(problems) return problems def get_user_solutions(username): user = _db.users.find_one({'_id': username}) if not user: raise KeyError('User not found: %s' % username) solutions = _db.solutions.find( { 'owner': user['_id'] }, projection=('resemblance_int', 'solution_size', 'problem_id', '_id')) table = {} for solution in solutions: problem_id = solution['problem_id'] if problem_id in table: old_solution = table[problem_id] if solution['resemblance_int'] > old_solution['resemblance_int'] or (solution['resemblance_int'] == old_solution['resemblance_int'] and solution['_id'] < old_solution['_id']): table[problem_id] = solution else: table[problem_id] = solution solutions = table.values() solutions.sort(key=lambda solution: solution['problem_id']) return solutions def get_user_map(usernames): users = list(_db.users.find({'_id': {'$in': list(set(usernames))}})) return {user['_id']: user for user in users} def get_user_by_api_key(api_key): if not api_key: raise KeyError('User not found by API key') user = _db.users.find_one({'api_key': api_key}) if not user: raise KeyError('User not found by API key') return user def get_all_users(**options): cursor = _db.users.find( {}, sort=[('create_time', pymongo.ASCENDING)], **options) return list(cursor) def enqueue_problem( owner, problem_spec, problem_size, solution_spec, solution_size, create_time, publish_time, processing_time, publish_immediately): assert isinstance(publish_time, int) problem_spec_hash = save_blob(problem_spec, mimetype='text/plain') solution_spec_hash = save_blob(solution_spec, mimetype='text/plain') new_problem = { '_id': _increment_atomic_counter('problem_counter'), 'create_time': create_time, 'owner': owner, 'problem_spec_hash': problem_spec_hash, 'problem_size': problem_size, 'solution_spec_hash': solution_spec_hash, 'solution_size': solution_size, 'public': publish_immediately, 'publish_time': publish_time, 'processing_time': processing_time, } _db.problems.insert_one(new_problem) return new_problem def get_public_problems(**options): cursor = _db.problems.find( {'public': True}, sort=[('_id', pymongo.ASCENDING)], **options) return list(cursor) def count_public_problems(): cursor = _db.problems.find( {'public': True}, sort=[('_id', pymongo.DESCENDING)]) return cursor.count() def get_public_problem(problem_id): problem = _db.problems.find_one({'_id': problem_id, 'public': True}) if not problem: raise KeyError('Problem not found: %s' % problem_id) return problem def count_all_problems_for_admin(): return _db.problems.find({}).count() def get_all_problems_for_admin(**options): cursor = _db.problems.find( {}, sort=[('_id', pymongo.ASCENDING)], **options) problems = list(cursor) enhance_problems_for_admin(problems) return problems
Apache License 2.0
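The decrement_web_rate_limit_counter helper in the context above implements a token bucket persisted in MongoDB: tokens refill at a per-minute rate up to a burst limit, and a request is allowed only if at least one token can be consumed. A minimal in-memory sketch of that refill logic follows; the numbers stand in for the FLAGS values and are not the contest's real limits.

import time

BURST = 5            # stands in for FLAGS.web_rate_limit_allowed_burst_requests
PER_MINUTE = 60.0    # stands in for FLAGS.web_rate_limit_requests_per_minute

state = {"tokens": BURST, "last": time.time()}

def allow_request():
    now = time.time()
    elapsed_minutes = (now - state["last"]) / 60.0
    # Refill proportionally to elapsed time, capped at the burst budget.
    state["tokens"] = min(BURST, state["tokens"] + PER_MINUTE * elapsed_minutes)
    state["last"] = now
    if state["tokens"] >= 1:
        state["tokens"] -= 1
        return True
    return False

print(allow_request())  # True until the burst budget is exhausted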
bachili/diffvg
pydiffvg_tensorflow/device.py
set_cpu_device_id
python
def set_cpu_device_id(did: int):
    global cpu_device_id
    cpu_device_id = did
Set the cpu device id we are using.
https://github.com/bachili/diffvg/blob/e5955dbdcb4715ff3fc6cd7d74848a3aad87ec99/pydiffvg_tensorflow/device.py#L33-L38
import tensorflow as tf use_gpu = tf.test.is_gpu_available( cuda_only=True, min_cuda_compute_capability=None ) cpu_device_id = 0 gpu_device_id = 0 def get_device_name(): global use_gpu global cpu_device_id global gpu_device_id return '/device:gpu:' + str(gpu_device_id) if use_gpu else '/device:cpu:' + str(cpu_device_id) def set_use_gpu(v: bool): global use_gpu use_gpu = v def get_use_gpu(): global use_gpu return use_gpu
Apache License 2.0
formlio/forml
tests/io/dsl/_struct/test_frame.py
Queryable._condition
python
def _condition(
    cls,
    source: frame.Queryable,
    handler: typing.Callable[[frame.Queryable, series.Expression], frame.Query],
    target: typing.Callable[[frame.Query], series.Expression],
):
    cls._expression(source, handler, target)
    with pytest.raises(dsl.GrammarError):
        handler(source, source.score + 1)
    with pytest.raises(dsl.GrammarError):
        handler(source, source.reference().score == 'foo')
Common condition testing routine.
https://github.com/formlio/forml/blob/fd070da74a0107e37c0c643dd8df8680618fef74/tests/io/dsl/_struct/test_frame.py#L104-L115
import abc import typing import cloudpickle import pytest from forml.io import dsl from forml.io.dsl import _struct, function from forml.io.dsl._struct import frame, kind, series class Source(metaclass=abc.ABCMeta): @staticmethod @pytest.fixture(scope='session') @abc.abstractmethod def source() -> frame.Queryable: def test_identity(self, source: frame.Source, school: frame.Table): assert len({source, source, school}) == 2 def test_serilizable(self, source: frame.Source): assert cloudpickle.loads(cloudpickle.dumps(source)) == source def test_features(self, source: frame.Source, student: frame.Table): assert student.surname in source.features assert source.surname == student.surname assert source['surname'] == student['surname'] assert source.birthday == student.birthday assert source['birthday'] == student['birthday'] with pytest.raises(AttributeError): _ = student.xyz with pytest.raises(KeyError): _ = student['xyz'] def test_schema(self, source: frame.Source): assert issubclass(source.schema, _struct.Schema) class Queryable(Source, metaclass=abc.ABCMeta): def test_query(self, source: frame.Queryable): assert isinstance(source.query, frame.Query) def test_instance(self, source: frame.Source, student: frame.Table): assert source.instance.query == student.query def test_reference(self, source: frame.Queryable): assert isinstance(source.reference(), frame.Reference) def test_select(self, source: frame.Queryable): assert source.select(source.score).selection[0] == source.score assert source.select(source.score, source.surname).selection == (source.score, source.surname) with pytest.raises(dsl.GrammarError): source.select(source.reference().score) @classmethod def _expression( cls, source: frame.Queryable, handler: typing.Callable[[frame.Queryable, series.Expression], frame.Query], target: typing.Callable[[frame.Query], series.Expression], ): score = source.query.source.score with pytest.raises(dsl.GrammarError): handler(source, (score > 2).alias('foobar')) assert target(handler(source, score > 2)) == function.GreaterThan(score, series.Literal(2)) @classmethod
Apache License 2.0
osmr/imgclsmob
chainer_/chainercv2/models/bamresnet.py
bam_resnet18
python
def bam_resnet18(**kwargs):
    return get_resnet(blocks=18, model_name="bam_resnet18", **kwargs)
BAM-ResNet-18 model from 'BAM: Bottleneck Attention Module,' https://arxiv.org/abs/1807.06514.

Parameters:
----------
pretrained : bool, default False
    Whether to load the pretrained weights for model.
root : str, default '~/.chainer/models'
    Location for keeping the model parameters.
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/chainer_/chainercv2/models/bamresnet.py#L348-L359
__all__ = ['BamResNet', 'bam_resnet18', 'bam_resnet34', 'bam_resnet50', 'bam_resnet101', 'bam_resnet152'] import os import chainer.functions as F import chainer.links as L from chainer import Chain from functools import partial from chainer.serializers import load_npz from .common import SimpleSequential, conv1x1, conv1x1_block, conv3x3_block from .resnet import ResInitBlock, ResUnit class DenseBlock(Chain): def __init__(self, in_channels, out_channels): super(DenseBlock, self).__init__() with self.init_scope(): self.fc = L.Linear( in_size=in_channels, out_size=out_channels) self.bn = L.BatchNormalization( size=out_channels, eps=1e-5) self.activ = F.relu def __call__(self, x): x = self.fc(x) x = self.bn(x) x = self.activ(x) return x class ChannelGate(Chain): def __init__(self, channels, reduction_ratio=16, num_layers=1): super(ChannelGate, self).__init__() mid_channels = channels // reduction_ratio with self.init_scope(): self.init_fc = DenseBlock( in_channels=channels, out_channels=mid_channels) self.main_fcs = SimpleSequential() with self.main_fcs.init_scope(): for i in range(num_layers - 1): setattr(self.main_fcs, "fc{}".format(i + 1), DenseBlock( in_channels=mid_channels, out_channels=mid_channels)) self.final_fc = L.Linear( in_size=mid_channels, out_size=channels) def __call__(self, x): input_shape = x.shape x = F.average_pooling_2d(x, ksize=x.shape[2:]) x = F.reshape(x, shape=(x.shape[0], -1)) x = self.init_fc(x) x = self.main_fcs(x) x = self.final_fc(x) x = F.broadcast_to(F.expand_dims(F.expand_dims(x, axis=2), axis=3), input_shape) return x class SpatialGate(Chain): def __init__(self, channels, reduction_ratio=16, num_dil_convs=2, dilate=4): super(SpatialGate, self).__init__() mid_channels = channels // reduction_ratio with self.init_scope(): self.init_conv = conv1x1_block( in_channels=channels, out_channels=mid_channels, stride=1, use_bias=True) self.dil_convs = SimpleSequential() with self.dil_convs.init_scope(): for i in range(num_dil_convs): setattr(self.dil_convs, "conv{}".format(i + 1), conv3x3_block( in_channels=mid_channels, out_channels=mid_channels, stride=1, pad=dilate, dilate=dilate, use_bias=True)) self.final_conv = conv1x1( in_channels=mid_channels, out_channels=1, stride=1, use_bias=True) def __call__(self, x): input_shape = x.shape x = self.init_conv(x) x = self.dil_convs(x) x = self.final_conv(x) x = F.broadcast_to(x, input_shape) return x class BamBlock(Chain): def __init__(self, channels): super(BamBlock, self).__init__() with self.init_scope(): self.ch_att = ChannelGate(channels=channels) self.sp_att = SpatialGate(channels=channels) def __call__(self, x): att = 1 + F.sigmoid(self.ch_att(x) * self.sp_att(x)) x = x * att return x class BamResUnit(Chain): def __init__(self, in_channels, out_channels, stride, bottleneck): super(BamResUnit, self).__init__() self.use_bam = (stride != 1) with self.init_scope(): if self.use_bam: self.bam = BamBlock(channels=in_channels) self.res_unit = ResUnit( in_channels=in_channels, out_channels=out_channels, stride=stride, bottleneck=bottleneck, conv1_stride=False) def __call__(self, x): if self.use_bam: x = self.bam(x) x = self.res_unit(x) return x class BamResNet(Chain): def __init__(self, channels, init_block_channels, bottleneck, in_channels=3, in_size=(224, 224), classes=1000): super(BamResNet, self).__init__() self.in_size = in_size self.classes = classes with self.init_scope(): self.features = SimpleSequential() with self.features.init_scope(): setattr(self.features, "init_block", ResInitBlock( in_channels=in_channels, 
out_channels=init_block_channels)) in_channels = init_block_channels for i, channels_per_stage in enumerate(channels): stage = SimpleSequential() with stage.init_scope(): for j, out_channels in enumerate(channels_per_stage): stride = 2 if (j == 0) and (i != 0) else 1 setattr(stage, "unit{}".format(j + 1), BamResUnit( in_channels=in_channels, out_channels=out_channels, stride=stride, bottleneck=bottleneck)) in_channels = out_channels setattr(self.features, "stage{}".format(i + 1), stage) setattr(self.features, "final_pool", partial( F.average_pooling_2d, ksize=7, stride=1)) self.output = SimpleSequential() with self.output.init_scope(): setattr(self.output, "flatten", partial( F.reshape, shape=(-1, in_channels))) setattr(self.output, "fc", L.Linear( in_size=in_channels, out_size=classes)) def __call__(self, x): x = self.features(x) x = self.output(x) return x def get_resnet(blocks, model_name=None, pretrained=False, root=os.path.join("~", ".chainer", "models"), **kwargs): if blocks == 18: layers = [2, 2, 2, 2] elif blocks == 34: layers = [3, 4, 6, 3] elif blocks == 50: layers = [3, 4, 6, 3] elif blocks == 101: layers = [3, 4, 23, 3] elif blocks == 152: layers = [3, 8, 36, 3] else: raise ValueError("Unsupported BAM-ResNet with number of blocks: {}".format(blocks)) init_block_channels = 64 if blocks < 50: channels_per_layers = [64, 128, 256, 512] bottleneck = False else: channels_per_layers = [256, 512, 1024, 2048] bottleneck = True channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)] net = BamResNet( channels=channels, init_block_channels=init_block_channels, bottleneck=bottleneck, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file load_npz( file=get_model_file( model_name=model_name, local_model_store_dir_path=root), obj=net) return net
MIT License
facebookresearch/mtrl
mtrl/agent/components/actor.py
Actor.__init__
python
def __init__( self, env_obs_shape: List[int], action_shape: List[int], hidden_dim: int, num_layers: int, log_std_bounds: Tuple[float, float], encoder_cfg: ConfigType, multitask_cfg: ConfigType, ): key = "type_to_select" if key in encoder_cfg: encoder_type_to_select = encoder_cfg[key] encoder_cfg = encoder_cfg[encoder_type_to_select] super().__init__( env_obs_shape=env_obs_shape, action_shape=action_shape, encoder_cfg=encoder_cfg, multitask_cfg=multitask_cfg, ) self.log_std_bounds = log_std_bounds self.should_use_multi_head_policy = check_if_should_use_multi_head_policy( multitask_cfg=multitask_cfg ) if self.should_use_multi_head_policy: task_index_to_mask = torch.eye(multitask_cfg.num_envs) self.moe_masks = moe_layer.MaskCache( task_index_to_mask=task_index_to_mask, **multitask_cfg.multi_head_policy_cfg.mask_cfg, ) if check_if_should_use_task_encoder(multitask_cfg): self.should_condition_model_on_task_info = False self.should_condition_encoder_on_task_info = True self.should_concatenate_task_info_with_encoder = True if "actor_cfg" in multitask_cfg and multitask_cfg.actor_cfg: self.should_condition_model_on_task_info = ( multitask_cfg.actor_cfg.should_condition_model_on_task_info ) self.should_condition_encoder_on_task_info = ( multitask_cfg.actor_cfg.should_condition_encoder_on_task_info ) self.should_concatenate_task_info_with_encoder = ( multitask_cfg.actor_cfg.should_concatenate_task_info_with_encoder ) else: self.should_condition_model_on_task_info = False self.should_condition_encoder_on_task_info = False self.should_concatenate_task_info_with_encoder = False self.encoder = self._make_encoder( env_obs_shape=env_obs_shape, encoder_cfg=encoder_cfg, multitask_cfg=multitask_cfg, ) self.model = self.make_model( action_shape=action_shape, hidden_dim=hidden_dim, num_layers=num_layers, encoder_cfg=encoder_cfg, multitask_cfg=multitask_cfg, ) self.apply(agent_utils.weight_init)
Actor component for the agent. Args: env_obs_shape (List[int]): shape of the environment observation that the actor gets. action_shape (List[int]): shape of the action vector that the actor produces. hidden_dim (int): hidden dimensionality of the actor. num_layers (int): number of layers in the actor. log_std_bounds (Tuple[float, float]): bounds to clip log of standard deviation. encoder_cfg (ConfigType): config for the encoder. multitask_cfg (ConfigType): config for encoding the multitask knowledge.
https://github.com/facebookresearch/mtrl/blob/184c7d39db21acc505cf7094ed87cd28a1735105/mtrl/agent/components/actor.py#L132-L210
from typing import List, Tuple import numpy as np import torch import torch.nn.functional as F from torch import nn from mtrl.agent import utils as agent_utils from mtrl.agent.components import base as base_component from mtrl.agent.components import encoder, moe_layer from mtrl.agent.components.soft_modularization import SoftModularizedMLP from mtrl.agent.ds.mt_obs import MTObs from mtrl.agent.ds.task_info import TaskInfo from mtrl.utils.types import ConfigType, ModelType, TensorType def check_if_should_use_multi_head_policy(multitask_cfg: ConfigType) -> bool: if "should_use_multi_head_policy" in multitask_cfg: return multitask_cfg.should_use_multi_head_policy return False def check_if_should_use_task_encoder(multitask_cfg: ConfigType) -> bool: if "should_use_task_encoder" in multitask_cfg: return multitask_cfg.should_use_task_encoder return False def _gaussian_logprob(noise: TensorType, log_std: TensorType) -> TensorType: residual = (-0.5 * noise.pow(2) - log_std).sum(-1, keepdim=True) return residual - 0.5 * np.log(2 * np.pi) * noise.size(-1) def _squash( mu: TensorType, pi: TensorType, log_pi: TensorType ) -> Tuple[TensorType, TensorType, TensorType]: mu = torch.tanh(mu) if pi is not None: pi = torch.tanh(pi) if log_pi is not None: log_pi -= torch.log(F.relu(1 - pi.pow(2)) + 1e-6).sum(-1, keepdim=True) return mu, pi, log_pi class BaseActor(base_component.Component): def __init__( self, env_obs_shape: List[int], action_shape: List[int], encoder_cfg: ConfigType, multitask_cfg: ConfigType, *args, **kwargs, ): super().__init__() self.multitask_cfg = multitask_cfg def encode(self, mtobs: MTObs, detach: bool = False) -> TensorType: raise NotImplementedError def forward( self, mtobs: MTObs, detach_encoder: bool = False, ) -> Tuple[TensorType, TensorType, TensorType, TensorType]: raise NotImplementedError class Actor(BaseActor):
MIT License
gehaxelt/python-rss2irc
bot.py
IRCBot.on_join
python
def on_join(self, connection, event): if not self.__first_start: self.send_msg(self.__config.CHANNEL, self.welcome_msg()) self.__on_connect_cb() self.__first_start = True
Say hello to other people in the channel.
https://github.com/gehaxelt/python-rss2irc/blob/edf8782aa2a9d1c68c0818f30cc5494efc1c9cc8/bot.py#L59-L64
import ssl import threading import irc.bot import irc.client import irc.connection import time import re import sys import feedparser import datetime import dateutil.parser import requests from colour import Colours from db import FeedDB from config import Config from feedupdater import FeedUpdater class IRCBot(irc.bot.SingleServerIRCBot): def __init__(self, config, db, on_connect_cb): self.__config = config self.__db = db self.__on_connect_cb = on_connect_cb self.__servers = [irc.bot.ServerSpec(self.__config.HOST, self.__config.PORT, self.__config.PASSWORD)] self.__first_start = False self.color_num = self.__config.num_col self.color_feedname = self.__config.feedname self.color_newstitle = self.__config.newstitle self.color_url = self.__config.url self.color_date = self.__config.date self.shorturls = self.__config.shorturls self.dateformat = self.__config.dateformat self.filterkeywords = self.__config.filterkeywords if self.__config.SSL: ssl_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) print(datetime.datetime.now(), "Starting SSL connection.") sys.stdout.flush() super(IRCBot, self).__init__(self.__servers, self.__config.NICK, self.__config.NICK, connect_factory=ssl_factory) else: print(datetime.datetime.now(), "Starting connection.") sys.stdout.flush() super(IRCBot, self).__init__(self.__servers, self.__config.NICK, self.__config.NICK) def on_welcome(self, connection, event): if self.__config.NICKPASS: print(datetime.datetime.now(), "Starting authentication.") sys.stdout.flush() self.send_msg("NickServ", "IDENTIFY {}".format(self.__config.NICKPASS)) """Join the correct channel upon connecting""" if irc.client.is_channel(self.__config.CHANNEL): print(datetime.datetime.now(), "Joining to channel.") sys.stdout.flush() connection.join(self.__config.CHANNEL)
MIT License
amz-driverless/rbb_core
rbb_client/src/rbb_client/models/bag_summary.py
BagSummary.detail_type
python
def detail_type(self): return self._detail_type
Gets the detail_type of this BagSummary. :return: The detail_type of this BagSummary. :rtype: str
https://github.com/amz-driverless/rbb_core/blob/618617270314af5335de30179072244e1f440c4c/rbb_client/src/rbb_client/models/bag_summary.py#L89-L97
from pprint import pformat from six import iteritems class BagSummary(object): def __init__(self): self.swagger_types = { 'detail_type': 'str', 'name': 'str', 'store_data': 'object', 'discovered': 'datetime', 'extraction_failure': 'bool', 'in_trash': 'bool', 'is_extracted': 'bool', 'meta_available': 'bool', 'size': 'int', 'start_time': 'datetime', 'end_time': 'datetime', 'duration': 'float', 'messages': 'int', 'tags': 'list[Tag]' } self.attribute_map = { 'detail_type': 'detail_type', 'name': 'name', 'store_data': 'store_data', 'discovered': 'discovered', 'extraction_failure': 'extraction_failure', 'in_trash': 'in_trash', 'is_extracted': 'is_extracted', 'meta_available': 'meta_available', 'size': 'size', 'start_time': 'start_time', 'end_time': 'end_time', 'duration': 'duration', 'messages': 'messages', 'tags': 'tags' } self._detail_type = None self._name = None self._store_data = None self._discovered = None self._extraction_failure = None self._in_trash = None self._is_extracted = None self._meta_available = None self._size = None self._start_time = None self._end_time = None self._duration = None self._messages = None self._tags = None @property
MIT License
mortazavilab/talon
src/talon/post/filter_talon_transcripts.py
merge_reads_with_novelty
python
def merge_reads_with_novelty(reads, novelty): merged = pd.merge(reads, novelty, on = "transcript_ID", how = "left") return merged
Given a data frame of reads and a transcript novelty data frame, perform a left merge to annotate the reads with their novelty status.
https://github.com/mortazavilab/talon/blob/3824b55aad48aa1d2c8553f14c688eafe75aadb0/src/talon/post/filter_talon_transcripts.py#L226-L232
from optparse import OptionParser import sqlite3 from pathlib import Path from .. import query_utils as qutils from talon.post import get_read_annotations as read_annot import pandas as pd import os import warnings def getOptions(): parser = OptionParser(description = ("talon_filter_transcripts is a " "utility that filters the transcripts inside " "a TALON database to produce a transcript whitelist. " "This list can then be used by downstream analysis " "tools to determine which transcripts and other " "features should be reported (for example in a GTF file)")) parser.add_option("--db", dest = "database", help = "TALON database", metavar = "FILE", type = str) parser.add_option("--annot", "-a", dest = "annot", help = """Which annotation version to use. Will determine which annotation transcripts are considered known or novel relative to. Note: must be in the TALON database.""", type = "string") parser.add_option("--datasets", dest = "datasets", default = None, help = ("Datasets to include. Can be provided as a " "comma-delimited list on the command line, " "or as a file with one dataset per line. " "If this option is omitted, all datasets will " "be included.")) parser.add_option("--maxFracA", dest = "max_frac_A", default = 0.5, help = ("Maximum fraction of As to allow in the window " "located immediately after any read assigned to " "a novel transcript (helps to filter out internal " "priming artifacts). Default = 0.5. Use 1 if you prefer" "to not filter out internal priming events."), type = float) parser.add_option("--minCount", dest = "min_count", default = 5, type = int, help = ("Number of minimum occurrences required for a " "novel transcript PER dataset. Default = 5")) parser.add_option("--minDatasets", dest = "min_datasets", default = None, type = int, help = ("Minimum number of datasets novel transcripts " "must be found in. Default = all datasets provided")) parser.add_option("--allowGenomic", dest ="allow_genomic", action='store_true', help = ("If this option is set, transcripts from the Genomic " "novelty category will be permitted in the output " "(provided they pass the thresholds). 
Default " "behavior is to filter out genomic transcripts " "since they are unlikely to be real novel isoforms."), default = False) parser.add_option("--o", dest = "outfile", help = "Outfile name", metavar = "FILE", type = "string") (options, args) = parser.parse_args() return options def get_known_transcripts(database, annot, datasets = None): with sqlite3.connect(database) as conn: query = """SELECT DISTINCT gene_ID, transcript_ID FROM observed LEFT JOIN transcript_annotations AS ta ON ta.ID = observed.transcript_ID WHERE (ta.attribute = 'transcript_status' AND ta.value = 'KNOWN' AND ta.annot_name = '%s')""" % (annot) if datasets != None: datasets = qutils.format_for_IN(datasets) query += " AND observed.dataset IN " + datasets known = pd.read_sql_query(query, conn) return known def fetch_reads_in_datasets_fracA_cutoff(database, datasets, max_frac_A): if datasets == None: with sqlite3.connect(database) as conn: query = """SELECT dataset_name FROM dataset""" iter_datasets = pd.read_sql_query(query, conn).dataset_name.tolist() else: iter_datasets = datasets for dataset in iter_datasets: with sqlite3.connect(database) as conn: query = """SELECT read_name, gene_ID, transcript_ID, dataset, fraction_As FROM observed WHERE dataset='{}' LIMIT 0, 10""".format(dataset) data = pd.read_sql_query(query, conn) nans = all(data.fraction_As.isna().tolist()) if nans and max_frac_A != 1: print("Reads in dataset {} appear to be unlabelled. " "Only known transcripts will pass the filter.".format(dataset)) with sqlite3.connect(database) as conn: query = """SELECT read_name, gene_ID, transcript_ID, dataset, fraction_As FROM observed WHERE fraction_As <= %f""" % (max_frac_A) if datasets != None: datasets = qutils.format_for_IN(datasets) query += " AND dataset IN " + datasets data = pd.read_sql_query(query, conn) if len(data.index) == 0: print('No reads passed maxFracA cutoff. Is this expected?') return data def check_annot_validity(annot, database): conn = sqlite3.connect(database) cursor = conn.cursor() cursor.execute("SELECT DISTINCT annot_name FROM gene_annotations") annotations = [str(x[0]) for x in cursor.fetchall()] conn.close() if "TALON" in annotations: annotations.remove("TALON") if annot == None: message = "Please provide a valid annotation name. " + "In this database, your options are: " + ", ".join(annotations) raise ValueError(message) if annot not in annotations: message = "Annotation name '" + annot + "' not found in this database. Try one of the following: " + ", ".join(annotations) raise ValueError(message) return def check_db_version(database): conn = sqlite3.connect(database) cursor = conn.cursor() with sqlite3.connect(database) as conn: query = """SELECT value FROM run_info WHERE item='schema_version'""" ver = pd.read_sql_query(query, conn) if ver.empty: message = "Database version is not compatible with v5.0 filtering." raise ValueError(message) def parse_datasets(dataset_option, database): if dataset_option == None: print(("No dataset names specified, so filtering process will use all " "datasets present in the database.")) return None elif os.path.isfile(dataset_option): print("Parsing datasets from file %s..." 
% (dataset_option)) datasets = [] with open(dataset_option) as f: for line in f: line = line.strip() datasets.append(line) else: datasets = dataset_option.split(",") with sqlite3.connect(database) as conn: cursor = conn.cursor() valid_datasets = qutils.fetch_all_datasets(cursor) invalid_datasets = [] for dset in datasets: if dset not in valid_datasets: invalid_datasets.append(dset) if len(invalid_datasets) > 0: raise ValueError(("Problem parsing datasets. The following names are " "not in the database: '%s'. \nValid dataset names: '%s'") % (", ".join(invalid_datasets), ", ".join(valid_datasets))) else: print("Parsed the following dataset names successfully: %s" % (", ".join(datasets))) return datasets def get_novelty_df(database): transcript_novelty_dict = read_annot.get_transcript_novelty(database) transcript_novelty = pd.DataFrame.from_dict(transcript_novelty_dict, orient='index') transcript_novelty = transcript_novelty.reset_index() transcript_novelty.columns = ['transcript_ID', 'transcript_novelty'] return transcript_novelty
MIT License
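The merge_reads_with_novelty function above is a thin wrapper around pandas.merge; as a quick illustration, the standalone sketch below reproduces the same left join on two toy data frames (all column values are invented) so the annotation behaviour is easy to see:

import pandas as pd

# Toy read table: one row per read, keyed by transcript_ID
reads = pd.DataFrame({
    "read_name": ["read1", "read2", "read3"],
    "transcript_ID": [10, 11, 12],
})

# Toy novelty table: one row per transcript
novelty = pd.DataFrame({
    "transcript_ID": [10, 11],
    "transcript_novelty": ["Known", "NIC"],
})

# Same operation as merge_reads_with_novelty: the left join keeps every read,
# and reads whose transcript has no novelty entry get NaN (read3 here)
merged = pd.merge(reads, novelty, on="transcript_ID", how="left")
print(merged)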
dvershinin/lastversion
lastversion/ProjectHolder.py
ProjectHolder.is_official_for_repo
python
def is_official_for_repo(cls, repo): if repo.startswith(('https://', 'http://')): for url in cls.KNOWN_REPO_URLS: if repo.startswith((url, "https://{}".format(url), "http://{}".format(url))): log.info('{} Starts with {}'.format(repo, url)) return cls.KNOWN_REPO_URLS[url] else: if repo.lower() in cls.KNOWN_REPOS_BY_NAME: log.info('Selecting known repo {}'.format(repo)) return cls.KNOWN_REPOS_BY_NAME[repo.lower()] return False
Check if repo is a known repo for this type of project holder.
https://github.com/dvershinin/lastversion/blob/debbf1d1682aa02241334a9deb5c33a862b502ad/lastversion/ProjectHolder.py#L99-L110
import logging import os import re import requests from packaging.version import InvalidVersion from .Version import Version from .__about__ import __version__ from .utils import asset_does_not_belong_to_machine log = logging.getLogger(__name__) class ProjectHolder(requests.Session): """List of odd repos where last char is part of version not beta level.""" LAST_CHAR_FIX_REQUIRED_ON = [] DEFAULT_HOSTNAME = None DEFAULT_HOLDER = False KNOWN_REPO_URLS = {} KNOWN_REPOS_BY_NAME = {} REPO_URL_PROJECT_COMPONENTS = 2 REPO_URL_PROJECT_OFFSET = 0 RELEASE_URL_FORMAT = None SHORT_RELEASE_URL_FORMAT = None def set_repo(self, repo): self.repo = repo self.name = repo.split('/')[-1] def __init__(self): super(ProjectHolder, self).__init__() self.headers.update({'User-Agent': 'lastversion/{}'.format(__version__)}) log.info('Created instance of {}'.format(type(self).__name__)) self.branches = None self.only = None self.having_asset = None self.hostname = None self.repo = None self.name = None self.feed_url = None def is_valid(self): return self.feed_url or self.name def set_branches(self, branches): self.branches = branches def set_only(self, only): self.only = only if only: log.info('Only considering tags with "{}"'.format(only)) def set_having_asset(self, having_asset): self.having_asset = having_asset if having_asset: log.info('Only considering releases with asset "{}"'.format(having_asset)) @classmethod def get_host_repo_for_link(cls, repo): hostname = None if repo.startswith(('https://', 'http://')): url_parts = repo.split('/') hostname = url_parts[2] offset = 3 + cls.REPO_URL_PROJECT_OFFSET repo = "/".join(url_parts[offset:offset + cls.REPO_URL_PROJECT_COMPONENTS]) return hostname, repo @classmethod
BSD 2-Clause Simplified License
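is_official_for_repo above is a classmethod driven by the KNOWN_REPO_URLS and KNOWN_REPOS_BY_NAME class attributes, which are empty on the base ProjectHolder; the toy subclass below is invented purely for illustration (the URL, name, and dict values are not real lastversion data) and shows how a concrete holder is matched:

from lastversion.ProjectHolder import ProjectHolder

# Invented example holder: the URL and name tables are illustrative only
class ExampleHolder(ProjectHolder):
    KNOWN_REPO_URLS = {
        "example.org/project": {"repo": "owner/project"},
    }
    KNOWN_REPOS_BY_NAME = {
        "project": {"repo": "owner/project"},
    }

print(ExampleHolder.is_official_for_repo("https://example.org/project"))  # matched by URL prefix
print(ExampleHolder.is_official_for_repo("project"))                      # matched by lower-cased name
print(ExampleHolder.is_official_for_repo("something-else"))               # False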
danielfrg/jupyterhub-kubernetes_spawner
kubernetes_spawner/swagger_client/models/v1_flex_volume_source.py
V1FlexVolumeSource.secret_ref
python
def secret_ref(self, secret_ref): self._secret_ref = secret_ref
Sets the secret_ref of this V1FlexVolumeSource. Optional: SecretRef is a reference to the authentication secret for User, default is empty. :param secret_ref: The secret_ref of this V1FlexVolumeSource. :type: V1LocalObjectReference
https://github.com/danielfrg/jupyterhub-kubernetes_spawner/blob/15a2b63ef719f8c3ff83221333f7de69c1495512/kubernetes_spawner/swagger_client/models/v1_flex_volume_source.py#L117-L125
from pprint import pformat from six import iteritems class V1FlexVolumeSource(object): def __init__(self): self.swagger_types = { 'driver': 'str', 'fs_type': 'str', 'secret_ref': 'V1LocalObjectReference', 'read_only': 'bool', 'options': 'str' } self.attribute_map = { 'driver': 'driver', 'fs_type': 'fsType', 'secret_ref': 'secretRef', 'read_only': 'readOnly', 'options': 'options' } self._driver = None self._fs_type = None self._secret_ref = None self._read_only = None self._options = None @property def driver(self): return self._driver @driver.setter def driver(self, driver): self._driver = driver @property def fs_type(self): return self._fs_type @fs_type.setter def fs_type(self, fs_type): self._fs_type = fs_type @property def secret_ref(self): return self._secret_ref @secret_ref.setter
Apache License 2.0
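For illustration, a minimal use of the generated setters shown above; the module path is inferred from the file path in this record, the driver name and secret value are placeholders, and note that the setters store whatever object they are given without validation:

from kubernetes_spawner.swagger_client.models.v1_flex_volume_source import V1FlexVolumeSource

# Build an empty model and populate it through the generated setters
volume = V1FlexVolumeSource()
volume.driver = "example.vendor/volume-driver"   # placeholder driver name
volume.fs_type = "ext4"
volume.secret_ref = {"name": "flex-secret"}      # normally a V1LocalObjectReference; nothing is validated

print(volume.driver, volume.fs_type, volume.secret_ref)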
aws/aws-encryption-sdk-python
src/aws_encryption_sdk/streaming_client.py
_EncryptionStream.tell
python
def tell(self): return self.bytes_read
Returns the current position in the stream.
https://github.com/aws/aws-encryption-sdk-python/blob/74af899221c8f40b268994607b421e6bf6c09240/src/aws_encryption_sdk/streaming_client.py#L280-L282
from __future__ import division import abc import hmac import io import logging import math import attr import six import aws_encryption_sdk.internal.utils from aws_encryption_sdk.exceptions import ( ActionNotAllowedError, AWSEncryptionSDKClientError, CustomMaximumValueExceeded, MasterKeyProviderError, MaxEncryptedDataKeysExceeded, NotSupportedError, SerializationError, ) from aws_encryption_sdk.identifiers import Algorithm, CommitmentPolicy, ContentType, SerializationVersion from aws_encryption_sdk.internal.crypto.authentication import Signer, Verifier from aws_encryption_sdk.internal.crypto.data_keys import calculate_commitment_key, derive_data_encryption_key from aws_encryption_sdk.internal.crypto.encryption import Decryptor, Encryptor, decrypt from aws_encryption_sdk.internal.crypto.iv import non_framed_body_iv from aws_encryption_sdk.internal.defaults import FRAME_LENGTH, LINE_LENGTH, MAX_NON_FRAMED_SIZE, TYPE, VERSION from aws_encryption_sdk.internal.formatting.deserialize import ( deserialize_footer, deserialize_frame, deserialize_header, deserialize_header_auth, deserialize_non_framed_values, deserialize_tag, validate_header, ) from aws_encryption_sdk.internal.formatting.encryption_context import assemble_content_aad from aws_encryption_sdk.internal.formatting.serialize import ( serialize_footer, serialize_frame, serialize_header, serialize_header_auth, serialize_non_framed_close, serialize_non_framed_open, ) from aws_encryption_sdk.internal.utils.commitment import ( validate_commitment_policy_on_decrypt, validate_commitment_policy_on_encrypt, ) from aws_encryption_sdk.internal.utils.signature import SignaturePolicy, validate_signature_policy_on_decrypt from aws_encryption_sdk.key_providers.base import MasterKeyProvider from aws_encryption_sdk.materials_managers import DecryptionMaterialsRequest, EncryptionMaterialsRequest from aws_encryption_sdk.materials_managers.base import CryptoMaterialsManager from aws_encryption_sdk.materials_managers.default import DefaultCryptoMaterialsManager from aws_encryption_sdk.structures import MessageHeader _LOGGER = logging.getLogger(__name__) @attr.s(hash=True) @six.add_metaclass(abc.ABCMeta) class _ClientConfig(object): source = attr.ib(hash=True, converter=aws_encryption_sdk.internal.utils.prep_stream_data) commitment_policy = attr.ib( hash=True, validator=attr.validators.instance_of(CommitmentPolicy), ) signature_policy = attr.ib( hash=True, default=SignaturePolicy.ALLOW_ENCRYPT_ALLOW_DECRYPT, validator=attr.validators.instance_of(SignaturePolicy), ) max_encrypted_data_keys = attr.ib( hash=True, default=None, validator=attr.validators.optional(attr.validators.instance_of(int)) ) materials_manager = attr.ib( hash=True, default=None, validator=attr.validators.optional(attr.validators.instance_of(CryptoMaterialsManager)) ) key_provider = attr.ib( hash=True, default=None, validator=attr.validators.optional(attr.validators.instance_of(MasterKeyProvider)) ) source_length = attr.ib( hash=True, default=None, validator=attr.validators.optional(attr.validators.instance_of(six.integer_types)) ) line_length = attr.ib( hash=True, default=LINE_LENGTH, validator=attr.validators.instance_of(six.integer_types) ) def __attrs_post_init__(self): both_cmm_and_mkp_defined = self.materials_manager is not None and self.key_provider is not None neither_cmm_nor_mkp_defined = self.materials_manager is None and self.key_provider is None if both_cmm_and_mkp_defined or neither_cmm_nor_mkp_defined: raise TypeError("Exactly one of materials_manager or key_provider must be 
provided") if self.materials_manager is None: self.materials_manager = DefaultCryptoMaterialsManager(master_key_provider=self.key_provider) class _EncryptionStream(io.IOBase): line_length = LINE_LENGTH config = None bytes_read = None output_buffer = None _message_prepped = None source_stream = None _stream_length = None def __new__(cls, **kwargs): if ( not (hasattr(cls, "_read_bytes") and callable(cls._read_bytes)) or not (hasattr(cls, "_prep_message") and callable(cls._read_bytes)) or not hasattr(cls, "_config_class") ): raise TypeError("Can't instantiate abstract class {}".format(cls.__name__)) instance = super(_EncryptionStream, cls).__new__(cls) config = kwargs.pop("config", None) if not isinstance(config, instance._config_class): config = instance._config_class(**kwargs) instance.config = config instance.bytes_read = 0 instance.output_buffer = b"" instance._message_prepped = False instance.source_stream = instance.config.source instance._stream_length = instance.config.source_length return instance @property def stream_length(self): if self._stream_length is None: try: current_position = self.source_stream.tell() self.source_stream.seek(0, 2) self._stream_length = self.source_stream.tell() self.source_stream.seek(current_position, 0) except Exception as error: raise NotSupportedError(error) return self._stream_length @property def header(self): if not self._message_prepped: self._prep_message() return self._header def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): try: self.close() except AWSEncryptionSDKClientError: _LOGGER.exception("Error on closing") return False def readable(self): return not self.closed def read(self, b=-1): if b is None or b < 0: b = -1 _LOGGER.debug("Stream read called, requesting %d bytes", b) output = io.BytesIO() if not self._message_prepped: self._prep_message() if self.closed: raise ValueError("I/O operation on closed file") if b >= 0: self._read_bytes(b) output.write(self.output_buffer[:b]) self.output_buffer = self.output_buffer[b:] else: while True: line = self.readline() if not line: break output.write(line) self.bytes_read += output.tell() _LOGGER.debug("Returning %d bytes of %d bytes requested", output.tell(), b) return output.getvalue()
Apache License 2.0
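tell() above simply reports bytes_read, i.e. how many bytes the stream has handed out so far rather than a position in the underlying source; the plain io.BytesIO sketch below (no aws_encryption_sdk setup involved) shows the analogous contract for sequential reads:

import io

# For an in-memory stream read sequentially, tell() equals the number of
# bytes returned so far -- the same contract _EncryptionStream exposes via
# its bytes_read counter.
stream = io.BytesIO(b"example plaintext bytes")
chunk = stream.read(7)
print(len(chunk), stream.tell())   # 7 7
stream.read(4)
print(stream.tell())               # 11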
ebellocchia/bip_utils
bip_utils/bip/bip32/bip32_path.py
Bip32PathParser.__ParseElem
python
def __ParseElem(path_elem: str) -> int: path_elem = path_elem.strip() is_hardened = path_elem.endswith(Bip32PathConst.HARDENED_CHARS) if is_hardened: path_elem = path_elem[:-1] if not path_elem.isnumeric(): raise Bip32PathError(f"Invalid path element ({path_elem})") return int(path_elem) if not is_hardened else Bip32Utils.HardenIndex(int(path_elem))
Parse a path element and get the corresponding index. Args: path_elem (str): Path element Returns: int: Index of the element (hardened if the element ends with a hardened marker) Raises: Bip32PathError: If the path element is not a valid index
https://github.com/ebellocchia/bip_utils/blob/b04f9ef493a5b57983412c0ce460a9ca05ee1f50/bip_utils/bip/bip32/bip32_path.py#L181-L209
from typing import Iterator, List, Optional, Sequence, Tuple, Union from bip_utils.bip.bip32.bip32_ex import Bip32PathError from bip_utils.bip.bip32.bip32_key_data import Bip32KeyIndex from bip_utils.bip.bip32.bip32_utils import Bip32Utils class Bip32PathConst: HARDENED_CHARS: Tuple[str, str] = ("'", "p") MASTER_CHAR: str = "m" class Bip32Path: m_elems: List[Bip32KeyIndex] def __init__(self, elems: Optional[Sequence[Union[int, Bip32KeyIndex]]] = None) -> None: if elems is None: elems = [] try: self.m_elems = [Bip32KeyIndex(elem) if isinstance(elem, int) else elem for elem in elems] except ValueError as ex: raise Bip32PathError("The path contains some invalid key indexes") from ex def Length(self) -> int: return len(self.m_elems) def ToList(self) -> List[int]: return [int(elem) for elem in self.m_elems] def ToStr(self) -> str: path_str = "" for elem in self.m_elems: if not elem.IsHardened(): path_str += str(elem.ToInt()) + "/" else: path_str += str(Bip32Utils.UnhardenIndex(elem.ToInt())) + "'/" return path_str[:-1] def __str__(self) -> str: return self.ToStr() def __getitem__(self, idx: int) -> Bip32KeyIndex: return self.m_elems[idx] def __iter__(self) -> Iterator[Bip32KeyIndex]: yield from self.m_elems class Bip32PathParser: @staticmethod def Parse(path: str) -> Bip32Path: if path.endswith("/"): path = path[:-1] return Bip32PathParser.__ParseElements(path.split("/")) @staticmethod def __ParseElements(path_elems: List[str]) -> Bip32Path: if len(path_elems) > 0 and path_elems[0] == Bip32PathConst.MASTER_CHAR: path_elems = path_elems[1:] parsed_elems = list(map(Bip32PathParser.__ParseElem, path_elems)) return Bip32Path(parsed_elems) @staticmethod
MIT License
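A small usage sketch of the parser above; the import path is taken from the file location shown in this record and the derivation path string is just an example:

from bip_utils.bip.bip32.bip32_path import Bip32PathParser

# Parse a BIP32 derivation path; elements ending with ' (or p) are hardened
path = Bip32PathParser.Parse("m/44'/0'/0'/0/2")

print(path.Length())   # 5 elements (the leading 'm' is not counted)
print(path.ToStr())    # 44'/0'/0'/0/2 -- hardened elements keep their ' marker
print(path.ToList())   # raw integer indexes; hardened ones are offset via Bip32Utils.HardenIndex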
swaglyrics/swaglyrics-for-spotify
swaglyrics/__init__.py
user_data_dir
python
def user_data_dir(file_name): if sys.platform.startswith("win"): os_path = getenv("LOCALAPPDATA") elif sys.platform.startswith("darwin"): os_path = "~/Library/Application Support" else: os_path = getenv("XDG_DATA_HOME", "~/.local/share") path = Path(os_path) / "SwagLyrics" return path.expanduser() / file_name
Get OS specific data directory path for SwagLyrics. Typical user data directories are: macOS: ~/Library/Application Support/SwagLyrics Unix: ~/.local/share/SwagLyrics # or in $XDG_DATA_HOME, if defined Win 10: C:\Users\<username>\AppData\Local\SwagLyrics For Unix, we follow the XDG spec and support $XDG_DATA_HOME if defined. :param file_name: file to be fetched from the data dir :return: full path to the user-specific data dir
https://github.com/swaglyrics/swaglyrics-for-spotify/blob/99fe764a9e45cac6cb9fcdf724c7d2f8cb4524fb/swaglyrics/__init__.py#L8-L32
import sys from os import getenv from pathlib import Path
MIT License
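A quick illustration of the helper above; the file name passed in is arbitrary and only shows how the per-OS base directory and the file name are joined:

from swaglyrics import user_data_dir

# Resolves to e.g. ~/.local/share/SwagLyrics/example.txt on Linux,
# ~/Library/Application Support/SwagLyrics/example.txt on macOS and
# %LOCALAPPDATA%\SwagLyrics\example.txt on Windows.
cache_file = user_data_dir("example.txt")  # arbitrary example file name
print(cache_file)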
graphsense/graphsense-rest
openapi_server/models/stats_tool.py
StatsTool.__init__
python
def __init__(self, visible_name=None, id=None, version=None, titanium_replayable=None, responsible_for=None): self.openapi_types = { 'visible_name': str, 'id': str, 'version': str, 'titanium_replayable': bool, 'responsible_for': List[str] } self.attribute_map = { 'visible_name': 'visible_name', 'id': 'id', 'version': 'version', 'titanium_replayable': 'titanium_replayable', 'responsible_for': 'responsible_for' } self._visible_name = visible_name self._id = id self._version = version self._titanium_replayable = titanium_replayable self._responsible_for = responsible_for
StatsTool - a model defined in OpenAPI :param visible_name: The visible_name of this StatsTool. # noqa: E501 :type visible_name: str :param id: The id of this StatsTool. # noqa: E501 :type id: str :param version: The version of this StatsTool. # noqa: E501 :type version: str :param titanium_replayable: The titanium_replayable of this StatsTool. # noqa: E501 :type titanium_replayable: bool :param responsible_for: The responsible_for of this StatsTool. # noqa: E501 :type responsible_for: List[str]
https://github.com/graphsense/graphsense-rest/blob/2e4a9c20835e54d971e3fc3aae5780bc87d48647/openapi_server/models/stats_tool.py#L18-L52
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from openapi_server.models.base_model_ import Model from openapi_server import util class StatsTool(Model):
MIT License
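Constructing the generated model above only requires passing the five optional keyword arguments; the module path is inferred from the file path in this record and the values below are placeholders:

from openapi_server.models.stats_tool import StatsTool

# All arguments are optional and are stored as-is on the instance
tool = StatsTool(
    visible_name="Example Tool",
    id="example_tool",
    version="1.0.0",
    titanium_replayable=False,
    responsible_for=["addresses", "transactions"],
)

print(tool.openapi_types)    # declared field types
print(tool.attribute_map)    # JSON attribute names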
huntermcgushion/hyperparameter_hunter
hyperparameter_hunter/space/space_core.py
Space.distance
python
def distance(self, point_a, point_b): distance = 0.0 for a, b, dim in zip(point_a, point_b, self.dimensions): distance += dim.distance(a, b) return distance
Compute distance between two points in this space. Both `point_a` and `point_b` are expected to be of the same length as :attr:`dimensions`, with values corresponding to the `Dimension` bounds of :attr:`dimensions` Parameters ---------- point_a: List First point point_b: List Second point Returns ------- Number Distance between `point_a` and `point_b`
https://github.com/huntermcgushion/hyperparameter_hunter/blob/28b1d48e01a993818510811b82a677e0a7a232b2/hyperparameter_hunter/space/space_core.py#L429-L449
from hyperparameter_hunter.space.dimensions import Dimension, Real, Integer, Categorical from hyperparameter_hunter.utils.general_utils import short_repr from functools import reduce import numbers import numpy as np from sys import maxsize from sklearn.utils import check_random_state from sklearn.utils.fixes import sp_version NONE = object() def check_dimension(dimension, transform=None): if isinstance(dimension, Dimension): return dimension if not isinstance(dimension, (list, tuple, np.ndarray)): raise ValueError("Dimension has to be a list or tuple") kwargs = dict(transform=transform) if transform else {} if len(dimension) == 1: return Categorical(dimension, **kwargs) if len(dimension) == 2: if any([isinstance(d, (str, bool)) or isinstance(d, np.bool_) for d in dimension]): return Categorical(dimension, **kwargs) elif all([isinstance(dim, numbers.Integral) for dim in dimension]): return Integer(*dimension, **kwargs) elif any([isinstance(dim, numbers.Real) for dim in dimension]): return Real(*dimension, **kwargs) if len(dimension) == 3: if any([isinstance(dim, (float, int)) for dim in dimension[:2]]) and dimension[2] in [ "uniform", "log-uniform", ]: return Real(*dimension, **kwargs) else: return Categorical(dimension, **kwargs) if len(dimension) > 3: return Categorical(dimension, **kwargs) raise ValueError(f"Invalid `dimension` {dimension}. See documentation for supported types") class Space: def __init__(self, dimensions): self.dimensions = [check_dimension(dim) for dim in dimensions] def __eq__(self, other): return all([a == b for a, b in zip(self.dimensions, other.dimensions)]) def __repr__(self): dims = short_repr(self.dimensions, affix_size=15) return "Space([{}])".format(",\n ".join(map(str, dims))) def __iter__(self): return iter(self.dimensions) def __len__(self): if any(isinstance(_, Real) for _ in self.dimensions): search_space_size = maxsize else: search_space_size = reduce( lambda x, y: x * y, [ (_.high - _.low + 1) if isinstance(_, Integer) else len(_.bounds) for _ in self.dimensions ], 1, ) return search_space_size def __contains__(self, point): for component, dim in zip(point, self.dimensions): if component not in dim: return False return True def rvs(self, n_samples=1, random_state=None): rng = check_random_state(random_state) columns = [] for dim in self.dimensions: new_val = None try: if sp_version < (0, 16): new_val = dim.rvs(n_samples=n_samples) else: new_val = dim.rvs(n_samples=n_samples, random_state=rng) except TypeError: new_val = dim.rvs(n_samples=n_samples, random_state=rng) finally: columns.append(new_val) rows = [] for i in range(n_samples): r = [] for j in range(self.n_dims): r.append(columns[j][i]) rows.append(r) return rows def transform(self, data): columns = [[] for _ in self.dimensions] for i in range(len(data)): for j in range(self.n_dims): columns[j].append(data[i][j]) for j in range(self.n_dims): columns[j] = self.dimensions[j].transform(columns[j]) data_t = np.hstack([np.asarray(c).reshape((len(data), -1)) for c in columns]) return data_t def inverse_transform(self, data_t): columns = [] start = 0 for j in range(self.n_dims): dim = self.dimensions[j] offset = dim.transformed_size if offset == 1: columns.append(dim.inverse_transform(data_t[:, start])) else: columns.append(dim.inverse_transform(data_t[:, start : start + offset])) start += offset rows = [] for i in range(len(data_t)): r = [] for j in range(self.n_dims): r.append(columns[j][i]) rows.append(r) return rows @property def n_dims(self) -> int: return len(self.dimensions) @property def 
transformed_n_dims(self) -> int: return sum([dim.transformed_size for dim in self.dimensions]) @property def bounds(self): b = [] for dim in self.dimensions: if dim.size == 1: b.append(dim.bounds) else: b.extend(dim.bounds) return b @property def transformed_bounds(self): b = [] for dim in self.dimensions: if dim.transformed_size == 1: b.append(dim.transformed_bounds) else: b.extend(dim.transformed_bounds) return b @property def is_real(self): return all([isinstance(dim, Real) for dim in self.dimensions]) @property def is_categorical(self) -> bool: return all([isinstance(dim, Categorical) for dim in self.dimensions]) def names(self, use_location=True): names = [] for dimension in self.dimensions: if use_location and hasattr(dimension, "location") and dimension.location: names.append(dimension.location) else: names.append(dimension.name) return names def get_by_name(self, name, use_location=True, default=NONE): for dimension in self.dimensions: if use_location and getattr(dimension, "location", None) == name: return dimension elif dimension.name == name: return dimension if default != NONE: return default raise KeyError(f"{name} not found in dimensions")
MIT License
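Space.distance above just sums the per-dimension distances, so it can be exercised with a tiny search space; check_dimension (shown in the file context) lets dimensions be given as plain tuples, the import path is inferred from the file path in this record, and the dimensions and points below are arbitrary examples:

from hyperparameter_hunter.space.space_core import Space

# Two-number tuples become Real/Integer dimensions; tuples of strings become Categorical
space = Space([
    (0.0, 1.0),        # Real dimension, e.g. a learning rate
    (1, 10),           # Integer dimension, e.g. a tree depth
    ("adam", "sgd"),   # Categorical dimension, e.g. an optimizer choice
])

point_a = [0.10, 3, "adam"]
point_b = [0.45, 7, "sgd"]

# Total distance is the sum of each dimension's own distance() between the two values
print(space.distance(point_a, point_b))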
linkedin/detext
src/detext/layers/embedding_layer.py
EmbeddingLayer.embedding_lookup
python
def embedding_lookup(self, inputs): return tf.nn.embedding_lookup(params=self.embedding, ids=inputs)
Returns the embedding of the inputs :param inputs Tensor(dtype=int) Shape=[batch_size, sentence_len] :return Tensor(dtype=float) Shape=[batch_size, sentence_len, num_units]
https://github.com/linkedin/detext/blob/e99c5ddc241dfb27d03729c69f0172f4d845aa3d/src/detext/layers/embedding_layer.py#L149-L155
from abc import abstractmethod, ABC import tensorflow as tf import tensorflow_hub as hub from absl import logging from detext.utils.parsing_utils import InternalFtrType from detext.layers import vocab_layer from detext.layers.vocab_layer import create_vocab_layer from detext.utils.layer_utils import init_word_embedding class EmbeddingLayerBase(ABC, tf.keras.layers.Layer): @tf.function @abstractmethod def tokenize_to_indices(self, inputs): pass @tf.function @abstractmethod def vocab_size(self): pass @tf.function @abstractmethod def embedding_lookup(self, inputs): pass @tf.function @abstractmethod def num_units(self): pass @tf.function(input_signature=[vocab_layer.INPUT_SIGNATURE]) def call(self, inputs): inputs = self.tokenize_to_indices(inputs) inputs[InternalFtrType.EMBEDDED] = self.embedding_lookup(inputs[InternalFtrType.TOKENIZED_IDS]) return inputs def create_embedding_layer(embedding_layer_param, embedding_hub_url): if embedding_hub_url: logging.info(f'Loading pretrained embedding layer from {embedding_hub_url}') embedding_layer = hub.KerasLayer(embedding_hub_url, trainable=embedding_layer_param['we_trainable']) embedding_obj = embedding_layer.resolved_object for method_name in ['__call__', 'tokenize_to_indices', 'num_units', 'vocab_size', 'embedding_lookup']: setattr(embedding_layer, method_name, getattr(embedding_obj, method_name)) return embedding_layer return EmbeddingLayer(**embedding_layer_param) class EmbeddingLayer(EmbeddingLayerBase): def __init__(self, vocab_layer_param, vocab_hub_url, we_file, we_trainable, num_units, name_prefix='w'): super().__init__() self.vocab_layer = create_vocab_layer(vocab_layer_param, vocab_hub_url=vocab_hub_url) self._num_units = num_units self._vocab_size = self.vocab_layer.vocab_size() self.embedding = init_word_embedding(self._vocab_size, num_units, we_trainable, we_file, name_prefix) @tf.function def tokenize_to_indices(self, inputs): return self.vocab_layer(inputs) @tf.function(input_signature=[]) def vocab_size(self): return self._vocab_size @tf.function(input_signature=[]) def num_units(self): return self._num_units @tf.function(input_signature=[tf.TensorSpec(shape=(None, None), dtype=tf.dtypes.int32)])
BSD 2-Clause Simplified License
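embedding_lookup above is a direct call to tf.nn.embedding_lookup; the standalone TensorFlow sketch below demonstrates the shape contract from the docstring with a tiny made-up vocabulary:

import tensorflow as tf

num_units = 4
vocab_size = 6

# Toy embedding table: one num_units-dimensional vector per vocabulary id
embedding = tf.random.uniform((vocab_size, num_units))

# A batch of two "sentences", three token ids each
token_ids = tf.constant([[0, 2, 5],
                         [1, 1, 3]])

embedded = tf.nn.embedding_lookup(params=embedding, ids=token_ids)
print(embedded.shape)   # (2, 3, 4) -> [batch_size, sentence_len, num_units]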
nastools/homeassistant
homeassistant/components/media_player/roku.py
RokuDevice.volume_up
python
def volume_up(self): if self.current_app is not None: self.roku.volume_up()
Volume up media player.
https://github.com/nastools/homeassistant/blob/7ca1180bd42713f2d77bbc3f0b27b231ba8784aa/homeassistant/components/media_player/roku.py#L202-L205
import logging import voluptuous as vol from homeassistant.components.media_player import ( MEDIA_TYPE_VIDEO, SUPPORT_NEXT_TRACK, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_SELECT_SOURCE, MediaPlayerDevice, PLATFORM_SCHEMA) from homeassistant.const import ( CONF_HOST, STATE_IDLE, STATE_PLAYING, STATE_UNKNOWN, STATE_HOME) import homeassistant.helpers.config_validation as cv REQUIREMENTS = [ 'https://github.com/bah2830/python-roku/archive/3.1.2.zip' '#roku==3.1.2'] KNOWN_HOSTS = [] DEFAULT_PORT = 8060 _LOGGER = logging.getLogger(__name__) SUPPORT_ROKU = SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | SUPPORT_SELECT_SOURCE PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_HOST): cv.string, }) def setup_platform(hass, config, add_devices, discovery_info=None): hosts = [] if discovery_info and discovery_info in KNOWN_HOSTS: return if discovery_info is not None: _LOGGER.debug('Discovered Roku: %s', discovery_info[0]) hosts.append(discovery_info[0]) elif CONF_HOST in config: hosts.append(config.get(CONF_HOST)) rokus = [] for host in hosts: new_roku = RokuDevice(host) if new_roku.name is None: _LOGGER.error("Unable to initialize roku at %s", host) else: rokus.append(RokuDevice(host)) KNOWN_HOSTS.append(host) add_devices(rokus) class RokuDevice(MediaPlayerDevice): def __init__(self, host): from roku import Roku self.roku = Roku(host) self.roku_name = None self.ip_address = host self.channels = [] self.current_app = None self.update() def update(self): import requests.exceptions try: self.roku_name = "roku_" + self.roku.device_info.sernum self.ip_address = self.roku.host self.channels = self.get_source_list() if self.roku.current_app is not None: self.current_app = self.roku.current_app else: self.current_app = None except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout): pass def get_source_list(self): return ["Home"] + sorted(channel.name for channel in self.roku.apps) @property def should_poll(self): return True @property def name(self): return self.roku_name @property def state(self): if self.current_app is None: return STATE_UNKNOWN if self.current_app.name in ["Power Saver", "Default screensaver"]: return STATE_IDLE elif self.current_app.name == "Roku": return STATE_HOME elif self.current_app.name is not None: return STATE_PLAYING return STATE_UNKNOWN @property def supported_media_commands(self): return SUPPORT_ROKU @property def media_content_type(self): if self.current_app is None: return None elif self.current_app.name == "Power Saver": return None elif self.current_app.name == "Roku": return None else: return MEDIA_TYPE_VIDEO @property def media_image_url(self): if self.current_app is None: return None elif self.current_app.name == "Roku": return None elif self.current_app.name == "Power Saver": return None elif self.current_app.id is None: return None return 'http://{0}:{1}/query/icon/{2}'.format(self.ip_address, DEFAULT_PORT, self.current_app.id) @property def app_name(self): if self.current_app is not None: return self.current_app.name @property def app_id(self): if self.current_app is not None: return self.current_app.id @property def source(self): if self.current_app is not None: return self.current_app.name @property def source_list(self): return self.channels def media_play_pause(self): if self.current_app is not None: self.roku.play() def media_previous_track(self): if self.current_app is not None: self.roku.reverse() def 
media_next_track(self): if self.current_app is not None: self.roku.forward() def mute_volume(self, mute): if self.current_app is not None: self.roku.volume_mute()
MIT License
xiaomi/minos
supervisor/deployment/rpcinterface.py
DeploymentRPCInterface.start
python
def start(self, service, cluster, job, config_dict, instance_id=-1): return self._do_start(service, cluster, job, instance_id, **config_dict)
Start the specified job @param service the service name @param cluster the cluster name @param job the job name @param instance_id the instance id @param config_dict the config information dictionary @return string 'OK' on success, otherwise, the error message Note: config_dict must contain the following items: 1. start.sh 2. artifact 3. if any config files are needed, put them in the 'config_files' item config_dict can also contain the following optional items: 1. http_url: the server's http service url 2. package_name: package_name, revision, timestamp should be specified simultaneously, otherwise they will be ignored 3. revision 4. timestamp 5. force_update This is an example: config_dict = { 'start.sh': $start_file_content, 'artifact': hadoop, 'config_files': { 'core-site.xml': $core_site_xml_content, ... }, 'http_url': 'http://10.235.3.67:11201', }
https://github.com/xiaomi/minos/blob/510b6e30758f4900a72fee1a5e6258bdc7c83f17/supervisor/deployment/rpcinterface.py#L223-L257
import ConfigParser import cStringIO import subprocess import os import time import urllib2 from exceptions import RuntimeError from supervisor.datatypes import DEFAULT_EXPECTED_EXIT_CODE from supervisor.http import NOT_DONE_YET from supervisor.options import ClientOptions from supervisor.rpcinterface import SupervisorNamespaceRPCInterface from supervisor.states import STOPPED_STATES from supervisor.supervisorctl import Controller from supervisor.xmlrpc import Faults from supervisor.xmlrpc import RPCError DEFAULT_PACKAGE_ROOT = '/home/work/packages' DEFAULT_APP_ROOT = '/home/work/app' DEFAULT_LOG_ROOT = '/home/work/log' DEFAULT_DATA_DIRS = '/home/work/data' CONFIG_PATH = 'conf' JOB_RUN_CONFIG = 'run.cfg' SUCCESS_STATUS = 'OK' class DeploymentRPCInterface: def __init__(self, supervisord, **config): self.supervisord = supervisord self.global_config = config self.supervisor_rpcinterface = SupervisorNamespaceRPCInterface(supervisord) self.package_server = config.get('package_server') self.download_package_uri = config.get('download_package_uri') self.get_latest_package_info_uri = config.get('get_latest_package_info_uri') def get_run_dir(self, service, cluster, job, instance_id=-1): app_root = self.global_config.get('app_root', DEFAULT_APP_ROOT) if instance_id == -1: return '%s/%s/%s/%s' % (app_root, service, cluster, job) else: return '%s/%s/%s/%s/%s' % (app_root, service, cluster, job, instance_id) def get_log_dir(self, service, cluster, job, instance_id=-1): log_root = self.global_config.get('log_root', DEFAULT_LOG_ROOT) if instance_id == -1: return '%s/%s/%s/%s' % (log_root, service, cluster, job) else: return '%s/%s/%s/%s/%s' % (log_root, service, cluster, job, instance_id) def get_stdout_dir(self, service, cluster, job, instance_id=-1): run_dir = self.get_run_dir(service, cluster, job, instance_id) return '%s/stdout' % run_dir def get_available_data_dirs(self, service, cluster, job, instance_id=-1): data_dirs = self.global_config.get('data_dirs', DEFAULT_DATA_DIRS) if instance_id == -1: return ['%s/%s/%s/%s' % (data_dir, service, cluster, job) for data_dir in data_dirs.split(',') ] else: return ['%s/%s/%s/%s/%s' % (data_dir, service, cluster, job, instance_id) for data_dir in data_dirs.split(',') ] def get_data_dirs(self, service, cluster, job, instance_id=-1): file_name = '%s/%s' % (self.get_run_dir(service, cluster, job, instance_id), JOB_RUN_CONFIG) if not os.path.exists(file_name): return 'You should bootstrapped the job first' data_dirs = self.get_available_data_dirs(service, cluster, job, instance_id) run_config = ConfigParser.SafeConfigParser() run_config.read([file_name]) data_dir_indexes = run_config.get('run_info', 'data_dir_indexes') job_data_dirs = [] for i in data_dir_indexes.split(','): job_data_dirs.append(data_dirs[int(i)]) return job_data_dirs def get_package_dir(self, service, cluster, job, instance_id=-1): return '%s/package' % self.get_run_dir(service, cluster, job, instance_id) def get_real_package_dir(self, service, cluster, job, instance_id=-1): return os.readlink(self.get_package_dir(service, cluster, job, instance_id)) def get_current_package_dir(self, service, cluster): package_root = self.global_config.get('package_root') return '%s/%s/%s/current' % (package_root, service, cluster) def get_cleanup_token(self, service, cluster, job, instance_id=-1): file_name = '%s/%s' % (self.get_run_dir(service, cluster, job, instance_id), JOB_RUN_CONFIG) if not os.path.exists(file_name): return 'You should bootstrap the job first' run_config = ConfigParser.SafeConfigParser() 
run_config.read([file_name]) return run_config.get('run_info', 'cleanup_token') def bootstrap(self, service, cluster, job, config_dict, instance_id=-1): return self._do_bootstrap(service, cluster, job, instance_id, **config_dict)
Apache License 2.0
osmr/imgclsmob
chainer_/chainercv2/models/fpenet.py
get_fpenet
python
def get_fpenet(model_name=None, pretrained=False, root=os.path.join("~", ".chainer", "models"), **kwargs): width = 16 channels = [int(width * (2 ** i)) for i in range(3)] init_block_channels = width layers = [1, 3, 9] meu_channels = [64, 32] use_se = False net = FPENet( layers=layers, channels=channels, init_block_channels=init_block_channels, meu_channels=meu_channels, use_se=use_se, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file load_npz( file=get_model_file( model_name=model_name, local_model_store_dir_path=root), obj=net) return net
Create FPENet model with specific parameters. Parameters: ---------- model_name : str or None, default None Model name for loading pretrained model. pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.chainer/models' Location for keeping the model parameters.
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/chainer_/chainercv2/models/fpenet.py#L337-L378
__all__ = ['FPENet', 'fpenet_cityscapes'] import os import chainer.functions as F from chainer import Chain from chainer.serializers import load_npz from .common import conv1x1, conv1x1_block, conv3x3_block, SEBlock, InterpolationBlock, SimpleSequential, MultiOutputSequential class FPEBlock(Chain): def __init__(self, channels, **kwargs): super(FPEBlock, self).__init__(**kwargs) dilates = [1, 2, 4, 8] assert (channels % len(dilates) == 0) mid_channels = channels // len(dilates) with self.init_scope(): self.blocks = SimpleSequential() with self.blocks.init_scope(): for i, dilate in enumerate(dilates): setattr(self.blocks, "block{}".format(i + 1), conv3x3_block( in_channels=mid_channels, out_channels=mid_channels, groups=mid_channels, dilate=dilate, pad=dilate)) def __call__(self, x): xs = F.split_axis(x, indices_or_sections=len(self.blocks.layer_names), axis=1) ys = [] for bni, xsi in zip(self.blocks.layer_names, xs): bi = self.blocks[bni] if len(ys) == 0: ys.append(bi(xsi)) else: ys.append(bi(xsi + ys[-1])) x = F.concat(ys, axis=1) return x class FPEUnit(Chain): def __init__(self, in_channels, out_channels, stride, bottleneck_factor, use_se, **kwargs): super(FPEUnit, self).__init__(**kwargs) self.resize_identity = (in_channels != out_channels) or (stride != 1) self.use_se = use_se mid1_channels = in_channels * bottleneck_factor with self.init_scope(): self.conv1 = conv1x1_block( in_channels=in_channels, out_channels=mid1_channels, stride=stride) self.block = FPEBlock(channels=mid1_channels) self.conv2 = conv1x1_block( in_channels=mid1_channels, out_channels=out_channels, activation=None) if self.use_se: self.se = SEBlock(channels=out_channels) if self.resize_identity: self.identity_conv = conv1x1_block( in_channels=in_channels, out_channels=out_channels, stride=stride, activation=None) self.activ = F.relu def __call__(self, x): if self.resize_identity: identity = self.identity_conv(x) else: identity = x x = self.conv1(x) x = self.block(x) x = self.conv2(x) if self.use_se: x = self.se(x) x = x + identity x = self.activ(x) return x class FPEStage(Chain): def __init__(self, in_channels, out_channels, layers, use_se, **kwargs): super(FPEStage, self).__init__(**kwargs) self.use_block = (layers > 1) with self.init_scope(): if self.use_block: self.down = FPEUnit( in_channels=in_channels, out_channels=out_channels, stride=2, bottleneck_factor=4, use_se=use_se) self.blocks = SimpleSequential() with self.blocks.init_scope(): for i in range(layers - 1): setattr(self.blocks, "block{}".format(i + 1), FPEUnit( in_channels=out_channels, out_channels=out_channels, stride=1, bottleneck_factor=1, use_se=use_se)) else: self.down = FPEUnit( in_channels=in_channels, out_channels=out_channels, stride=1, bottleneck_factor=1, use_se=use_se) def __call__(self, x): x = self.down(x) if self.use_block: y = self.blocks(x) x = x + y return x class MEUBlock(Chain): def __init__(self, in_channels_high, in_channels_low, out_channels, **kwargs): super(MEUBlock, self).__init__(**kwargs) with self.init_scope(): self.conv_high = conv1x1_block( in_channels=in_channels_high, out_channels=out_channels, activation=None) self.conv_low = conv1x1_block( in_channels=in_channels_low, out_channels=out_channels, activation=None) self.conv_w_high = conv1x1( in_channels=out_channels, out_channels=out_channels) self.conv_w_low = conv1x1( in_channels=1, out_channels=1) self.sigmoid = F.sigmoid self.relu = F.relu self.up = InterpolationBlock( scale_factor=2, align_corners=True) def __call__(self, x_high, x_low): x_high = 
self.conv_high(x_high) x_low = self.conv_low(x_low) w_high = F.average_pooling_2d(x_high, ksize=x_high.shape[2:]) w_high = self.conv_w_high(w_high) w_high = self.relu(w_high) w_high = self.sigmoid(w_high) w_low = x_low.mean(axis=1, keepdims=True) w_low = self.conv_w_low(w_low) w_low = self.sigmoid(w_low) x_high = self.up(x_high) x_high = x_high * w_low x_low = x_low * w_high out = x_high + x_low return out class FPENet(Chain): def __init__(self, layers, channels, init_block_channels, meu_channels, use_se, aux=False, fixed_size=False, in_channels=3, in_size=(1024, 2048), classes=19, **kwargs): super(FPENet, self).__init__(**kwargs) assert (aux is not None) assert (fixed_size is not None) assert ((in_size[0] % 8 == 0) and (in_size[1] % 8 == 0)) self.in_size = in_size self.classes = classes self.fixed_size = fixed_size with self.init_scope(): self.stem = conv3x3_block( in_channels=in_channels, out_channels=init_block_channels, stride=2) in_channels = init_block_channels self.encoder = MultiOutputSequential(return_last=False) with self.encoder.init_scope(): for i, (layers_i, out_channels) in enumerate(zip(layers, channels)): stage = FPEStage( in_channels=in_channels, out_channels=out_channels, layers=layers_i, use_se=use_se) stage.do_output = True setattr(self.encoder, "stage{}".format(i + 1), stage) in_channels = out_channels self.meu1 = MEUBlock( in_channels_high=channels[-1], in_channels_low=channels[-2], out_channels=meu_channels[0]) self.meu2 = MEUBlock( in_channels_high=meu_channels[0], in_channels_low=channels[-3], out_channels=meu_channels[1]) in_channels = meu_channels[1] self.classifier = conv1x1( in_channels=in_channels, out_channels=classes, use_bias=True) self.up = InterpolationBlock( scale_factor=2, align_corners=True) def __call__(self, x): x = self.stem(x) y = self.encoder(x) x = self.meu1(y[2], y[1]) x = self.meu2(x, y[0]) x = self.classifier(x) x = self.up(x) return x
MIT License
dedsecinside/awesome-scripts
APIs/Telegram API/telethon/network/mtprotosender.py
MTProtoSender.__init__
python
def __init__(self, auth_key, *, loggers, retries=5, delay=1, auto_reconnect=True, connect_timeout=None, auth_key_callback=None, update_callback=None, auto_reconnect_callback=None): self._connection = None self._loggers = loggers self._log = loggers[__name__] self._retries = retries self._delay = delay self._auto_reconnect = auto_reconnect self._connect_timeout = connect_timeout self._auth_key_callback = auth_key_callback self._update_callback = update_callback self._auto_reconnect_callback = auto_reconnect_callback self._connect_lock = asyncio.Lock() self._user_connected = False self._reconnecting = False self._disconnected = asyncio.get_event_loop().create_future() self._disconnected.set_result(None) self._send_loop_handle = None self._recv_loop_handle = None self.auth_key = auth_key or AuthKey(None) self._state = MTProtoState(self.auth_key, loggers=self._loggers) self._send_queue = MessagePacker(self._state, loggers=self._loggers) self._pending_state = {} self._pending_ack = set() self._last_acks = collections.deque(maxlen=10) self._handlers = { RpcResult.CONSTRUCTOR_ID: self._handle_rpc_result, MessageContainer.CONSTRUCTOR_ID: self._handle_container, GzipPacked.CONSTRUCTOR_ID: self._handle_gzip_packed, Pong.CONSTRUCTOR_ID: self._handle_pong, BadServerSalt.CONSTRUCTOR_ID: self._handle_bad_server_salt, BadMsgNotification.CONSTRUCTOR_ID: self._handle_bad_notification, MsgDetailedInfo.CONSTRUCTOR_ID: self._handle_detailed_info, MsgNewDetailedInfo.CONSTRUCTOR_ID: self._handle_new_detailed_info, NewSessionCreated.CONSTRUCTOR_ID: self._handle_new_session_created, MsgsAck.CONSTRUCTOR_ID: self._handle_ack, FutureSalts.CONSTRUCTOR_ID: self._handle_future_salts, MsgsStateReq.CONSTRUCTOR_ID: self._handle_state_forgotten, MsgResendReq.CONSTRUCTOR_ID: self._handle_state_forgotten, MsgsAllInfo.CONSTRUCTOR_ID: self._handle_msg_all, }
Initialize a new MTProto sender. Args: auth_key (AuthKey): authorization key to use; a blank AuthKey is created when None is given. loggers (dict): mapping from module name to logger instance. retries (int): number of retries for connecting and sending. delay (float): delay between retries, in seconds. auto_reconnect (bool): whether to reconnect automatically after losing the connection. connect_timeout (float): timeout for connection attempts. auth_key_callback (callable): called when a new authorization key is generated. update_callback (callable): called with updates received from the server. auto_reconnect_callback (callable): called after a successful automatic reconnection.
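A hedged construction sketch (not part of this record); the `loggers` mapping built below is an assumption, since any mapping from module name to a logging.Logger works here.

# Hedged sketch: construct a sender with a fresh auth key.
import logging
from collections import defaultdict

from telethon.network.mtprotosender import MTProtoSender

loggers = defaultdict(lambda: logging.getLogger('telethon'))
sender = MTProtoSender(auth_key=None, loggers=loggers, retries=5, delay=1, auto_reconnect=True)
# sender.auth_key now holds a freshly created AuthKey(None); connecting would be the next step.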
https://github.com/dedsecinside/awesome-scripts/blob/856835e5ff5f8a6af2d74bb25800c620feb712e3/APIs/Telegram API/telethon/network/mtprotosender.py#L43-L124
import asyncio import collections import struct from . import authenticator from ..extensions.messagepacker import MessagePacker from .mtprotoplainsender import MTProtoPlainSender from .requeststate import RequestState from .mtprotostate import MTProtoState from ..tl.tlobject import TLRequest from .. import helpers, utils from ..errors import ( BadMessageError, InvalidBufferError, SecurityError, TypeNotFoundError, rpc_message_to_error ) from ..extensions import BinaryReader from ..tl.core import RpcResult, MessageContainer, GzipPacked from ..tl.functions.auth import LogOutRequest from ..tl.types import ( MsgsAck, Pong, BadServerSalt, BadMsgNotification, FutureSalts, MsgNewDetailedInfo, NewSessionCreated, MsgDetailedInfo, MsgsStateReq, MsgsStateInfo, MsgsAllInfo, MsgResendReq, upload ) from ..crypto import AuthKey from ..helpers import retry_range class MTProtoSender:
MIT License
packtpublishing/-learn-artificial-intelligence-with-tensorflow
section3/components.py
glove_embed
python
def glove_embed(features, embed_shape, vocab_path, projector_path=None):
    with tf.variable_scope('glove_embed', values=[features]):
        embedding_matrix = get_embedding_matrix(embed_shape, vocab_path)
        embed_tensor = tf.get_variable(
            name='embed_tensor',
            initializer=tf.constant_initializer(embedding_matrix),
            dtype=tf.float32,
            trainable=False,
            shape=embed_shape)
        tf.summary.histogram('embed_tensor', embed_tensor)
        embedded_features = tf.nn.embedding_lookup(embed_tensor, features)

    if projector_path is not None:
        tf.logging.info('Setting up TensorBoard Projector.')
        init_embeddings_projector(
            vocab_path=vocab_path,
            tensor_name=embed_tensor.name,
            logdir=projector_path)

    return embedded_features
Loads and builds an embedding layer initialized with pre-trained GloVe embeddings, using only the words given in `vocab_path`. Args: features: int64 Tensor with shape (batch_size, max_seq_len) containing the integer ids we want to embed into vectors. embed_shape: (tuple) shape (vocab_size, embed_dim) of the embedding layer to build. vocab_path: (str) full path to text file where each line contains a single word, and the number of lines defines the size of the vocabulary. projector_path (optional): path to store embedding information needed to link TensorBoard projector with word labels. Returns: embedded_features: float32 Tensor with shape (batch_size, max_seq_len, embed_dim) containing the embedded `features`.
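A hedged usage sketch in TF1 graph mode (matching the surrounding context); the vocabulary size, embedding dimension and file paths are illustrative assumptions.

# Illustrative TF1 usage; shapes and paths are placeholders.
import tensorflow as tf

features = tf.placeholder(tf.int64, shape=(None, 50))      # (batch_size, max_seq_len)
embedded = glove_embed(
    features,
    embed_shape=(10000, 300),                               # (vocab_size, embed_dim)
    vocab_path='data/vocab.txt',                            # hypothetical vocabulary file
    projector_path='logs/projector')                        # optional TensorBoard projector dir
# `embedded` is a float32 Tensor of shape (batch_size, 50, 300) and is not trainable.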
https://github.com/packtpublishing/-learn-artificial-intelligence-with-tensorflow/blob/98291ee60cab87757ee464282ee5966000437b73/section3/components.py#L46-L83
from tensorflow.contrib.tensorboard.plugins import projector import tensorflow as tf from util import glove import numpy as np from tensorflow.contrib.rnn import DropoutWrapper, LSTMCell, MultiRNNCell, LSTMStateTuple def init_embeddings_projector(vocab_path, tensor_name, logdir): config = projector.ProjectorConfig(embeddings=[projector.EmbeddingInfo( tensor_name=tensor_name, metadata_path=vocab_path)]) writer = tf.summary.FileWriter(logdir) projector.visualize_embeddings(writer, config) def get_embedding_matrix(embed_shape, vocab_path): with open(vocab_path) as f: vocabulary = list(map(str.strip, f.readlines())) word_vec = glove.get_glove(dim=embed_shape[-1]) emb_matrix = np.zeros(embed_shape) for i, word in enumerate(vocabulary): embed_vec = word_vec.get(word) if embed_vec is not None: emb_matrix[i] = embed_vec return emb_matrix
MIT License
azure/autorest.python
test/vanilla/version-tolerant/Expected/AcceptanceTests/BodyDateVersionTolerant/bodydateversiontolerant/aio/operations/_operations.py
DateOperations.get_invalid_date
python
async def get_invalid_date(self, **kwargs: Any) -> datetime.date:
    cls = kwargs.pop("cls", None)
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    request = build_date_get_invalid_date_request(
        template_url=self.get_invalid_date.metadata["url"],
    )
    request.url = self._client.format_url(request.url)

    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    if response.content:
        deserialized = response.json()
    else:
        deserialized = None

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
Get invalid date value. :return: date :rtype: ~datetime.date :raises: ~azure.core.exceptions.HttpResponseError
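A hedged call sketch for this generated async operation; the client class name and the `date` operation-group attribute are assumptions based on the generated package layout, not shown in this record.

# Hedged usage of the generated async client (names assumed).
import asyncio
from bodydateversiontolerant.aio import AutoRestDateTestService  # assumed client class

async def main():
    async with AutoRestDateTestService() as client:
        value = await client.date.get_invalid_date()  # raises HttpResponseError on a non-200 response
        print(value)

asyncio.run(main())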
https://github.com/azure/autorest.python/blob/90d60a965788e3b4c0809e6686bdc3525acac89c/test/vanilla/version-tolerant/Expected/AcceptanceTests/BodyDateVersionTolerant/bodydateversiontolerant/aio/operations/_operations.py#L95-L126
import datetime import functools from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async from ...operations._operations import ( build_date_get_invalid_date_request, build_date_get_max_date_request, build_date_get_min_date_request, build_date_get_null_request, build_date_get_overflow_date_request, build_date_get_underflow_date_request, build_date_put_max_date_request, build_date_put_min_date_request, ) T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class DateOperations: def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace_async async def get_null(self, **kwargs: Any) -> Optional[datetime.date]: cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_date_get_null_request( template_url=self.get_null.metadata["url"], ) request.url = self._client.format_url(request.url) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response) if response.content: deserialized = response.json() else: deserialized = None if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_null.metadata = {"url": "/date/null"} @distributed_trace_async
MIT License
wavefronthq/python-client
wavefront_api_client/models/derived_metric_definition.py
DerivedMetricDefinition.tags
python
def tags(self, tags):
    self._tags = tags
Sets the tags of this DerivedMetricDefinition. :param tags: The tags of this DerivedMetricDefinition. # noqa: E501 :type: WFTags
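A small hedged sketch of the setter in use; the top-level re-exports and the WFTags constructor argument are assumptions.

# Hedged sketch: attach tags to a derived metric definition.
from wavefront_api_client import DerivedMetricDefinition, WFTags  # top-level re-exports assumed

dm = DerivedMetricDefinition(name='cpu.derived', query='ts(cpu.usage.user)', minutes=5)
dm.tags = WFTags(customer_tags=['team-a'])   # invokes the setter shown above; field name assumed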
https://github.com/wavefronthq/python-client/blob/e410ce0dd8a2334e995456f4f3d44e0f04664a3a/wavefront_api_client/models/derived_metric_definition.py#L773-L781
import pprint import re import six from wavefront_api_client.configuration import Configuration class DerivedMetricDefinition(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'additional_information': 'str', 'create_user_id': 'str', 'created': 'int', 'created_epoch_millis': 'int', 'creator_id': 'str', 'deleted': 'bool', 'hosts_used': 'list[str]', 'id': 'str', 'in_trash': 'bool', 'include_obsolete_metrics': 'bool', 'last_error_message': 'str', 'last_failed_time': 'int', 'last_processed_millis': 'int', 'last_query_time': 'int', 'metrics_used': 'list[str]', 'minutes': 'int', 'name': 'str', 'points_scanned_at_last_query': 'int', 'process_rate_minutes': 'int', 'query': 'str', 'query_failing': 'bool', 'query_qb_enabled': 'bool', 'query_qb_serialization': 'str', 'status': 'list[str]', 'tagpaths': 'list[str]', 'tags': 'WFTags', 'update_user_id': 'str', 'updated': 'int', 'updated_epoch_millis': 'int', 'updater_id': 'str' } attribute_map = { 'additional_information': 'additionalInformation', 'create_user_id': 'createUserId', 'created': 'created', 'created_epoch_millis': 'createdEpochMillis', 'creator_id': 'creatorId', 'deleted': 'deleted', 'hosts_used': 'hostsUsed', 'id': 'id', 'in_trash': 'inTrash', 'include_obsolete_metrics': 'includeObsoleteMetrics', 'last_error_message': 'lastErrorMessage', 'last_failed_time': 'lastFailedTime', 'last_processed_millis': 'lastProcessedMillis', 'last_query_time': 'lastQueryTime', 'metrics_used': 'metricsUsed', 'minutes': 'minutes', 'name': 'name', 'points_scanned_at_last_query': 'pointsScannedAtLastQuery', 'process_rate_minutes': 'processRateMinutes', 'query': 'query', 'query_failing': 'queryFailing', 'query_qb_enabled': 'queryQBEnabled', 'query_qb_serialization': 'queryQBSerialization', 'status': 'status', 'tagpaths': 'tagpaths', 'tags': 'tags', 'update_user_id': 'updateUserId', 'updated': 'updated', 'updated_epoch_millis': 'updatedEpochMillis', 'updater_id': 'updaterId' } def __init__(self, additional_information=None, create_user_id=None, created=None, created_epoch_millis=None, creator_id=None, deleted=None, hosts_used=None, id=None, in_trash=None, include_obsolete_metrics=None, last_error_message=None, last_failed_time=None, last_processed_millis=None, last_query_time=None, metrics_used=None, minutes=None, name=None, points_scanned_at_last_query=None, process_rate_minutes=None, query=None, query_failing=None, query_qb_enabled=None, query_qb_serialization=None, status=None, tagpaths=None, tags=None, update_user_id=None, updated=None, updated_epoch_millis=None, updater_id=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._additional_information = None self._create_user_id = None self._created = None self._created_epoch_millis = None self._creator_id = None self._deleted = None self._hosts_used = None self._id = None self._in_trash = None self._include_obsolete_metrics = None self._last_error_message = None self._last_failed_time = None self._last_processed_millis = None self._last_query_time = None self._metrics_used = None self._minutes = None self._name = None self._points_scanned_at_last_query = None self._process_rate_minutes = None self._query = None self._query_failing = None self._query_qb_enabled = None self._query_qb_serialization = None self._status = None self._tagpaths = None self._tags = None 
self._update_user_id = None self._updated = None self._updated_epoch_millis = None self._updater_id = None self.discriminator = None if additional_information is not None: self.additional_information = additional_information if create_user_id is not None: self.create_user_id = create_user_id if created is not None: self.created = created if created_epoch_millis is not None: self.created_epoch_millis = created_epoch_millis if creator_id is not None: self.creator_id = creator_id if deleted is not None: self.deleted = deleted if hosts_used is not None: self.hosts_used = hosts_used if id is not None: self.id = id if in_trash is not None: self.in_trash = in_trash if include_obsolete_metrics is not None: self.include_obsolete_metrics = include_obsolete_metrics if last_error_message is not None: self.last_error_message = last_error_message if last_failed_time is not None: self.last_failed_time = last_failed_time if last_processed_millis is not None: self.last_processed_millis = last_processed_millis if last_query_time is not None: self.last_query_time = last_query_time if metrics_used is not None: self.metrics_used = metrics_used self.minutes = minutes self.name = name if points_scanned_at_last_query is not None: self.points_scanned_at_last_query = points_scanned_at_last_query if process_rate_minutes is not None: self.process_rate_minutes = process_rate_minutes self.query = query if query_failing is not None: self.query_failing = query_failing if query_qb_enabled is not None: self.query_qb_enabled = query_qb_enabled if query_qb_serialization is not None: self.query_qb_serialization = query_qb_serialization if status is not None: self.status = status if tagpaths is not None: self.tagpaths = tagpaths if tags is not None: self.tags = tags if update_user_id is not None: self.update_user_id = update_user_id if updated is not None: self.updated = updated if updated_epoch_millis is not None: self.updated_epoch_millis = updated_epoch_millis if updater_id is not None: self.updater_id = updater_id @property def additional_information(self): return self._additional_information @additional_information.setter def additional_information(self, additional_information): self._additional_information = additional_information @property def create_user_id(self): return self._create_user_id @create_user_id.setter def create_user_id(self, create_user_id): self._create_user_id = create_user_id @property def created(self): return self._created @created.setter def created(self, created): self._created = created @property def created_epoch_millis(self): return self._created_epoch_millis @created_epoch_millis.setter def created_epoch_millis(self, created_epoch_millis): self._created_epoch_millis = created_epoch_millis @property def creator_id(self): return self._creator_id @creator_id.setter def creator_id(self, creator_id): self._creator_id = creator_id @property def deleted(self): return self._deleted @deleted.setter def deleted(self, deleted): self._deleted = deleted @property def hosts_used(self): return self._hosts_used @hosts_used.setter def hosts_used(self, hosts_used): self._hosts_used = hosts_used @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def in_trash(self): return self._in_trash @in_trash.setter def in_trash(self, in_trash): self._in_trash = in_trash @property def include_obsolete_metrics(self): return self._include_obsolete_metrics @include_obsolete_metrics.setter def include_obsolete_metrics(self, include_obsolete_metrics): self._include_obsolete_metrics = 
include_obsolete_metrics @property def last_error_message(self): return self._last_error_message @last_error_message.setter def last_error_message(self, last_error_message): self._last_error_message = last_error_message @property def last_failed_time(self): return self._last_failed_time @last_failed_time.setter def last_failed_time(self, last_failed_time): self._last_failed_time = last_failed_time @property def last_processed_millis(self): return self._last_processed_millis @last_processed_millis.setter def last_processed_millis(self, last_processed_millis): self._last_processed_millis = last_processed_millis @property def last_query_time(self): return self._last_query_time @last_query_time.setter def last_query_time(self, last_query_time): self._last_query_time = last_query_time @property def metrics_used(self): return self._metrics_used @metrics_used.setter def metrics_used(self, metrics_used): self._metrics_used = metrics_used @property def minutes(self): return self._minutes @minutes.setter def minutes(self, minutes): if self._configuration.client_side_validation and minutes is None: raise ValueError("Invalid value for `minutes`, must not be `None`") self._minutes = minutes @property def name(self): return self._name @name.setter def name(self, name): if self._configuration.client_side_validation and name is None: raise ValueError("Invalid value for `name`, must not be `None`") self._name = name @property def points_scanned_at_last_query(self): return self._points_scanned_at_last_query @points_scanned_at_last_query.setter def points_scanned_at_last_query(self, points_scanned_at_last_query): self._points_scanned_at_last_query = points_scanned_at_last_query @property def process_rate_minutes(self): return self._process_rate_minutes @process_rate_minutes.setter def process_rate_minutes(self, process_rate_minutes): self._process_rate_minutes = process_rate_minutes @property def query(self): return self._query @query.setter def query(self, query): if self._configuration.client_side_validation and query is None: raise ValueError("Invalid value for `query`, must not be `None`") self._query = query @property def query_failing(self): return self._query_failing @query_failing.setter def query_failing(self, query_failing): self._query_failing = query_failing @property def query_qb_enabled(self): return self._query_qb_enabled @query_qb_enabled.setter def query_qb_enabled(self, query_qb_enabled): self._query_qb_enabled = query_qb_enabled @property def query_qb_serialization(self): return self._query_qb_serialization @query_qb_serialization.setter def query_qb_serialization(self, query_qb_serialization): self._query_qb_serialization = query_qb_serialization @property def status(self): return self._status @status.setter def status(self, status): self._status = status @property def tagpaths(self): return self._tagpaths @tagpaths.setter def tagpaths(self, tagpaths): self._tagpaths = tagpaths @property def tags(self): return self._tags @tags.setter
Apache License 2.0
jgorset/django-respite
respite/utils/parsers.py
parse_content_type
python
def parse_content_type(content_type):
    if '; charset=' in content_type:
        return tuple(content_type.split('; charset='))
    else:
        if 'text' in content_type:
            encoding = 'ISO-8859-1'
        else:
            try:
                format = formats.find_by_content_type(content_type)
            except formats.UnknownFormat:
                encoding = 'ISO-8859-1'
            else:
                encoding = format.default_encoding or 'ISO-8859-1'
        return (content_type, encoding)
Return a tuple of content type and charset. :param content_type: A string describing a content type.
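A few illustrative inputs and the tuples they map to (values chosen for illustration only):

parse_content_type('application/json; charset=utf-8')  # ('application/json', 'utf-8')
parse_content_type('text/html')                         # ('text/html', 'ISO-8859-1')
parse_content_type('application/json')                  # format's default encoding, or 'ISO-8859-1'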
https://github.com/jgorset/django-respite/blob/719469d11baf91d05917bab1623bd82adc543546/respite/utils/parsers.py#L10-L29
from django.http.multipartparser import MultiPartParser from respite import formats try: from cStringIO import StringIO except ImportError: from StringIO import StringIO
MIT License
lil-lab/atis
anonymization.py
Anonymizer.get_anon_id
python
def get_anon_id(self, token):
    if self.is_anon_tok(token):
        return self.entity_types.index(token.split(SEPARATOR)[0])
    else:
        return -1
Gets the entity type index (unique ID) for a token. Input: token (str): The token to get the index from. Returns: int, the entity type index if the token is an anonymized token; otherwise -1.
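A hedged sketch; the anonymization file and its entity types are assumptions used only to show the token format.

# Hypothetical example; file contents (and hence entity types) are assumed.
anon = Anonymizer('anonymization.json')
token = anon.entity_types[0] + SEPARATOR + '0'   # e.g. "city#0" if "city" is the first type
anon.get_anon_id(token)                          # -> 0, the index of that entity type
anon.get_anon_id('boston')                       # -> -1, not an anonymized token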
https://github.com/lil-lab/atis/blob/08a17a7be8cd7b40d2f35e089947df4d543b3321/anonymization.py#L130-L142
import copy import json import util ENTITY_NAME = "ENTITY" CONSTANT_NAME = "CONSTANT" TIME_NAME = "TIME" SEPARATOR = "#" def timeval(string): if string.endswith("am") or string.endswith( "pm") and string[:-2].isdigit(): numval = int(string[:-2]) if len(string) == 3 or len(string) == 4: numval *= 100 if string.endswith("pm"): numval += 1200 return str(numval) return "" def is_time(string): if string.endswith("am") or string.endswith("pm"): if string[:-2].isdigit(): return True return False def deanonymize(sequence, ent_dict, key): new_sequence = [] for token in sequence: if token in ent_dict: new_sequence.extend(ent_dict[token][key]) else: new_sequence.append(token) return new_sequence class Anonymizer: def __init__(self, filename): self.anonymization_map = [] self.entity_types = [] self.keys = set() pairs = [json.loads(line) for line in open(filename).readlines()] for pair in pairs: for key in pair: if key != "type": self.keys.add(key) self.anonymization_map.append(pair) if pair["type"] not in self.entity_types: self.entity_types.append(pair["type"]) self.entity_types.append(ENTITY_NAME) self.entity_types.append(CONSTANT_NAME) self.entity_types.append(TIME_NAME) self.entity_set = set(self.entity_types) def get_entity_type_from_token(self, token): colon_loc = token.index(SEPARATOR) entity_type = token[:colon_loc] assert entity_type in self.entity_set return entity_type def is_anon_tok(self, token): return token.split(SEPARATOR)[0] in self.entity_set
MIT License
briannemsick/barrage
barrage/api.py
RecordTransformer.network_params
python
def network_params(self) -> dict:
    return self._network_params
Special params passed to the network builder.
https://github.com/briannemsick/barrage/blob/f86bd0723abc0ab94b0b8f2ca3ffa5e3b7541455/barrage/api.py#L119-L121
import abc import enum from typing import Any, Dict, List, Tuple, Union import numpy as np import pandas as pd Record = Dict[str, Any] Records = List[Record] InputRecords = Union[Records, pd.DataFrame] DataRecord = Tuple[Dict[str, Union[np.ndarray, float]], ...] BatchDataRecords = Tuple[Dict[str, np.ndarray], ...] RecordScore = Dict[str, np.ndarray] BatchRecordScores = List[RecordScore] class RecordMode(enum.Enum): TRAIN = 0 VALIDATION = 1 SCORE = 2 class RecordLoader(abc.ABC): def __init__(self, mode: RecordMode, **params): self.mode = mode def __call__(self, record: Record) -> DataRecord: return self.load(record) @abc.abstractmethod def load(self, record: Record) -> DataRecord: raise NotImplementedError() class RecordTransformer(abc.ABC): def __init__(self, mode: RecordMode, loader: RecordLoader, **params): self.mode = mode self.loader = loader self._network_params = {} @abc.abstractmethod def fit(self, records: Records): raise NotImplementedError() @abc.abstractmethod def transform(self, data_record: DataRecord) -> DataRecord: raise NotImplementedError() @abc.abstractmethod def postprocess(self, score: RecordScore) -> RecordScore: raise NotImplementedError() @abc.abstractmethod def load(self, path: str): raise NotImplementedError() @abc.abstractmethod def save(self, path: str): raise NotImplementedError() @property
MIT License
michael-f-ellis/tbon
parser.py
MidiPreEvaluator.beat
python
def beat(self, node, children):
    state = self.processing_state
    mult, numer = TIMESIG_LUT[state['beatspec']]
    beat_length = 4 * mult / numer
    subbeat_length = beat_length/state['subbeats']
    subbeats = []
    for n in range(state['subbeats']):
        subbeats.append(state['beat_index'] + (n * subbeat_length))
    state['subbeat_starts'].append(tuple(subbeats))
    state['subbeats'] = 0
    state['beat_index'] += beat_length
    state['bar_beat_count'] += 1
    if state['beat_index'] == beat_length:
        for m in self.meta_output:
            if m[0] == 'T':
                break
        else:
            self.insert_tempo_meta(state, index=0)
    state['subbeat_lengths'].append(subbeat_length)
    self.beat_lengths.append(beat_length)
Compute subbeat duration for current beat in current tempo. DESIGN NOTE: tbon will not support changing the tempo within a beat.
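A worked-numbers sketch of the arithmetic this visitor performs, using the TIMESIG_LUT table shown in the context; the beat spec and subbeat count are illustrative only.

# Worked numbers only (mirrors the visitor's arithmetic, does not call it).
mult, numer = TIMESIG_LUT["4."]          # dotted-quarter beat spec: (3, 8)
beat_length = 4 * mult / numer           # 4 * 3 / 8 = 1.5 quarter-note units
subbeats = 3                             # assume three subbeats were counted in this beat
subbeat_length = beat_length / subbeats  # 0.5 quarter-note units per subbeat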
https://github.com/michael-f-ellis/tbon/blob/e98172443b73b8309a6963bffe9ccbc767766c54/parser.py#L284-L312
import keysigs from parsimonious.grammar import Grammar def parse(source): grammar = Grammar( """ score = wsc* music* music = (partswitch* bar+)+ wsc* partswitch = "P=" partnum wsc = comment / ws+ comment = ws* ~r"/\*.*?\*/"s ws* bar = (wsc* (meta / beat) wsc+)+ barline meta = beatspec / key / tempo / relativetempo / velocity / de_emphasis / channel / instrument beatspec = "B=" ("2." / "2" / "4." / "4" / "8." / "8") key = "K=" keyname keyname = ~r"[a-gA-G](@|#)?" tempo = "T=" floatnum relativetempo = "t=" floatnum velocity = "V=" floatnum de_emphasis = "D=" floatnum channel = "C=" chnum partnum = ~r"[1-9][0-9]*"i instrument = "I=" inum inum = ~r"[1-9][0-9]*"i floatnum = ~r"\d*\.?\d+"i chnum = ~r"\d*\.?\d+"i beat = subbeat+ barline = "|" / ":" extendable = chord / roll / ornament / pitch / rest pitch = octave* alteration? pitchname chord = chordstart chorditem chorditem* rparen chordstart = "(" chorditem = chordpitch / chordhold / chordrest chordpitch = octave* alteration? pitchname chordhold = '-' chordrest = "_" / "z" rparen = ")" roll = rollstart pitch pitch+ rparen rollstart = "(:" ornament = ornamentstart pitch pitch+ rparen ornamentstart = "(~" subbeat = extendable / hold rest = "_" / "z" hold = "-" octave = octave_up / octave_down alteration = doublesharp / sharp / doubleflat / flat / natural doublesharp = "𝄪" / "##" sharp = "♯" / "#" doubleflat = "𝄫" / "@@" flat = "♭" / "@" natural = "♮" / "%" octave_up = "^" octave_down = "/" pitchname = ~"[a-g1-7]"i ws = ~r"\s*"i """ ) return grammar.parse(source) NOTE = 0 CHORD = 1 ROLL = 2 ORNAMENT = 3 TIMESIG_LUT = { "2.":(3, 4), "2":(1, 2), "4.":(3, 8), "4":(1, 4), "8.":(3, 16), "8":(1, 8), } def time_signature(beatspec, beatcount, index, part): multiplier, numerator = TIMESIG_LUT[beatspec] return ('M', index, multiplier*beatcount, numerator, part) class MidiPreEvaluator(): def __init__(self): self.first_tempo = 120 self.output = [] self.meta_output = [] self.beat_map = {1: []} self.beat_lengths = [] self.subbeat_starts = [] self.subbeat_lengths = [] self.partstates = {0: self.new_part_state(0)} self.processing_state = self.partstates[0] self.current_part = 0 def eval(self, source, verbosity=2): node = parse(source) if isinstance(source, str) else source method = getattr(self, node.expr_name, lambda node, children: children) method(node, [self.eval(n, verbosity) for n in node]) self.show_progress(node, verbosity) return self.output def show_progress(self, node, verbosity): if verbosity <= 0: return if node.expr_name not in ('', 'ws', None): print("Evaluated {}, '{}'".format(node.expr_name, node.text)) print("output={}".format(self.output)) if verbosity > 1: print('state={}'.format(self.processing_state)) def score(self, node, children): if len(self.partstates) == 1: d = self.partstates[0] self.subbeat_starts = d['subbeat_starts'] for n in d['subbeat_lengths']: self.subbeat_lengths.append(n) else: for _, d in self.partstates.items(): self.subbeat_lengths.append(tuple(d['subbeat_lengths'])) self.subbeat_starts.append(d['subbeat_starts']) def partswitch(self, node, children): newpartnumber = int(node.children[1].text) newpindex = newpartnumber - 1 try: self.processing_state = self.partstates[newpindex] except KeyError: pstate = self.new_part_state(newpindex) self.partstates[newpindex] = pstate self.processing_state = self.partstates[newpindex] self.current_part = newpindex self.beat_map[newpartnumber] = [] def new_part_state(self, pindex): return dict( basetempo=self.first_tempo, tempo=self.first_tempo, beat_index=0, bar_beat_count=0, channel=1, 
in_chord=False, chord_tone_count=0, subbeats=0, beatspec="4", timesig=('M', 0.0, 4, 4, pindex), subbeat_lengths=[], subbeat_starts=[], ) def channel(self, node, children): state = self.processing_state newchannel = int(node.children[1].text) if 1 <= newchannel <= 16: state['channel'] = newchannel else: msg = ("\nInvalid channel number, {}. " "Must be between 1 and 16, inclusive.") raise ValueError(msg.format(newchannel)) def instrument(self, node, children): state = self.processing_state newinstrument = int(node.children[1].text) if 1 <= newinstrument <= 128: index = state['beat_index'] state['instrument'] = newinstrument chan = state['channel'] part = self.current_part self.meta_output.append(('I', index, newinstrument, part, chan)) else: msg = ("\nInvalid instrument number, {}. " "Must be between 1 and 128, inclusive.") raise ValueError(msg.format(newinstrument)) def tempo(self, node, children): if self.current_part == 0: state = self.processing_state newtempo = int(round(float(node.children[1].text))) if newtempo > 1: state['basetempo'] = state['tempo'] = newtempo else: msg = ("\nInvalid Tempo, {}. " "Tempo must be greater than 1.") raise ValueError(msg.format(newtempo)) self.insert_tempo_meta(state) else: print( "Ignoring tempo spec in part {}.".format(self.current_part)) def key(self, node, children): state = self.processing_state keyname = node.children[1].text.strip() if keyname in keysigs.KEYSIGS.keys(): self.processing_state['keyname'] = keyname else: msg = ("\n Invalid key name, '{}'. " "Must be one of {}.") validkeys = ', '.join(sorted(keysigs.KEYSIGS.keys())) raise ValueError(msg.format(keyname, validkeys)) index = state['beat_index'] sig = keysigs.MIDISIGS[keyname] part = self.current_part self.meta_output.append(('K', index, sig, part)) def relativetempo(self, node, children): if self.current_part == 0: state = self.processing_state xtempo = float(node.children[1].text) if xtempo > 0.0: state['tempo'] = int(round(xtempo * state['basetempo'])) else: msg = ("\nInvalid relative tempo, {}. " "Must be greater than 0.0") raise ValueError(msg.format(xtempo)) self.insert_tempo_meta(state) else: print( "Ignoring tempo spec in part {}.".format(self.current_part)) def beatspec(self, node, children): state = self.processing_state oldbeatspec = state['beatspec'] newbeatspec = node.children[1].text if oldbeatspec != newbeatspec: state['beatspec'] = newbeatspec def subbeat(self, node, children): state = self.processing_state state['subbeats'] += 1
MIT License
qk4l/zabbix-cachet
zabbix-cachet.py
Cachet.new_components
python
def new_components(self, name, **kwargs):
    params = {'name': name, 'link': '', 'description': '', 'status': '1', 'group_id': 0}
    params.update(kwargs)
    for i in ('link', 'description'):
        if str(params[i]).strip() == '':
            params.pop(i)
    component = self.get_components(name)
    if isinstance(component, list):
        for i in component:
            if i['group_id'] == params['group_id']:
                return i
    elif isinstance(component, dict):
        if not component['id'] == 0 and component.get('group_id', None) == params['group_id']:
            return component
    url = 'components'
    logging.debug('Creating Cachet component {name}...'.format(name=params['name']))
    data = self._http_post(url, params)
    logging.info('Component {name} was created in group id {group_id}.'.format(
        name=params['name'], group_id=data['data']['group_id']))
    return data['data']
Create a new component, or return an existing one with the same name and group. @param name: component name (string) @param kwargs: additional component attributes such as link, description, status and group_id @return: dict with the component data
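A hedged usage sketch; the server URL, API token and group id are placeholders.

# Hedged sketch: create (or fetch) a component on a Cachet status page.
cachet = Cachet('https://status.example.com', 'API_TOKEN')
component = cachet.new_components('Website', description='Public web frontend',
                                  status=1, group_id=2)
print(component['id'], component['status'])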
https://github.com/qk4l/zabbix-cachet/blob/4cb917a2182cfee7519f4534cde129b560ba45e0/zabbix-cachet.py#L316-L349
import sys import os import datetime import json import requests import time import threading import logging import yaml import pytz from pyzabbix import ZabbixAPI, ZabbixAPIException from operator import itemgetter __author__ = 'Artem Alexandrov <qk4l()tem4uk.ru>' __license__ = """The MIT License (MIT)""" __version__ = '1.3.7' def client_http_error(url, code, message): logging.error('ClientHttpError[%s, %s: %s]' % (url, code, message)) def cachetapiexception(message): logging.error(message) def pyzabbix_safe(fail_result=False): def wrap(func): def wrapperd_f(*args, **kwargs): try: return func(*args, **kwargs) except (requests.ConnectionError, ZabbixAPIException) as e: logging.error('Zabbix Error: {}'.format(e)) return fail_result return wrapperd_f return wrap class Zabbix: def __init__(self, server, user, password, verify=True): self.server = server self.user = user self.password = password s = requests.Session() s.auth = (user, password) self.zapi = ZabbixAPI(server, s) self.zapi.session.verify = verify self.zapi.login(user, password) self.version = self.get_version() @pyzabbix_safe() def get_version(self): version = self.zapi.apiinfo.version() return version @pyzabbix_safe({}) def get_trigger(self, triggerid): trigger = self.zapi.trigger.get( expandComment='true', expandDescription='true', triggerids=triggerid) return trigger[0] @pyzabbix_safe({}) def get_event(self, triggerid): zbx_event = self.zapi.event.get( select_acknowledges='extend', expandDescription='true', object=0, value=1, objectids=triggerid) if len(zbx_event) >= 1: return zbx_event[-1] return zbx_event @pyzabbix_safe([]) def get_itservices(self, root=None): if root: root_service = self.zapi.service.get( selectDependencies='extend', filter={'name': root}) try: root_service = root_service[0] except IndexError: logging.error('Can not find "{}" service in Zabbix'.format(root)) sys.exit(1) service_ids = [] for dependency in root_service['dependencies']: service_ids.append(dependency['serviceid']) services = self.zapi.service.get( selectDependencies='extend', serviceids=service_ids) else: services = self.zapi.service.get( selectDependencies='extend', output='extend') if not services: logging.error('Can not find any child service for "{}"'.format(root)) return [] known_ids = [] service_tree = [i for i in services if i['dependencies']] for idx, service in enumerate(service_tree): child_services_ids = [] for dependency in service['dependencies']: child_services_ids.append(dependency['serviceid']) child_services = self.zapi.service.get( selectDependencies='extend', serviceids=child_services_ids) service_tree[idx]['dependencies'] = child_services known_ids = known_ids + child_services_ids known_ids.append(service['serviceid']) singers_services = [i for i in services if i['serviceid'] not in known_ids] if singers_services: service_tree = service_tree + singers_services return service_tree class Cachet: def __init__(self, server, token, verify=True): self.server = server + '/api/v1/' self.token = token self.headers = {'X-Cachet-Token': self.token, 'Accept': 'application/json; indent=4'} self.verify = verify self.version = self.get_version() def _http_post(self, url, params): url = self.server + url logging.debug("Sending to {url}: {param}".format(url=url, param=json.dumps(params, indent=4, separators=(',', ': ')))) try: r = requests.post(url=url, data=params, headers=self.headers, verify=self.verify) except requests.exceptions.RequestException as e: raise client_http_error(url, None, e) if r.status_code != 200: return 
client_http_error(url, r.status_code, r.text) try: r_json = json.loads(r.text) except ValueError: raise cachetapiexception( "Unable to parse json: %s" % r.text ) logging.debug("Response Body: %s", json.dumps(r_json, indent=4, separators=(',', ': '))) return r_json def _http_get(self, url, params=None): if params is None: params = {} url = self.server + url logging.debug("Sending to {url}: {param}".format(url=url, param=json.dumps(params, indent=4, separators=(',', ': ')))) try: r = requests.get(url=url, headers=self.headers, params=params, verify=self.verify) except requests.exceptions.RequestException as e: raise client_http_error(url, None, e) if r.status_code != 200: return client_http_error(url, r.status_code, json.loads(r.text)['errors']) try: r_json = json.loads(r.text) except ValueError: raise cachetapiexception( "Unable to parse json: %s" % r.text ) logging.debug("Response Body: %s", json.dumps(r_json, indent=4, separators=(',', ': '))) return r_json def _http_put(self, url, params): url = self.server + url logging.debug("Sending to {url}: {param}".format(url=url, param=json.dumps(params, indent=4, separators=(',', ': ')))) try: r = requests.put(url=url, json=params, headers=self.headers, verify=self.verify) except requests.exceptions.RequestException as e: raise client_http_error(url, None, e) if r.status_code != 200: return client_http_error(url, r.status_code, r.text) try: r_json = json.loads(r.text) except ValueError: raise cachetapiexception( "Unable to parse json: %s" % r.text ) logging.debug("Response Body: %s", json.dumps(r_json, indent=4, separators=(',', ': '))) return r_json def get_version(self): url = 'version' data = self._http_get(url) return data['data'] def get_component(self, id): url = 'components/' + str(id) data = self._http_get(url) return data def get_components(self, name=None): url = 'components' data = self._http_get(url) total_pages = int(data['meta']['pagination']['total_pages']) if name: components = [] for page in range(total_pages, 0, -1): if page == 1: data_page = data else: data_page = self._http_get(url, params={'page': page}) for component in data_page['data']: if component['name'] == name: components.append(component) if len(components) < 1: return {'id': 0, 'name': 'Does not exists'} else: return components return data
MIT License
stevezheng23/sequence_labeling_tf
sequence_labeling/util/eval_logger.py
EvalLogger.update_extrinsic_eval
python
def update_extrinsic_eval(self, eval_result_list, basic_info):
    self.extrinsic_eval = eval_result_list
    self.extrinsic_eval_info = basic_info
Update the evaluation logger with an extrinsic evaluation result.
https://github.com/stevezheng23/sequence_labeling_tf/blob/05fcbec15e359e3db86af6c3798c13be8a6c58ee/sequence_labeling/util/eval_logger.py#L35-L40
import codecs import collections import os.path import time import json import numpy as np import tensorflow as tf __all__ = ["BasicInfoEvalLog", "ExtrinsicEvalLog", "EvalLogger"] class BasicInfoEvalLog(collections.namedtuple("BasicInfoEvalLog", ("epoch", "global_step"))): pass class ExtrinsicEvalLog(collections.namedtuple("ExtrinsicEvalLog", ("metric", "score", "sample_output", "sample_size"))): pass class EvalLogger(object): def __init__(self, output_dir): self.extrinsic_eval = None self.extrinsic_eval_info = None self.extrinsic_eval_detail = None self.extrinsic_eval_detail_info = None """initialize evaluation logger""" self.output_dir = output_dir if not tf.gfile.Exists(self.output_dir): tf.gfile.MakeDirs(self.output_dir) self.log_file = os.path.join(self.output_dir, "eval_{0}.log".format(time.time())) self.log_writer = codecs.getwriter("utf-8")(tf.gfile.GFile(self.log_file, mode="a"))
Apache License 2.0
pennylaneai/pennylane-forest
pennylane_forest/wavefunction.py
WavefunctionDevice.bit2dec
python
def bit2dec(x):
    y = 0
    for i, j in enumerate(x[::-1]):
        y += j << i
    return y
Auxiliary method that converts a bitstring to a decimal integer using the PennyLane convention of bit ordering. Args: x (Iterable): bit string Returns: int: decimal value of the bitstring
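Two quick worked examples of the conversion; the bit strings are chosen for illustration only.

# Worked examples (most-significant bit first, per the PennyLane convention):
bit2dec([1, 0, 1])      # -> 5
bit2dec((0, 1, 1, 0))   # -> 6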
https://github.com/pennylaneai/pennylane-forest/blob/670c7e2527e8c3f3c5373dfffb10f14f7195880f/pennylane_forest/wavefunction.py#L94-L107
import itertools import numpy as np from numpy.linalg import eigh from pennylane.wires import Wires from pyquil.api import WavefunctionSimulator from .device import ForestDevice from ._version import __version__ I = np.identity(2) X = np.array([[0, 1], [1, 0]]) Y = np.array([[0, -1j], [1j, 0]]) Z = np.array([[1, 0], [0, -1]]) H = np.array([[1, 1], [1, -1]]) / np.sqrt(2) observable_map = {"PauliX": X, "PauliY": Y, "PauliZ": Z, "Identity": I, "Hadamard": H} class WavefunctionDevice(ForestDevice): name = "Forest Wavefunction Simulator Device" short_name = "forest.wavefunction" observables = {"PauliX", "PauliY", "PauliZ", "Hadamard", "Hermitian", "Identity"} def __init__(self, wires, *, shots=None, **kwargs): super().__init__(wires, shots, **kwargs) self.connection = super()._get_connection(**kwargs) self.qc = WavefunctionSimulator(connection=self.connection) self._state = None def apply(self, operations, **kwargs): super().apply(operations, **kwargs) self._state = self.qc.wavefunction(self.prog).amplitudes self._state = self._state.reshape([2] * len(self._active_wires)).T.flatten() self.expand_state() @staticmethod
BSD 3-Clause New or Revised License
kakao/khaiii
train/transform_corpus.py
run
python
def run(args: Namespace):
    random.seed(args.seed)
    sentences = []
    if args.input_format == 'sejong':
        sentences = Sentence.load_sejong()
    elif args.input_format == 'train':
        sentences = Sentence.load_train(args.rsc_src)
    else:
        raise ValueError(f'invalid input format: {args.input_format}')
    for sentence in sentences:
        sentence.merge_words(args.merge_rate)
        if args.output_format == 'raw':
            print(sentence.raw())
        elif args.output_format == 'khaiii':
            print(str(sentence))
        else:
            raise ValueError(f'invalid output format: {args.output_format}')
Entry point of the program. Args: args: parsed program arguments
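A hedged invocation sketch; the option values and resource path are placeholders.

# Hedged sketch: call run() with a hand-built Namespace instead of parsing argv.
from argparse import Namespace

run(Namespace(seed=23,
              input_format='sejong',    # or 'train'
              output_format='khaiii',   # or 'raw'
              rsc_src='rsc/src',        # only used with the 'train' input format
              merge_rate=0.1))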
https://github.com/kakao/khaiii/blob/328d5a8af456a5941130383354c07d1cd0e47cf5/train/transform_corpus.py#L106-L129
from argparse import ArgumentParser, Namespace import logging import random import sys from typing import List from khaiii.munjong.sejong_corpus import sents from khaiii.resource.resource import Resource from khaiii.train.dataset import PosDataset class Sentence: def __init__(self): self.words = [] self.morphs = [] def merge_words(self, rate: float = 0.0): if rate <= 0.0: return idx = 0 while idx < len(self.words)-1: if random.random() >= rate: idx += 1 continue self.words[idx] += self.words[idx+1] self.morphs[idx] += ' + ' + self.morphs[idx+1] del self.words[idx+1] del self.morphs[idx+1] def __str__(self): words_str = [f'{w}\t{m}' for w, m in zip(self.words, self.morphs)] return '\n'.join(words_str) + '\n' def raw(self): return ' '.join(self.words) @classmethod def load_sejong(cls) -> List['Sentence']: sentences = [] for sent in sents(sys.stdin): sentence = Sentence() for word in sent.words: sentence.words.append(word.raw) sentence.morphs.append(' + '.join([str(m) for m in word.morphs])) sentences.append(sentence) return sentences @classmethod def load_train(cls, rsc_src: str) -> List['Sentence']: restore_dic = Resource.load_restore_dic(f'{rsc_src}/restore.dic') sentences = [] for sent in PosDataset(None, restore_dic, sys.stdin): sentence = Sentence() for word in sent.pos_tagged_words: sentence.words.append(word.raw) sentence.morphs.append(' + '.join([str(m) for m in word.pos_tagged_morphs])) sentences.append(sentence) return sentences
Apache License 2.0
argoproj-labs/argo-client-python
argo/workflows/client/models/v1alpha1_workflow_status.py
V1alpha1WorkflowStatus.outputs
python
def outputs(self):
    return self._outputs
Gets the outputs of this V1alpha1WorkflowStatus. # noqa: E501 :return: The outputs of this V1alpha1WorkflowStatus. # noqa: E501 :rtype: V1alpha1Outputs
https://github.com/argoproj-labs/argo-client-python/blob/993d684cab39a834770b296e028519cec035c7b5/argo/workflows/client/models/v1alpha1_workflow_status.py#L292-L299
import pprint import re import six from argo.workflows.client.configuration import Configuration class V1alpha1WorkflowStatus(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'compressed_nodes': 'str', 'conditions': 'list[V1alpha1Condition]', 'estimated_duration': 'int', 'finished_at': 'datetime', 'message': 'str', 'nodes': 'dict(str, V1alpha1NodeStatus)', 'offload_node_status_version': 'str', 'outputs': 'V1alpha1Outputs', 'persistent_volume_claims': 'list[V1Volume]', 'phase': 'str', 'progress': 'str', 'resources_duration': 'dict(str, int)', 'started_at': 'datetime', 'stored_templates': 'dict(str, V1alpha1Template)', 'stored_workflow_template_spec': 'V1alpha1WorkflowSpec', 'synchronization': 'V1alpha1SynchronizationStatus' } attribute_map = { 'compressed_nodes': 'compressedNodes', 'conditions': 'conditions', 'estimated_duration': 'estimatedDuration', 'finished_at': 'finishedAt', 'message': 'message', 'nodes': 'nodes', 'offload_node_status_version': 'offloadNodeStatusVersion', 'outputs': 'outputs', 'persistent_volume_claims': 'persistentVolumeClaims', 'phase': 'phase', 'progress': 'progress', 'resources_duration': 'resourcesDuration', 'started_at': 'startedAt', 'stored_templates': 'storedTemplates', 'stored_workflow_template_spec': 'storedWorkflowTemplateSpec', 'synchronization': 'synchronization' } def __init__(self, compressed_nodes=None, conditions=None, estimated_duration=None, finished_at=None, message=None, nodes=None, offload_node_status_version=None, outputs=None, persistent_volume_claims=None, phase=None, progress=None, resources_duration=None, started_at=None, stored_templates=None, stored_workflow_template_spec=None, synchronization=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._compressed_nodes = None self._conditions = None self._estimated_duration = None self._finished_at = None self._message = None self._nodes = None self._offload_node_status_version = None self._outputs = None self._persistent_volume_claims = None self._phase = None self._progress = None self._resources_duration = None self._started_at = None self._stored_templates = None self._stored_workflow_template_spec = None self._synchronization = None self.discriminator = None if compressed_nodes is not None: self.compressed_nodes = compressed_nodes if conditions is not None: self.conditions = conditions if estimated_duration is not None: self.estimated_duration = estimated_duration if finished_at is not None: self.finished_at = finished_at if message is not None: self.message = message if nodes is not None: self.nodes = nodes if offload_node_status_version is not None: self.offload_node_status_version = offload_node_status_version if outputs is not None: self.outputs = outputs if persistent_volume_claims is not None: self.persistent_volume_claims = persistent_volume_claims if phase is not None: self.phase = phase if progress is not None: self.progress = progress if resources_duration is not None: self.resources_duration = resources_duration if started_at is not None: self.started_at = started_at if stored_templates is not None: self.stored_templates = stored_templates if stored_workflow_template_spec is not None: self.stored_workflow_template_spec = stored_workflow_template_spec if synchronization is not None: 
self.synchronization = synchronization @property def compressed_nodes(self): return self._compressed_nodes @compressed_nodes.setter def compressed_nodes(self, compressed_nodes): self._compressed_nodes = compressed_nodes @property def conditions(self): return self._conditions @conditions.setter def conditions(self, conditions): self._conditions = conditions @property def estimated_duration(self): return self._estimated_duration @estimated_duration.setter def estimated_duration(self, estimated_duration): self._estimated_duration = estimated_duration @property def finished_at(self): return self._finished_at @finished_at.setter def finished_at(self, finished_at): self._finished_at = finished_at @property def message(self): return self._message @message.setter def message(self, message): self._message = message @property def nodes(self): return self._nodes @nodes.setter def nodes(self, nodes): self._nodes = nodes @property def offload_node_status_version(self): return self._offload_node_status_version @offload_node_status_version.setter def offload_node_status_version(self, offload_node_status_version): self._offload_node_status_version = offload_node_status_version @property
Apache License 2.0
approxeng/approxeng.input
src/python/approxeng/input/__init__.py
ButtonPresses.__contains__
python
def __contains__(self, item):
    return item in self.names
Membership check for a button sname. :param item: The sname of the button to check :return: True if the button is in the set of pressed buttons, False otherwise
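A hedged usage sketch; `controller` is assumed to be an already-bound Controller instance and 'cross'/'square' are typical button snames.

# Hedged sketch of the membership check on a ButtonPresses object.
presses = controller.check_presses()   # refresh and fetch the ButtonPresses snapshot
if 'cross' in presses:                 # the __contains__ check shown above
    print('cross was pressed since the last check')
print(presses['cross', 'square'])      # tuple lookup, e.g. [True, False]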
https://github.com/approxeng/approxeng.input/blob/5822ef47ca139078b3771cfa29032d76a3fe7503/src/python/approxeng/input/__init__.py#L991-L1000
import logging from abc import ABC, abstractmethod from math import sqrt from time import time from typing import Optional, Union, Tuple import functools from evdev import InputEvent, ff, ecodes from approxeng.input.sys import sys_nodes logger = logging.getLogger(name='approxeng.input') def map_into_range(low, high, raw_value): value = float(raw_value) if low < high: if value < low: return 0 elif value > high: return 1.0 elif low > high: if value > low: return 0 elif value < high: return -1.0 return (value - low) / abs(high - low) def map_single_axis(low, high, dead_zone, hot_zone, value): input_range = high - low corrected_low = low + input_range * dead_zone corrected_high = high - input_range * hot_zone return map_into_range(corrected_low, corrected_high, value) def map_dual_axis(low, high, centre, dead_zone, hot_zone, value): if value <= centre: return map_single_axis(centre, low, dead_zone, hot_zone, value) else: return map_single_axis(centre, high, dead_zone, hot_zone, value) class Controller(ABC): def __init__(self, controls, node_mappings=None, dead_zone=None, hot_zone=None, ff_device=None): self.axes = Axes([control for control in controls if isinstance(control, CentredAxis) or isinstance(control, BinaryAxis) or isinstance(control, TriggerAxis)]) self.buttons = Buttons([control for control in controls if isinstance(control, Button) or isinstance(control, BinaryAxis) or isinstance(control, TriggerAxis)]) if dead_zone is not None: for axis in self.axes.axes: axis.dead_zone = dead_zone if hot_zone is not None: for axis in self.axes.axes: axis.hot_zone = hot_zone self.node_mappings = node_mappings self.device_unique_name = None self.exception = None self.ff_device = ff_device class ControllerStream(object): def __init__(self, controller): self.controller = controller def __getitem__(self, item): def generator(): while self.controller.connected: yield self.controller.__getitem__(item) return generator() self.stream = ControllerStream(self) @functools.lru_cache(maxsize=None) def _build_effect(self, milliseconds=1000, strong_magnitude=0x0000, weak_magnitude=0xffff) -> int: if self.ff_device: logger.info('compiling new force feedback effect') effect = ff.Effect( ecodes.FF_RUMBLE, -1, 0, ff.Trigger(0, 0), ff.Replay(milliseconds, 0), ff.EffectType( ff_rumble_effect=ff.Rumble(strong_magnitude=strong_magnitude, weak_magnitude=weak_magnitude)) ) return self.ff_device.upload_effect(effect) else: raise ValueError('no force-feedback node, unable to compile effect') def rumble(self, milliseconds=1000): if self.ff_device: logger.debug('controller go brrrr') effect_id = self._build_effect(milliseconds=milliseconds) repeat_count = 1 self.ff_device.write(ecodes.EV_FF, effect_id, repeat_count) else: logger.warning('no force-feedback node for this controller') @property def has_force_feedback(self): return self.ff_device is not None @property def sys_nodes(self) -> {}: if self.device_unique_name is not None: return sys_nodes(self.device_unique_name) return {} def read_led_value(self, led_name) -> Optional[int]: if self.device_unique_name is not None: return sys.read_led_value(self.device_unique_name, led_name) return None def write_led_value(self, led_name: str, value: int): if self.device_unique_name is not None: sys.write_led_value(self.device_unique_name, led_name, value) @property def battery_level(self) -> Optional[float]: if self.device_unique_name is not None: return sys.read_power_level(self.device_unique_name) return None @staticmethod @abstractmethod def registration_ids() -> [Tuple[int, int]]: 
pass @property def connected(self) -> bool: if self.device_unique_name: return True return False def __getitem__(self, item: Union[str, Tuple[str, ...]]) -> [Optional[float]]: if isinstance(item, tuple): return [self.__getattr__(single_item) for single_item in item] return self.__getattr__(item) def __getattr__(self, item: str) -> Optional[float]: if item in self.axes: return self.axes[item].value elif item in self.buttons: return self.buttons.held(item) raise AttributeError def __contains__(self, item: str) -> bool: if item in self.axes: return True if item in self.buttons: return True return False def check_presses(self) -> 'ButtonPresses': return self.buttons.check_presses() @property def has_presses(self) -> bool: return self.buttons.presses.has_presses @property def has_releases(self) -> bool: return self.buttons.releases.has_presses @property def presses(self) -> 'ButtonPresses': return self.buttons.presses @property def releases(self) -> 'ButtonPresses': return self.buttons.releases @property def controls(self) -> {}: return {'axes': self.axes.names, 'buttons': self.buttons.names} def register_button_handler(self, button_handler, button_sname: str): return self.buttons.register_button_handler(button_handler, self.buttons[button_sname]) def __str__(self) -> str: return "{}, axes={}, buttons={}".format(self.__class__.__name__, self.axes, self.buttons) class Axes(object): def __init__(self, axes): self.axes = axes self.axes_by_code = {axis.axis_event_code: axis for axis in axes} self.axes_by_sname = {axis.sname: axis for axis in axes} def add_circular_axis(rootname): xname = rootname + 'x' yname = rootname + 'y' if xname in self.axes_by_sname and yname in self.axes_by_sname: self.axes_by_sname[rootname] = CircularCentredAxis(x=self.axes_by_sname[xname], y=self.axes_by_sname[yname]) for prefix in ['l', 'r', 'd']: add_circular_axis(prefix) def axis_updated(self, event: InputEvent, prefix=None): if prefix is not None: axis = self.axes_by_code.get(prefix + str(event.code)) else: axis = self.axes_by_code.get(event.code) if axis is not None: axis.receive_device_value(event.value) else: logger.debug('Unknown axis code {} ({}), value {}'.format(event.code, prefix, event.value)) def set_axis_centres(self, *args): for axis in self.axes_by_code.values(): if isinstance(axis, CentredAxis): axis.centre = axis.value def reset_axis_calibration(self, *args): for axis in self.axes: axis.reset() def __str__(self): return list("{}={}".format(axis.name, axis.value) for axis in self.axes_by_code.values()).__str__() @property def names(self) -> [str]: return sorted([name for name in self.axes_by_sname.keys() if name != '']) @property def active_axes(self) -> ['Axis']: return [axis for axis in self.axes if axis.value != 0] def __getitem__(self, sname: str) -> Optional['Axis']: return self.axes_by_sname.get(sname) def __getattr__(self, item) -> 'Axis': if item in self.axes_by_sname: return self.get(item) raise AttributeError def __contains__(self, item: str) -> bool: return item in self.axes_by_sname class Axis(ABC): @property @abstractmethod def value(self) -> float: pass @abstractmethod def receive_device_value(self, value: int): pass class TriggerAxis(Axis): def __init__(self, name: str, min_raw_value: int, max_raw_value: int, axis_event_code: int, dead_zone=0.0, hot_zone=0.0, sname: Optional[str] = None, button_sname: Optional[str] = None, button_trigger_value=0.5): self.name = name self.max = 0.9 self.min = 0.1 self.__value = self.min self.dead_zone = dead_zone self.hot_zone = hot_zone self.min_raw_value 
= min_raw_value self.max_raw_value = max_raw_value self.axis_event_code = axis_event_code self.sname = sname self.buttons = None self.button_trigger_value = button_trigger_value if button_sname is not None: self.button = Button(name='{}_trigger_button'.format(name), key_code='{}_trigger_button'.format(axis_event_code), sname=button_sname) else: self.button = None def _input_to_raw_value(self, value: int) -> float: return (float(value) - self.min_raw_value) / self.max_raw_value @property def raw_value(self) -> float: return self.__value @property def value(self) -> float: return map_single_axis(self.min, self.max, self.dead_zone, self.hot_zone, self.__value) def reset(self): self.max = 0.9 self.min = 0.1 def receive_device_value(self, raw_value: int): new_value = self._input_to_raw_value(raw_value) if self.button is not None: if new_value > (self.button_trigger_value + 0.05) > self.__value: self.buttons.button_pressed(self.button.key_code) elif new_value < (self.button_trigger_value - 0.05) < self.__value: self.buttons.button_released(self.button.key_code) self.__value = new_value if new_value > self.max: self.max = new_value elif new_value < self.min: self.min = new_value def __str__(self): return "TriggerAxis name={}, sname={}, corrected_value={}".format(self.name, self.sname, self.value) class BinaryAxis(Axis): def __init__(self, name, axis_event_code, b1name=None, b2name=None): self.name = name self.axis_event_code = axis_event_code self.b1 = Button('{}_left_button'.format(name), key_code='{}_left'.format(axis_event_code), sname=b1name) self.b2 = Button('{}_right_button'.format(name), key_code='{}_right'.format(axis_event_code), sname=b2name) self.buttons = None self.last_value = 0 self.sname = '' self.__value = 0 def receive_device_value(self, raw_value: int): self.__value = raw_value if self.buttons is not None: if self.last_value < 0: self.buttons.button_released(self.b1.key_code) elif self.last_value > 0: self.buttons.button_released(self.b2.key_code) self.last_value = raw_value if raw_value < 0: self.buttons.button_pressed(self.b1.key_code) elif raw_value > 0: self.buttons.button_pressed(self.b2.key_code) @property def value(self): return self.__value def __str__(self): return "BinaryAxis name={}, sname={}, corrected_value={}".format(self.name, self.sname, self.value) class CircularCentredAxis: def __init__(self, x: "CentredAxis", y: "CentredAxis", dead_zone=0.1, hot_zone=0.1): self.x = x self.y = y self.dead_zone = dead_zone self.hot_zone = hot_zone def _calculate_position(self, raw_x: float, raw_y: float): if raw_x != 0 or raw_y != 0: distance = sqrt(raw_x * raw_x + raw_y * raw_y) else: return 0.0, 0.0 if distance >= 1.0 - self.hot_zone: return raw_x / distance, raw_y / distance elif distance <= self.dead_zone: return 0.0, 0.0 effective_distance = (distance - self.dead_zone) / (1.0 - (self.dead_zone + self.hot_zone)) scale = effective_distance / distance return raw_x * scale, raw_y * scale @property def value(self) -> (float, float): return self._calculate_position(raw_x=self.x.raw_value if not self.x.invert else -self.x.raw_value, raw_y=self.y.raw_value if not self.y.invert else -self.y.raw_value) class CentredAxis(Axis): def __init__(self, name, min_raw_value, max_raw_value, axis_event_code, dead_zone=0.0, hot_zone=0.0, sname=None): self.name = name self.centre = 0.0 self.max = 0.9 self.min = -0.9 self.__value = 0.0 self.invert = min_raw_value > max_raw_value self.dead_zone = dead_zone self.hot_zone = hot_zone self.min_raw_value = float(min(min_raw_value, max_raw_value)) 
self.max_raw_value = float(max(min_raw_value, max_raw_value)) self.axis_event_code = axis_event_code self.sname = sname def _input_to_raw_value(self, value: int): return (float(value) - self.min_raw_value) * (2 / (self.max_raw_value - self.min_raw_value)) - 1.0 @property def raw_value(self) -> float: return self.__value @property def value(self) -> float: mapped_value = map_dual_axis(self.min, self.max, self.centre, self.dead_zone, self.hot_zone, self.__value) if self.invert: return -mapped_value else: return mapped_value def reset(self): self.centre = 0.0 self.max = 0.9 self.min = -0.9 def receive_device_value(self, raw_value: int): new_value = self._input_to_raw_value(raw_value) self.__value = new_value if new_value > self.max: self.max = new_value elif new_value < self.min: self.min = new_value def __str__(self) -> str: return "CentredAxis name={}, sname={}, corrected_value={}".format(self.name, self.sname, self.value) class Button(object): def __init__(self, name, key_code=None, sname=None): self.name = name self.key_code = key_code self.sname = sname def __repr__(self): return "Button(name={}, code={}, sname={})".format(self.name, self.key_code, self.sname) class ButtonPresses(object): def __init__(self, buttons): self.buttons = buttons self.names = list([button.sname for button in buttons]) def __getitem__(self, item): if isinstance(item, tuple): return [(single_item in self.names) for single_item in item] return item in self.names def __getattr__(self, item): return item in self.names
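The dead-zone / hot-zone scaling used by CircularCentredAxis._calculate_position above is easiest to see with concrete numbers; the following standalone sketch reproduces that mapping outside the class (the sample stick values are illustrative only and nothing here is part of the library itself).

# Standalone illustration of the circular dead-zone / hot-zone mapping used by
# CircularCentredAxis._calculate_position; not part of the library.
from math import sqrt

def scale_stick(raw_x, raw_y, dead_zone=0.1, hot_zone=0.1):
    distance = sqrt(raw_x * raw_x + raw_y * raw_y)
    if distance == 0:
        return 0.0, 0.0
    if distance >= 1.0 - hot_zone:           # in the hot zone: clamp to the unit circle
        return raw_x / distance, raw_y / distance
    if distance <= dead_zone:                # inside the dead zone: no output
        return 0.0, 0.0
    effective = (distance - dead_zone) / (1.0 - (dead_zone + hot_zone))
    scale = effective / distance             # rescale so output spans 0..1 between the zones
    return raw_x * scale, raw_y * scale

print(scale_stick(0.05, 0.05))   # inside the dead zone -> (0.0, 0.0)
print(scale_stick(0.5, 0.5))     # partial deflection -> proportionally rescaled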
Apache License 2.0
phillipdupuis/pydantic-to-typescript
pydantic2ts/cli/script.py
import_module
python
def import_module(path: str) -> ModuleType: try: if os.path.exists(path): name = uuid4().hex spec = spec_from_file_location(name, path, submodule_search_locations=[]) module = module_from_spec(spec) sys.modules[name] = module spec.loader.exec_module(module) return module else: return importlib.import_module(path) except BaseException as e: logger.error( "The --module argument must be a module path separated by dots or a valid filepath" ) raise e
Helper which allows modules to be specified by either dotted path notation or by filepath. If we import by filepath, we must also assign a name to it and add it to sys.modules BEFORE calling 'spec.loader.exec_module' because there is code in pydantic which requires that the definition exist in sys.modules under that name.
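A minimal usage sketch of this helper; the dotted path and the file path below are made-up examples, and importing the helper assumes the pydantic2ts package is installed.

# Hypothetical usage; both argument values are invented examples.
from pydantic2ts.cli.script import import_module

pkg_module = import_module("my_app.models")        # dotted-path import
file_module = import_module("./my_app/models.py")  # filepath import (registered in sys.modules)
print(pkg_module, file_module)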
https://github.com/phillipdupuis/pydantic-to-typescript/blob/a8f182d13ab279ec09b4a091abe018b63a1ae018/pydantic2ts/cli/script.py#L25-L47
import importlib import inspect import json import logging import os import shutil import sys from importlib.util import spec_from_file_location, module_from_spec from tempfile import mkdtemp from types import ModuleType from typing import Type, Dict, Any, List, Tuple from uuid import uuid4 import click from pydantic import BaseModel, Extra, create_model try: from pydantic.generics import GenericModel except ImportError: GenericModel = None logger = logging.getLogger("pydantic2ts")
MIT License
google/openhtf
openhtf/plugs/usb/usb_handle.py
UsbHandle.is_closed
python
def is_closed(self):
Returns True if this handle has been closed, False otherwise.
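A minimal sketch of how a concrete subclass might satisfy this abstract method; FakeUsbHandle is not part of openhtf, it only illustrates the contract that the requires_open_handle decorator in the context relies on, and it assumes UsbHandle is importable from openhtf.plugs.usb.usb_handle.

# Hypothetical concrete handle that tracks closed state; not an openhtf class.
class FakeUsbHandle(UsbHandle):
    def __init__(self, serial_number):
        super(FakeUsbHandle, self).__init__(serial_number)
        self._closed = False

    def read(self, length, timeout_ms=None):
        return b''

    def write(self, data, timeout_ms=None):
        return len(data)

    def close(self):
        self._closed = True

    def is_closed(self):
        # True once close() has been called, so methods wrapped with
        # requires_open_handle raise HandleClosedError instead of touching
        # a dead handle.
        return self._closed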
https://github.com/google/openhtf/blob/4646aa6b9ba67532ce7e8743ce16d7bd4369ad3d/openhtf/plugs/usb/usb_handle.py#L190-L191
import abc import functools import logging from future.utils import with_metaclass from openhtf.plugs.usb import usb_exceptions DEFAULT_TIMEOUT_MS = 5000 FLUSH_READ_SIZE = 1024 * 64 _LOG = logging.getLogger(__name__) def requires_open_handle(method): @functools.wraps(method) def wrapper_requiring_open_handle(self, *args, **kwargs): if self.is_closed(): raise usb_exceptions.HandleClosedError() return method(self, *args, **kwargs) return wrapper_requiring_open_handle class UsbHandle(with_metaclass(abc.ABCMeta, object)): def __init__(self, serial_number, name=None, default_timeout_ms=None): self.serial_number = serial_number self.name = name or '' self._default_timeout_ms = default_timeout_ms or DEFAULT_TIMEOUT_MS def __del__(self): if not self.is_closed(): _LOG.error('!!!!!USB!!!!! %s not closed!', type(self).__name__) def __str__(self): return '<%s: (%s %s)>' % (type(self).__name__, self.name, self.serial_number) __repr__ = __str__ def _timeout_or_default(self, timeout_ms): return int( timeout_ms if timeout_ms is not None else self._default_timeout_ms) def flush_buffers(self): while True: try: self.read(FLUSH_READ_SIZE, timeout_ms=10) except usb_exceptions.LibusbWrappingError as exception: if exception.is_timeout(): break raise @abc.abstractmethod def read(self, length, timeout_ms=None): @abc.abstractmethod def write(self, data, timeout_ms=None): @abc.abstractmethod def close(self): @abc.abstractmethod
Apache License 2.0
conjure-up/conjure-up
conjureup/models/provider.py
MAAS._has_correct_api_key
python
def _has_correct_api_key(self): field = self.form.field('maas-oauth') key = (field.value or '').split(':') if len(key) != 3: return ( False, "Could not determine tokens, usually indicates an " "error with the format of the API KEY. That format " "should be 'aaaaa:bbbbb:cccc'. Please visit your MAAS user " "preferences page to grab the correct API Key: " "http://<maas-server>:5240/MAAS/account/prefs/") return (True, None)
Validates the MAAS API key format.
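The rule being enforced is simply "three colon-separated tokens"; a standalone sketch of that check follows (the sample keys are fabricated and the helper is not part of conjure-up).

# Standalone illustration of the 'consumer:token:secret' format check.
def looks_like_maas_key(api_key):
    return len((api_key or '').split(':')) == 3

print(looks_like_maas_key('aaaaa:bbbbb:ccccc'))   # True
print(looks_like_maas_key('not-a-valid-key'))     # False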
https://github.com/conjure-up/conjure-up/blob/d2bf8ab8e71ff01321d0e691a8d3e3833a047678/conjureup/models/provider.py#L315-L328
import ipaddress import json from collections import OrderedDict from functools import partial from pathlib import Path from subprocess import CalledProcessError from urllib.parse import urljoin, urlparse from pkg_resources import parse_version from ubuntui.widgets.input import PasswordEditor, StringEditor, YesNo from urwid import Text from conjureup import errors, utils from conjureup.app_config import app from conjureup.consts import cloud_types from conjureup.juju import get_cloud from conjureup.models.credential import CredentialManager from conjureup.utils import arun, is_valid_hostname from conjureup.vsphere import VSphereClient, VSphereInvalidLogin class Field: def __init__(self, label=None, widget=None, key=None, storable=True, error=None, required=True, validator=None): self.label = label self.widget = widget self.key = key self.storable = storable self.error = Text("") self.required = required self.validator = validator def validate(self): self.error.set_text("") if self.required and not self.value: self.error.set_text("This field is required and cannot be empty.") return False if self.validator and callable(self.validator): is_valid, msg = self.validator() if not is_valid: self.error.set_text(msg) return False return True @property def value(self): return self.widget.value @value.setter def value(self, value): self.widget.value = value class Form: def __init__(self, widgets): self._fields = [] for w in widgets: key = w.key.replace('-', '_') setattr(self, key, w) self._fields.append(getattr(self, key)) def fields(self): return self._fields def field(self, key): for w in self.fields(): if key == w.key: return w return None class BaseProvider: def __init__(self): self.auth_type = None self.endpoint = None self.model = None self.model_defaults = None self.controller = None self.cloud = None self.cloud_type = None self.regions = [] self.region = None self.credential = None self.client = None self.authenticated = False self.form = None def is_valid(self): validations = [] for f in self.form.fields(): validations.append(f.validate()) if not all(validations): return False return True async def login(self): pass async def cloud_config(self): raise NotImplementedError @property def default_region(self): return None async def configure_tools(self): pass def load(self, cloud_name): try: _cloud = get_cloud(cloud_name) self.cloud = cloud_name self.endpoint = _cloud.get('endpoint', None) self.regions = sorted(_cloud.get('regions', {}).keys()) except LookupError: raise errors.SchemaCloudError(cloud_name) async def save_form(self): for f in self.form.fields(): key = f.key.replace('-', '_') setattr(self, key, f.widget.value) class AWS(BaseProvider): def __init__(self): super().__init__() self.auth_type = 'access-key' self.cloud_type = cloud_types.AWS self.form = Form([Field(label='AWS Access Key', widget=StringEditor(), key='access-key'), Field(label='AWS Secret Key', widget=StringEditor(), key='secret-key')]) @property def default_region(self): return 'us-east-1' async def configure_tools(self): cred = CredentialManager.get_credential(self.cloud, self.cloud_type, self.credential) try: ret, _, _ = await arun(['aws', 'configure', 'list', '--profile', self.credential], check=False) if ret != 0: await arun(['aws', 'configure', '--profile', self.credential], input='{}\n{}\n\n\n'.format(cred.access_key, cred.secret_key), check=True) except CalledProcessError as e: app.log.error('Failed to configure AWS CLI profile: {}'.format( e.stderr)) raise class MAAS(BaseProvider): def __init__(self): 
super().__init__() self.auth_type = 'oauth1' self.cloud_type = cloud_types.MAAS self.form = Form( [ Field( label='API Endpoint (http://example.com:5240/MAAS)', widget=StringEditor(), key='endpoint', storable=False, validator=partial(self._has_correct_endpoint) ), Field( label='API Key', widget=StringEditor(), key='maas-oauth', validator=partial(self._has_correct_api_key)) ] ) async def cloud_config(self): return { 'type': 'maas', 'auth-types': ['oauth1'], 'endpoint': self.endpoint } def _has_correct_endpoint(self): field = self.form.field('endpoint') endpoint = field.value if endpoint.startswith('http'): url = urlparse(endpoint) if not url.netloc: return (False, "Unable to determine the web address, " "please use the format of " "http://maas-server.com:5240/MAAS") else: if 'MAAS' not in url.path: field.value = urljoin(url.geturl(), "MAAS") return (True, None) elif is_valid_hostname(endpoint): field.value = urljoin("http://{}:5240".format(endpoint), "MAAS") return (True, None) else: try: ip = endpoint.split(':') port = '5240' if len(ip) == 2: ip, port = ip else: ip = ip.pop() ipaddress.ip_address(ip) field.value = urljoin( "http://{}:{}".format(ip, port), "MAAS") return (True, None) except ValueError: pass return (False, "Unable to validate that this entry is " "the correct format. Please use the format of " "http://maas-server.com:5240/MAAS")
MIT License
datadotworld/data.world-py
datadotworld/client/_swagger/models/catalog_table_hydration_dto.py
CatalogTableHydrationDto.agentid
python
def agentid(self): return self._agentid
Gets the agentid of this CatalogTableHydrationDto. :return: The agentid of this CatalogTableHydrationDto. :rtype: str
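A short usage sketch of the generated model; the field values below are invented, and the setter is implied by the constructor shown in the context (it assigns self.agentid when a value is passed).

# Invented values; CatalogTableHydrationDto is the generated Swagger model above.
dto = CatalogTableHydrationDto(agentid='my-org', tableid='sales_2020', title='Sales')
print(dto.agentid)    # 'my-org'
dto.agentid = 'other-org'
print(dto.agentid)    # 'other-org'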
https://github.com/datadotworld/data.world-py/blob/7e5f474b655f4f0c88cc6862353e4d52c0e0bb31/datadotworld/client/_swagger/models/catalog_table_hydration_dto.py#L112-L119
from pprint import pformat from six import iteritems import re class CatalogTableHydrationDto(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'agentid': 'str', 'catalogs': 'list[CatalogId]', 'created_by': 'AgentHydrationDto', 'description': 'str', 'entry_type_hierarchy': 'list[EntryType]', 'entry_type_label': 'str', 'id': 'str', 'referent': 'str', 'source_id': 'SourceId', 'tableid': 'str', 'title': 'str', 'type': 'str', 'updated': 'datetime' } attribute_map = { 'agentid': 'agentid', 'catalogs': 'catalogs', 'created_by': 'createdBy', 'description': 'description', 'entry_type_hierarchy': 'entryTypeHierarchy', 'entry_type_label': 'entryTypeLabel', 'id': 'id', 'referent': 'referent', 'source_id': 'sourceId', 'tableid': 'tableid', 'title': 'title', 'type': 'type', 'updated': 'updated' } def __init__(self, agentid=None, catalogs=None, created_by=None, description=None, entry_type_hierarchy=None, entry_type_label=None, id=None, referent=None, source_id=None, tableid=None, title=None, type=None, updated=None): self._agentid = None self._catalogs = None self._created_by = None self._description = None self._entry_type_hierarchy = None self._entry_type_label = None self._id = None self._referent = None self._source_id = None self._tableid = None self._title = None self._type = None self._updated = None if agentid is not None: self.agentid = agentid if catalogs is not None: self.catalogs = catalogs if created_by is not None: self.created_by = created_by if description is not None: self.description = description if entry_type_hierarchy is not None: self.entry_type_hierarchy = entry_type_hierarchy if entry_type_label is not None: self.entry_type_label = entry_type_label if id is not None: self.id = id if referent is not None: self.referent = referent if source_id is not None: self.source_id = source_id if tableid is not None: self.tableid = tableid if title is not None: self.title = title if type is not None: self.type = type if updated is not None: self.updated = updated @property
Apache License 2.0
tsutof/jetson-thermal-monitor
thermal_zone.py
get_thermal_zone_paths
python
def get_thermal_zone_paths(): return [os.path.join(THERMAL_PATH, m.group(0)) for m in [re.search('thermal_zone[0-9]', d) for d in os.listdir(THERMAL_PATH)] if m]
Returns a list of the thermal zone paths
https://github.com/tsutof/jetson-thermal-monitor/blob/50cf5ee89d9c6e804b99b2513038e4de0eea2288/thermal_zone.py#L32-L38
import os import re import subprocess THERMAL_PATH = '/sys/devices/virtual/thermal/'
MIT License
arvehj/jvcprojectortools
plot.py
Plot.do_plot
python
def do_plot(self, *gamma, draw_speed=16, **kwargs): self.tables.append((gamma, kwargs)) self.plot_table(*gamma, draw_speed=draw_speed, **kwargs)
Plot gamma table
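A rough usage sketch with two assumptions that are not documented in this record: each gamma argument is taken to be a sequence of 0-1023 output values indexed by 0-255 input (matching the plot_area used by the class), and run() is called on the main thread while another thread enqueues work through plot().

# Rough usage sketch; the gamma-table shape (256 values in 0..1023) and the
# threading arrangement are assumptions, not documented behaviour.
import threading

p = Plot()

def worker():
    table = [round(1023 * (i / 255) ** 2.2) for i in range(256)]  # a 2.2 gamma curve
    p.plot(table, colors=['red'])   # enqueued; drawn by the turtle thread in run()
    # p.close() would shut the plot window down when finished

threading.Thread(target=worker, daemon=True).start()
p.run()   # blocks, servicing the queue and running the turtle mainloop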
https://github.com/arvehj/jvcprojectortools/blob/2361e5e569ec7a75407b42fff2934fa69de21fd1/plot.py#L309-L312
import math import numbers import queue import threading import turtle from tkinter.font import Font class PlotClosed(Exception): pass class Plot(): def __init__(self): self.queue = queue.Queue(maxsize=1) self.margin = [0, 0, 0, 0] self.plot_area = (0, 0, 255, 1023) self.min_size = (2, 8) self.zoom_area = [*self.plot_area] self.scale = 1 self.font = ('Ariel', 8) self.closed = False self.window_size = None self.tables = [] try: turtle.setup() except turtle.Terminator: turtle.setup() def clear(self, lines=()): self.enqueue(lambda: self.do_clear(lines)) def zoom(self, level=None, direction=(0, 0)): self.enqueue(lambda: self.do_zoom(level, direction)) def plot(self, *gamma, colors=['red', 'green', 'blue'], draw_speed=16, scale_x=1): self.enqueue(lambda: self.do_plot(*gamma, colors=colors, draw_speed=draw_speed, scale_x=scale_x)) def close(self): try: self.enqueue(self.do_close) except PlotClosed: pass def enqueue(self, func): retry_count = 0 while True: try: if self.closed: raise PlotClosed('Plot window closed') self.queue.put(func, timeout=1) return except queue.Full: retry_count += 1 if retry_count % 10 == 0: print('queue full, waiting', retry_count) def run(self): if self.closed: raise PlotClosed('Plot window closed') opened = False try: while True: try: cmd = self.queue.get(timeout=1) break except queue.Empty: pass if cmd != self.do_close: opened = True self.do_zoom() self.do_clear() cmd() turtle.ontimer(self.check_queue, 100) turtle.mainloop() except turtle.Terminator: pass except KeyboardInterrupt: pass finally: self.closed = True if opened: try: turtle.bye() except turtle.Terminator: pass def check_queue(self): try: while True: window_size = turtle.window_width(), turtle.window_height() if self.window_size != window_size: self.redraw() turtle.ontimer(self.check_queue, 200) break cmd = self.queue.get_nowait() cmd() if cmd == self.do_close: break except queue.Empty: turtle.ontimer(self.check_queue, 100) except turtle.Terminator: pass except KeyboardInterrupt: turtle.bye() def do_close(self): turtle.bye() def do_clear(self, lines=()): turtle.clear() self.tables = [] self.lines = lines self.draw_grid() def redraw(self): turtle.clear() self.setworldcoordinates() self.draw_grid() for tables, kwargs in self.tables: self.plot_table(*tables, draw_speed=1024, **kwargs) def setworldcoordinates(self): self.window_size = turtle.window_width(), turtle.window_height() turtle.screensize(1, 1) turtle.update() turtle.setworldcoordinates(*(a + b * self.scale for a, b in zip(self.zoom_area, self.margin))) def do_zoom(self, level=None, direction=(0, 0)): self.scale = 1 if level is None: self.zoom_area = [*self.plot_area] else: for i in range(2): l = self.zoom_area[i] h = self.zoom_area[i + 2] d = direction[i]/2 + 0.5 min_l = self.plot_area[i] max_h = self.plot_area[i + 2] max_size = max_h - min_l size = h - l new_size = size / level if new_size < self.min_size[i]: new_size = self.min_size[i] if new_size > max_size: new_size = max_size new_l = max(min_l, l + (size - new_size) * d) new_h = new_l + new_size if new_h > max_h: new_h = max_h new_l = new_h - new_size self.zoom_area[i], self.zoom_area[i + 2] = new_l, new_h self.scale = new_size / max_size self.setworldcoordinates() def label_size(self, label): font_family, font_size = self.font font = Font(family=font_family, size=font_size) width = 0 lines = 0 for line in label.split('\n'): width = max(width, font.measure(line)) lines += 1 xscale = turtle.getscreen().xscale yscale = turtle.getscreen().yscale return width / xscale, font.metrics('linespace') * lines 
/ yscale def label_pos(self, pos, label=None, horizontal=False): xscale = turtle.getscreen().xscale yscale = turtle.getscreen().yscale font_family, font_size = self.font font = Font(family=font_family, size=font_size) line_height = font.metrics('linespace') / yscale height = (label.count('\n') + 1) * line_height return -8 / xscale if horizontal else pos, pos - 0.5 * height if horizontal else -height - 6 / yscale def draw_line(self, pos, horizontal=False, label=None, color='gray90', label_color='gray35', **_): if pos is None: return if pos < self.zoom_area[0 + horizontal] or pos > self.zoom_area[2 + horizontal]: return turtle.penup() xscale = turtle.getscreen().xscale yscale = turtle.getscreen().yscale if label: font_family, font_size = self.font turtle.color(label_color) turtle.setposition(*self.label_pos(pos=pos, label=label, horizontal=horizontal)) turtle.write(label, align='right' if horizontal else'center', font=(font_family, font_size)) turtle.setposition(-4 / xscale if horizontal else pos, pos if horizontal else -4 / yscale) turtle.pendown() turtle.setposition(1 / xscale if horizontal else pos, pos if horizontal else 1 / yscale) turtle.color(color) turtle.pendown() turtle.setposition(self.plot_area[2] if horizontal else pos, pos if horizontal else self.plot_area[3]) turtle.penup() def draw_grid(self): turtle.tracer(0) turtle.hideturtle() turtle.speed(0) turtle.penup() turtle.color('gray75') turtle.setposition(0, 0) turtle.pendown() turtle.setposition(255, 0) turtle.setposition(255, 1023) turtle.setposition(0, 1023) turtle.setposition(0, 0) turtle.penup() turtle.color('gray90') lines = [line.copy() for line in self.lines] lines.sort(key=lambda x: x['pos']) last = [None, None] last_pos = [-math.inf, -math.inf] margin_pad = 4 / turtle.getscreen().xscale, 4 / turtle.getscreen().yscale margin_scale = 1 / self.scale, 1 / self.scale margin = [sign * pad * scale for sign in (-1, 1) for pad, scale in zip(margin_pad, margin_scale)] for i, line in enumerate(lines): label = line.get('label') if not label: continue h = line.get('horizontal', 0) pos = line['pos'] if pos < self.zoom_area[0 + h] or pos > self.zoom_area[2 + h]: del line['label'] continue size = self.label_size(label) label_pos = self.label_pos(pos=pos, label=label, horizontal=h) label_pos = (label_pos[0] - size[0] / (1 if h else 2), label_pos[1]) for i, p in enumerate(label_pos): pl = (p - margin_pad[i]) * margin_scale[i] if pl < margin[i]: margin[i] = pl ph = (p + size[i] - self.plot_area[i + 2] + margin_pad[i]) * margin_scale[i] if ph > margin[i + 2]: margin[i + 2] = ph resize = False for i, p in enumerate(margin): if p != self.margin[i] and i < 2: self.margin[i] = p resize = True if p != self.margin[i] and i >= 2: self.margin[i] = p resize = True if resize: self.setworldcoordinates() for i, line in enumerate(lines): label = line.get('label') if not label: continue h = line.get('horizontal', 0) pos = line['pos'] size = self.label_size(label) pad = size[h] * 0.6 if pos - pad < last_pos[h]: if line.get('priority', 0) <= last[h].get('priority', 0): del line['label'] continue del last[h]['label'] last[h] = line last_pos[h] = pos + pad for line in lines: if line is not None: if isinstance(line, numbers.Number): line = (line,) self.draw_line(**line) turtle.update()
Apache License 2.0
line/line-bot-sdk-python
linebot/models/events.py
LeaveEvent.__init__
python
def __init__(self, mode=None, timestamp=None, source=None, **kwargs): super(LeaveEvent, self).__init__( mode=mode, timestamp=timestamp, source=source, **kwargs ) self.type = 'leave'
__init__ method. :param str mode: Channel state :param long timestamp: Time of the event in milliseconds :param source: Source object :type source: T <= :py:class:`linebot.models.sources.Source` :param kwargs:
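A construction sketch; the source dict below is fabricated, and it assumes the base class converts a dict with a 'type' key into the matching source object (the type map for 'user'/'group'/'room' is visible in the Event constructor in the context).

# Fabricated webhook-style values; the source dict is expected to be converted
# to a SourceGroup by the base Event constructor.
event = LeaveEvent(
    mode='active',
    timestamp=1625665242211,
    source={'type': 'group', 'groupId': 'C1234567890abcdef1234567890abcdef'},
)
print(event.type)                      # 'leave'
print(type(event.source).__name__)     # 'SourceGroup'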
https://github.com/line/line-bot-sdk-python/blob/914a2d5520ffb68a2f0cc6894006902a42f66c71/linebot/models/events.py#L200-L213
from abc import ABCMeta from future.utils import with_metaclass from linebot.models.base import Base from linebot.models.messages import ( TextMessage, ImageMessage, VideoMessage, AudioMessage, LocationMessage, StickerMessage, FileMessage ) from linebot.models.sources import SourceUser, SourceGroup, SourceRoom from linebot.models.things import ( DeviceUnlink, DeviceLink, ScenarioResult, ) from linebot.models.things import Things from linebot.models.unsend import Unsend from linebot.models.video_play_complete import VideoPlayComplete class Event(with_metaclass(ABCMeta, Base)): def __init__(self, mode=None, timestamp=None, source=None, **kwargs): super(Event, self).__init__(**kwargs) self.type = None self.mode = mode self.timestamp = timestamp self.source = self.get_or_new_from_json_dict_with_types( source, { 'user': SourceUser, 'group': SourceGroup, 'room': SourceRoom, } ) class MessageEvent(Event): def __init__(self, mode=None, timestamp=None, source=None, reply_token=None, message=None, **kwargs): super(MessageEvent, self).__init__( mode=mode, timestamp=timestamp, source=source, **kwargs ) self.type = 'message' self.reply_token = reply_token self.message = self.get_or_new_from_json_dict_with_types( message, { 'text': TextMessage, 'image': ImageMessage, 'video': VideoMessage, 'audio': AudioMessage, 'location': LocationMessage, 'sticker': StickerMessage, 'file': FileMessage } ) class FollowEvent(Event): def __init__(self, mode=None, timestamp=None, source=None, reply_token=None, **kwargs): super(FollowEvent, self).__init__( mode=mode, timestamp=timestamp, source=source, **kwargs ) self.type = 'follow' self.reply_token = reply_token class UnfollowEvent(Event): def __init__(self, mode=None, timestamp=None, source=None, **kwargs): super(UnfollowEvent, self).__init__( mode=mode, timestamp=timestamp, source=source, **kwargs ) self.type = 'unfollow' class JoinEvent(Event): def __init__(self, mode=None, timestamp=None, source=None, reply_token=None, **kwargs): super(JoinEvent, self).__init__( mode=mode, timestamp=timestamp, source=source, **kwargs ) self.type = 'join' self.reply_token = reply_token class LeaveEvent(Event):
Apache License 2.0
michaelbrooks/twitter-monitor
twitter_monitor/stream.py
DynamicTwitterStream.stop_stream
python
def stop_stream(self): if self.stream is not None: logger.warning("Stopping twitter stream...") self.stream.disconnect() self.stream = None sleep(self.STOP_TIMEOUT)
Stops the current stream. Blocks until this is done.
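A usage sketch; the term checker below is a placeholder written for this example, the credentials are fabricated, and the tweepy calls assume the 3.x-style Stream/StreamListener API that the module above is written against.

# Placeholder term checker and fabricated credentials; only the
# DynamicTwitterStream calls come from the module above.
import tweepy  # assumes tweepy 3.x, which provides OAuthHandler and StreamListener

class StaticTermChecker(object):
    """Placeholder: always tracks the same terms and never reports a change."""
    def reset(self): pass
    def check(self): return False
    def tracking_terms(self): return ['python']

auth = tweepy.OAuthHandler('CONSUMER_KEY', 'CONSUMER_SECRET')
auth.set_access_token('ACCESS_TOKEN', 'ACCESS_SECRET')
listener = tweepy.StreamListener()

stream = DynamicTwitterStream(auth, listener, StaticTermChecker(), languages=['en'])
try:
    stream.start_polling(interval=60)   # blocks, restarting the stream as needed
except KeyboardInterrupt:
    stream.stop_polling()               # stops polling and disconnects the stream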
https://github.com/michaelbrooks/twitter-monitor/blob/dc9846b2d70746c1f9dd5fbed015f6fcfb028c41/twitter_monitor/stream.py#L122-L136
from time import sleep, time import logging import tweepy logger = logging.getLogger(__name__) class DynamicTwitterStream(object): STOP_TIMEOUT = 1 def __init__(self, auth, listener, term_checker, **options): self.auth = auth self.listener = listener self.term_checker = term_checker self.polling = False self.stream = None self.retry_count = options.get("retry_count", 5) self.unfiltered = options.get('unfiltered', False) self.languages = options.get('languages', None) def start_polling(self, interval): interval = float(interval) self.polling = True self.term_checker.reset() logger.info("Starting polling for changes to the track list") while self.polling: loop_start = time() self.update_stream() self.handle_exceptions() elapsed = time() - loop_start sleep(max(0.1, interval - elapsed)) logger.warning("Term poll ceased!") def stop_polling(self): logger.info("Stopping polling loop") self.polling = False self.stop_stream() def update_stream(self): need_to_restart = False if self.stream is not None and not self.stream.running: logger.warning("Stream exists but isn't running") self.listener.error = False self.listener.streaming_exception = None need_to_restart = True if self.term_checker.check(): logger.info("Terms have changed") need_to_restart = True if self.stream is None and self.unfiltered: need_to_restart = True if not need_to_restart: return logger.info("Restarting stream...") self.stop_stream() self.start_stream() def start_stream(self): tracking_terms = self.term_checker.tracking_terms() if len(tracking_terms) > 0 or self.unfiltered: self.stream = tweepy.Stream(self.auth, self.listener, stall_warnings=True, timeout=90, retry_count=self.retry_count) if len(tracking_terms) > 0: logger.info("Starting new twitter stream with %s terms:", len(tracking_terms)) logger.info(" %s", repr(tracking_terms)) self.stream.filter(track=tracking_terms, is_async=True, languages=self.languages) else: logger.info("Starting new unfiltered stream") self.stream.sample(is_async=True, languages=self.languages)
MIT License
jhorey/ferry
ferry/config/spark/sparkconfig.py
SparkInitializer.apply
python
def apply(self, config, containers): entry_point = { 'type' : 'spark' } entry_point['ip'] = containers[0]['manage_ip'] config_dirs = [] new_config_dir = "/tmp/" + self._generate_config_dir(config.uuid) try: sh.mkdir('-p', new_config_dir) except: sys.stderr.write('could not create config dir ' + new_config_dir) entry_point['instances'] = [] for server in containers: entry_point['instances'].append([server['data_ip'], server['host_name']]) if not 'compute' in containers[0]: slave_file = open(new_config_dir + '/slaves', 'w+') entry_point['master'] = containers[0]['host_name'] entry_point['instances'] = [] master = entry_point['master'] for server in containers: if server != master: slave_file.write("%s\n" % server['host_name']) slave_file.close() else: compute = containers[0]['compute'][0] entry_point['master'] = compute['master'] for c in containers: config_files = new_config_dir + '/*' config_dirs.append([c['container'], config_files, config.config_directory]) return config_dirs, entry_point
Apply the configuration to the instances
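A sketch of the inputs this method expects, based only on the fields it reads; everything below is fabricated illustration data, and config is stubbed with a SimpleNamespace instead of the real Ferry config object.

# Fabricated inputs; the container dicts carry only the keys apply() reads.
from types import SimpleNamespace

initializer = SparkInitializer(system=None)
config = SimpleNamespace(uuid='abc123', config_directory='/service/conf/spark')
containers = [
    {'container': 'c0', 'manage_ip': '10.0.0.2', 'data_ip': '10.1.0.2', 'host_name': 'spark0'},
    {'container': 'c1', 'manage_ip': '10.0.0.3', 'data_ip': '10.1.0.3', 'host_name': 'spark1'},
]

config_dirs, entry_point = initializer.apply(config, containers)
print(entry_point['master'])   # 'spark0' when no 'compute' layer is attached
print(config_dirs)             # one [container, config_glob, target_dir] entry per container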
https://github.com/jhorey/ferry/blob/bbaa047df08386e17130a939e20fde5e840d1ffa/ferry/config/spark/sparkconfig.py#L130-L173
import logging import os import sh import sys import time from string import Template class SparkInitializer(object): def __init__(self, system): self.template_dir = None self.template_repo = None self.container_data_dir = None self.container_log_dir = SparkConfig.log_directory def new_host_name(self, instance_id): return 'spark' + str(instance_id) def _execute_service(self, containers, entry_point, fabric, cmd): all_output = {} master = entry_point['master'] for c in containers: if c.host_name == master: output = fabric.cmd([c], '/service/sbin/startnode %s master' % cmd) else: output = fabric.cmd([c], '/service/sbin/startnode %s slave' % cmd) all_output = dict(all_output.items() + output.items()) time.sleep(4) return all_output def start_service(self, containers, entry_point, fabric): return self._execute_service(containers, entry_point, fabric, "start") def restart_service(self, containers, entry_point, fabric): return self._execute_service(containers, entry_point, fabric, "restart") def stop_service(self, containers, entry_point, fabric): return self._execute_service(containers, entry_point, fabric, "stop") def _generate_config_dir(self, uuid): return 'spark_' + str(uuid) def get_public_ports(self, num_instances): return [] def get_internal_ports(self, num_instances): return ["0-65535"] def get_working_ports(self, num_instances): return [SparkConfig.WEBUI_DRIVER, SparkConfig.WEBUI_HISTORY, SparkConfig.WEBUI_MASTER, SparkConfig.WEBUI_SLAVE, SparkConfig.MASTER_PORT, SparkConfig.SLAVE_PORT] def get_total_instances(self, num_instances, layers): instances = [] for i in range(num_instances): instances.append('spark') return instances def generate(self, num): return SparkConfig(num) def _generate_spark_env(self, new_config_dir, master): in_file = open(self.template_dir + '/spark_env.sh.template', 'r') out_file = open(new_config_dir + '/spark_env.sh', 'w+') changes = { "MASTER": master } for line in in_file: s = Template(line).substitute(changes) out_file.write(s) in_file.close() out_file.close() os.chmod(new_config_dir + '/spark_env.sh', 0755)
Apache License 2.0
lark-parser/lark
docs/ide/app/core.py
Widget._getSpellcheck
python
def _getSpellcheck(self): return True if self.element.spellcheck == "true" else False
Specifies whether the element's contents are subject to spell checking and grammar checking. :returns: True | False
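The getter simply maps the DOM's "true"/"false" string convention onto a Python bool; a standalone illustration of that mapping follows (it does not use the widget class, which needs a browser DOM to construct).

# Standalone illustration of the "true"/"false" string convention; not part of
# the widget library itself.
def spellcheck_to_bool(attr_value):
    return True if attr_value == "true" else False

assert spellcheck_to_bool("true") is True
assert spellcheck_to_bool("false") is False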
https://github.com/lark-parser/lark/blob/ee664b57201fb6e31bdb106d509ceab22ef1dd66/docs/ide/app/core.py#L408-L413
try: from js import window, eval as jseval document = window.document except: print("Emulation mode") from xml.dom.minidom import parseString jseval = None window = None document = parseString("<html><head /><body /></html>") def domCreateAttribute(tag, ns=None): uri = None if ns == "SVG": uri = "http://www.w3.org/2000/svg" elif ns == "XBL": uri = "http://www.mozilla.org/xbl" elif ns == "XUL": uri = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" if uri: return document.createAttribute(uri, tag) return document.createAttribute(tag) def domCreateElement(tag, ns=None): uri = None if ns == "SVG": uri = "http://www.w3.org/2000/svg" elif ns == "XBL": uri = "http://www.mozilla.org/xbl" elif ns == "XUL": uri = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul" if uri: return document.createElementNS(uri, tag) return document.createElement(tag) def domCreateTextNode(txt=""): return document.createTextNode(txt) def domGetElementById(idTag): return document.getElementById(idTag) def domElementFromPoint(x, y): return document.elementFromPoint(x, y) def domGetElementsByTagName(tag): items = document.getElementsByTagName(tag) return [items.item(i) for i in range(0, int(items.length))] class TextNode(object): def __init__(self, txt=None, *args, **kwargs): super().__init__() self._parent = None self._children = [] self.element = domCreateTextNode(txt or "") self._isAttached = False def _setText(self, txt): self.element.data = txt def _getText(self): return self.element.data def __str__(self): return self.element.data def onAttach(self): self._isAttached = True def onDetach(self): self._isAttached = False def _setDisabled(self, disabled): return def _getDisabled(self): return False def children(self): return [] class _WidgetClassWrapper(list): def __init__(self, targetWidget): super().__init__() self.targetWidget = targetWidget def _updateElem(self): if len(self) == 0: self.targetWidget.element.removeAttribute("class") else: self.targetWidget.element.setAttribute("class", " ".join(self)) def append(self, p_object): list.append(self, p_object) self._updateElem() def clear(self): list.clear(self) self._updateElem() def remove(self, value): try: list.remove(self, value) except: pass self._updateElem() def extend(self, iterable): list.extend(self, iterable) self._updateElem() def insert(self, index, p_object): list.insert(self, index, p_object) self._updateElem() def pop(self, index=None): list.pop(self, index) self._updateElem() class _WidgetDataWrapper(dict): def __init__(self, targetWidget): super().__init__() self.targetWidget = targetWidget alldata = targetWidget.element for data in dir(alldata.dataset): dict.__setitem__(self, data, getattr(alldata.dataset, data)) def __setitem__(self, key, value): dict.__setitem__(self, key, value) self.targetWidget.element.setAttribute(str("data-" + key), value) def update(self, E=None, **F): dict.update(self, E, **F) if E is not None and "keys" in dir(E): for key in E: self.targetWidget.element.setAttribute(str("data-" + key), E["data-" + key]) elif E: for (key, val) in E: self.targetWidget.element.setAttribute(str("data-" + key), "data-" + val) for key in F: self.targetWidget.element.setAttribute(str("data-" + key), F["data-" + key]) class _WidgetStyleWrapper(dict): def __init__(self, targetWidget): super().__init__() self.targetWidget = targetWidget style = targetWidget.element.style for key in dir(style): realKey = "" for currChar in key: if currChar.isupper(): realKey += "-" realKey += currChar.lower() val = 
style.getPropertyValue(realKey) if val: dict.__setitem__(self, realKey, val) def __setitem__(self, key, value): dict.__setitem__(self, key, value) self.targetWidget.element.style.setProperty(key, value) def update(self, E=None, **F): dict.update(self, E, **F) if E is not None and "keys" in dir(E): for key in E: self.targetWidget.element.style.setProperty(key, E[key]) elif E: for (key, val) in E: self.targetWidget.element.style.setProperty(key, val) for key in F: self.targetWidget.element.style.setProperty(key, F[key]) class Widget(object): _tagName = None _namespace = None _parserTagName = None style = [] def __init__(self, *args, appendTo=None, style=None, **kwargs): if "_wrapElem" in kwargs.keys(): self.element = kwargs["_wrapElem"] del kwargs["_wrapElem"] else: assert self._tagName is not None self.element = domCreateElement(self._tagName, ns=self._namespace) super().__init__() self._widgetClassWrapper = _WidgetClassWrapper(self) self.addClass(self.style) if style: self.addClass(style) self._children = [] self._catchedEvents = {} self._disabledState = 0 self._isAttached = False self._parent = None self._lastDisplayState = None if args: self.appendChild(*args, **kwargs) if appendTo: appendTo.appendChild(self) def sinkEvent(self, *args): for event_attrName in args: event = event_attrName.lower() if event_attrName in self._catchedEvents or event in ["onattach", "ondetach"]: continue eventFn = getattr(self, event_attrName, None) assert eventFn and callable(eventFn), "{} must provide a {} method".format(str(self), event_attrName) self._catchedEvents[event_attrName] = eventFn if event.startswith("on"): event = event[2:] self.element.addEventListener(event, eventFn) def unsinkEvent(self, *args): for event_attrName in args: event = event_attrName.lower() if event_attrName not in self._catchedEvents: continue eventFn = self._catchedEvents[event_attrName] del self._catchedEvents[event_attrName] if event.startswith("on"): event = event[2:] self.element.removeEventListener(event, eventFn) def disable(self): if not self["disabled"]: self["disabled"] = True def enable(self): if self["disabled"]: self["disabled"] = False def _getDisabled(self): return bool(self._disabledState) def _setDisabled(self, disable): for child in self._children: child._setDisabled(disable) if disable: self._disabledState += 1 self.addClass("is-disabled") if isinstance(self, _attrDisabled): self.element.disabled = True elif self._disabledState: self._disabledState -= 1 if not self._disabledState: self.removeClass("is-disabled") if isinstance(self, _attrDisabled): self.element.disabled = False def _getTargetfuncName(self, key, type): assert type in ["get", "set"] return "_{}{}{}".format(type, key[0].upper(), key[1:]) def __getitem__(self, key): funcName = self._getTargetfuncName(key, "get") if funcName in dir(self): return getattr(self, funcName)() return None def __setitem__(self, key, value): funcName = self._getTargetfuncName(key, "set") if funcName in dir(self): return getattr(self, funcName)(value) raise ValueError("{} is no valid attribute for {}".format(key, (self._tagName or str(self)))) def __str__(self): return str(self.__class__.__name__) def __iter__(self): return self._children.__iter__() def _getData(self): return _WidgetDataWrapper(self) def _getTranslate(self): return True if self.element.translate == "yes" else False def _setTranslate(self, val): self.element.translate = "yes" if val == True else "no" def _getTitle(self): return self.element.title def _setTitle(self, val): self.element.title = val def 
_getTabindex(self): return self.element.getAttribute("tabindex") def _setTabindex(self, val): self.element.setAttribute("tabindex", val)
MIT License
clusterhq/powerstrip
powerstrip/test/test_passthru.py
getProcessOutputPTY
python
def getProcessOutputPTY(executable, args=(), env={}, path=None, reactor=None, errortoo=0, usePTY=False): return _callProtocolWithDeferredPTY(lambda d: _BackRelay(d, errortoo=errortoo), executable, args, env, path, reactor, usePTY=usePTY)
A version of getProcessOutput with a usePTY arg.
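A usage sketch; it assumes Twisted is installed and that getProcessOutputPTY is in scope (e.g. within this test module), and the command being run is arbitrary.

# Usage sketch; assumes Twisted is available and getProcessOutputPTY is in scope.
from twisted.internet import reactor

d = getProcessOutputPTY(b"/bin/bash", ["-c", "echo hello"],
                        env={}, errortoo=True, usePTY=True)
d.addCallback(lambda output: print(output))   # fires with the captured output
d.addBoth(lambda _: reactor.stop())
reactor.run()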
https://github.com/clusterhq/powerstrip/blob/b18dc7261b001720149849b8ed8f2a31880d76c1/powerstrip/test/test_passthru.py#L325-L333
import os from twisted.internet import defer from twisted.trial.unittest import TestCase from ..testtools import GenerallyUsefulPowerstripTestMixin def CompareDockerAndPowerstrip(test_case, cmd, usePTY=False, expectDifferentResults=False): DOCKER = b"unix:///var/run/docker.sock" POWERSTRIP = b"tcp://localhost:%(proxyPort)d" d = getProcessOutputPTY(b"/bin/bash", ["-c", cmd], { b"DOCKER_HOST": DOCKER }, errortoo=True, usePTY=usePTY) def got_result(docker_result): if not docker_result: raise ValueError("Command did not produce any output when sent to " "Docker daemon.") d = getProcessOutputPTY(b"/bin/bash", ["-c", cmd], { b"DOCKER_HOST": POWERSTRIP % dict(proxyPort=test_case.proxyPort) }, errortoo=True, usePTY=usePTY) def compare_result(powerstrip_result, docker_result): if not expectDifferentResults: test_case.assertEquals(docker_result, powerstrip_result) return powerstrip_result, docker_result d.addCallback(compare_result, docker_result) return d d.addCallback(got_result) return d class BasicTests(TestCase, GenerallyUsefulPowerstripTestMixin): if "TEST_PASSTHRU" not in os.environ: skip = "Skipping passthru tests." def tearDown(self): shutdowns = [ self.proxyServer.stopListening()] if hasattr(self, "nullServer"): shutdowns.append(self.nullServer.stopListening()) return defer.gatherResults(shutdowns) def test_run(self): self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run ubuntu echo hello") def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_run_post_hook(self): self._configure(""" endpoints: "POST /*/containers/*/attach": post: [nothing] adapters: nothing: http://devnull/ """, dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run ubuntu echo hello") def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_run_post_hook_tty(self): self._configure(""" endpoints: "POST /*/containers/*/attach": post: [nothing] adapters: nothing: http://devnull/ """, dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run -ti ubuntu echo hello", usePTY=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_run_tty(self): self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run -ti ubuntu echo hello", usePTY=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_logs(self): self._configure("""endpoints: {}\nadapters: {}""", dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, """ id=$(docker run -d ubuntu bash -c \\ "for X in range {1..10000}; do echo 
\\$X; done"); docker wait $id >/dev/null; echo $id """, expectDifferentResults=True) def extractDockerPS((powerstrip, docker)): containerID = docker.split("\n")[0] return CompareDockerAndPowerstrip(self, "docker logs %s" % (containerID,)) d.addCallback(extractDockerPS) return d NULL_CONFIG = """ endpoints: "* *": pre: [nothing] post: [nothing] adapters: nothing: http://localhost:%d/null-adapter """ NULL_TWICE_CONFIG = """ endpoints: "* *": pre: [nothing, nothing2] post: [nothing, nothing2] adapters: nothing: http://localhost:%d/null-adapter nothing2: http://localhost:%d/null-adapter """ def test_run_null_adapter(self): self._getNullAdapter() self._configure(self.NULL_CONFIG % (self.nullPort,), dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run -ti ubuntu echo hello", usePTY=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_run_null_twice_adapter(self): self._getNullAdapter() self._configure(self.NULL_TWICE_CONFIG % (self.nullPort, self.nullPort), dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker run -ti ubuntu echo hello", usePTY=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) self.assertNotIn("fatal", powerstrip) self.assertIn("hello", docker) self.assertIn("hello", powerstrip) d.addCallback(assertions) return d def test_run_docker_pull(self): self._getNullAdapter() self._configure(self.NULL_CONFIG % (self.nullPort,), dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "docker rmi busybox; docker pull busybox", usePTY=True, expectDifferentResults=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) text = ["Downloaded newer image for busybox", "Untagged", "Deleted", "Downloading", "Extracting"] for textLine in text: self.assertIn(textLine, powerstrip) self.assertIn(textLine, docker) d.addCallback(assertions) return d def test_run_docker_build(self): self._getNullAdapter() self._configure(self.NULL_CONFIG % (self.nullPort,), dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() d = CompareDockerAndPowerstrip(self, "cd ../fixtures/free-dockerfile; docker build .", usePTY=True) def assertions((powerstrip, docker)): self.assertNotIn("fatal", docker) d.addCallback(assertions) return d def test_run_stdin(self): self._configure("endpoints: {}\nadapters: {}", dockerOnSocket=True, realDockerSocket="/var/run/docker.sock") self.config.read_and_parse() return CompareDockerAndPowerstrip(self, "echo hello |docker run -i ubuntu cat") from twisted.internet.utils import _BackRelay
Apache License 2.0
mitmproxy/mitmproxy
mitmproxy/flow.py
Flow.backup
python
def backup(self, force=False): if not self._backup: self._backup = self.get_state()
Save a backup of this flow, which can be restored by calling `Flow.revert()`.
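A small sketch of the backup/revert cycle; Flow.revert() is referenced by the docstring but its body is not shown in this record, and the flow is built with mitmproxy's tflow test helper rather than a real captured connection.

# Schematic backup/revert cycle using mitmproxy's test helper.
from mitmproxy.test import tflow

flow = tflow.tflow()          # builds a complete HTTP flow for testing
flow.backup()                 # snapshot the current state
flow.marked = ":warning:"     # mutate the flow
assert flow.modified()
flow.revert()                 # restore the snapshot taken by backup()
assert not flow.modified()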
https://github.com/mitmproxy/mitmproxy/blob/667d4e04749a4bc2212f58fa2b8c31cd1d91fc7b/mitmproxy/flow.py#L178-L183
import time import typing import uuid from mitmproxy import controller, connection from mitmproxy import exceptions from mitmproxy import stateobject from mitmproxy import version class Error(stateobject.StateObject): msg: str timestamp: float KILLED_MESSAGE: typing.ClassVar[str] = "Connection killed." def __init__(self, msg: str, timestamp: typing.Optional[float] = None) -> None: self.msg = msg self.timestamp = timestamp or time.time() _stateobject_attributes = dict( msg=str, timestamp=float ) def __str__(self): return self.msg def __repr__(self): return self.msg @classmethod def from_state(cls, state): f = cls(None) f.set_state(state) return f class Flow(stateobject.StateObject): client_conn: connection.Client server_conn: connection.Server error: typing.Optional[Error] = None intercepted: bool marked: str = "" is_replay: typing.Optional[str] def __init__( self, type: str, client_conn: connection.Client, server_conn: connection.Server, live: bool = None ) -> None: self.type = type self.id = str(uuid.uuid4()) self.client_conn = client_conn self.server_conn = server_conn self.live = live self.intercepted: bool = False self._backup: typing.Optional[Flow] = None self.reply: typing.Optional[controller.Reply] = None self.marked: str = "" self.is_replay: typing.Optional[str] = None self.metadata: typing.Dict[str, typing.Any] = dict() self.comment: str = "" _stateobject_attributes = dict( id=str, error=Error, client_conn=connection.Client, server_conn=connection.Server, type=str, intercepted=bool, is_replay=str, marked=str, metadata=typing.Dict[str, typing.Any], comment=str, ) def get_state(self): d = super().get_state() d.update(version=version.FLOW_FORMAT_VERSION) if self._backup and self._backup != d: d.update(backup=self._backup) return d def set_state(self, state): state = state.copy() state.pop("version") if "backup" in state: self._backup = state.pop("backup") super().set_state(state) @classmethod def from_state(cls, state): f = cls(None, None) f.set_state(state) return f def copy(self): f = super().copy() f.live = False if self.reply is not None: f.reply = controller.DummyReply() return f def modified(self): if self._backup: return self._backup != self.get_state() else: return False
MIT License
alliefitter/boto3_type_annotations
boto3_type_annotations_with_docs/boto3_type_annotations/s3/service_resource.py
ServiceResource.BucketTagging
python
def BucketTagging(self, bucket_name: str = None) -> 'BucketTagging': pass
Creates a BucketTagging resource.:: bucket_tagging = s3.BucketTagging('bucket_name') :type bucket_name: string :param bucket_name: The BucketTagging's bucket_name identifier. This **must** be set. :rtype: :py:class:`S3.BucketTagging` :returns: A BucketTagging resource
https://github.com/alliefitter/boto3_type_annotations/blob/2a88aa562b1aee6e8a6cc30402980884b3707fbb/boto3_type_annotations_with_docs/boto3_type_annotations/s3/service_resource.py#L116-L125
from boto3.s3.transfer import TransferConfig from boto3.resources.collection import ResourceCollection from typing import IO from typing import List from typing import Optional from typing import Union from botocore.client import BaseClient from typing import Callable from typing import Dict from datetime import datetime from boto3.resources import base class ServiceResource(base.ServiceResource): buckets: 'buckets' def Bucket(self, name: str = None) -> 'Bucket': pass def BucketAcl(self, bucket_name: str = None) -> 'BucketAcl': pass def BucketCors(self, bucket_name: str = None) -> 'BucketCors': pass def BucketLifecycle(self, bucket_name: str = None) -> 'BucketLifecycle': pass def BucketLifecycleConfiguration(self, bucket_name: str = None) -> 'BucketLifecycleConfiguration': pass def BucketLogging(self, bucket_name: str = None) -> 'BucketLogging': pass def BucketNotification(self, bucket_name: str = None) -> 'BucketNotification': pass def BucketPolicy(self, bucket_name: str = None) -> 'BucketPolicy': pass def BucketRequestPayment(self, bucket_name: str = None) -> 'BucketRequestPayment': pass
MIT License
vsjha18/nsetools
nse.py
Nse.get_bhavcopy_url
python
def get_bhavcopy_url(self, d): d = mkdate(d) day_of_month = d.strftime("%d") mon = d.strftime("%b").upper() year = d.year url = self.bhavcopy_base_url % (year, mon, day_of_month, mon, year) return url
Take a date and return the bhavcopy URL.
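A standalone sketch of the URL construction for one sample date; it reproduces the string formatting shown above outside the class rather than calling into Nse itself.

# Standalone reproduction of the bhavcopy URL formatting for a sample date.
from datetime import date

base = "https://www1.nseindia.com/content/historical/EQUITIES/%s/%s/cm%s%s%sbhav.csv.zip"
d = date(2020, 1, 1)
url = base % (d.year, d.strftime("%b").upper(), d.strftime("%d"),
              d.strftime("%b").upper(), d.year)
print(url)
# https://www1.nseindia.com/content/historical/EQUITIES/2020/JAN/cm01JAN2020bhav.csv.zip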
https://github.com/vsjha18/nsetools/blob/b0e99c8decac0cba0bc19427428fd2d7b8836eaf/nse.py#L417-L424
import six import ast import re import json import zipfile import io from dateutil import parser from nsetools.bases import AbstractBaseExchange from nsetools.utils import byte_adaptor from nsetools.utils import js_adaptor if six.PY2: from urllib2 import build_opener, HTTPCookieProcessor, Request from urllib import urlencode from cookielib import CookieJar elif six.PY3: from urllib.request import build_opener, HTTPCookieProcessor, Request from urllib.parse import urlencode from http.cookiejar import CookieJar from nsetools.utils import byte_adaptor, js_adaptor from nsetools.datemgr import mkdate class Nse(AbstractBaseExchange): __CODECACHE__ = None def __init__(self): self.opener = self.nse_opener() self.headers = self.nse_headers() self.get_quote_url = 'https://www1.nseindia.com/live_market/dynaContent/live_watch/get_quote/GetQuote.jsp?' self.stocks_csv_url = 'http://www1.nseindia.com/content/equities/EQUITY_L.csv' self.top_gainer_url = 'http://www1.nseindia.com/live_market/dynaContent/live_analysis/gainers/niftyGainers1.json' self.top_loser_url = 'http://www1.nseindia.com/live_market/dynaContent/live_analysis/losers/niftyLosers1.json' self.top_fno_gainer_url = 'https://www1.nseindia.com/live_market/dynaContent/live_analysis/gainers/fnoGainers1.json' self.top_fno_loser_url = 'https://www1.nseindia.com/live_market/dynaContent/live_analysis/losers/fnoLosers1.json' self.advances_declines_url = 'http://www1.nseindia.com/common/json/indicesAdvanceDeclines.json' self.index_url="http://www1.nseindia.com/homepage/Indices1.json" self.bhavcopy_base_url = "https://www1.nseindia.com/content/historical/EQUITIES/%s/%s/cm%s%s%sbhav.csv.zip" self.bhavcopy_base_filename = "cm%s%s%sbhav.csv" self.active_equity_monthly_url = "https://www1.nseindia.com/products/dynaContent/equities/equities/json/mostActiveMonthly.json" self.year_high_url = "https://www1.nseindia.com/products/dynaContent/equities/equities/json/online52NewHigh.json" self.year_low_url = "https://www1.nseindia.com/products/dynaContent/equities/equities/json/online52NewLow.json" self.preopen_nifty_url = "https://www1.nseindia.com/live_market/dynaContent/live_analysis/pre_open/nifty.json" self.preopen_fno_url = "https://www1.nseindia.com/live_market/dynaContent/live_analysis/pre_open/fo.json" self.preopen_niftybank_url = "https://www1.nseindia.com/live_market/dynaContent/live_analysis/pre_open/niftybank.json" self.fno_lot_size_url = "https://www1.nseindia.com/content/fo/fo_mktlots.csv" def get_fno_lot_sizes(self, cached=True, as_json=False): url = self.fno_lot_size_url req = Request(url, None, self.headers) res_dict = {} if cached is not True or self.__CODECACHE__ is None: res = self.opener.open(req) if res is not None: res = byte_adaptor(res) for line in res.read().split('\n'): if line != '' and re.search(',', line) and (line.casefold().find('symbol') == -1): (code, name) = [x.strip() for x in line.split(',')[1:3]] res_dict[code] = int(name) else: raise Exception('no response received') self.__CODECACHE__ = res_dict return self.render_response(self.__CODECACHE__, as_json) def get_stock_codes(self, cached=True, as_json=False): url = self.stocks_csv_url req = Request(url, None, self.headers) res_dict = {} if cached is not True or self.__CODECACHE__ is None: res = self.opener.open(req) if res is not None: res = byte_adaptor(res) for line in res.read().split('\n'): if line != '' and re.search(',', line): (code, name) = line.split(',')[0:2] res_dict[code] = name else: raise Exception('no response received') self.__CODECACHE__ = res_dict return 
self.render_response(self.__CODECACHE__, as_json) def is_valid_code(self, code): if code: stock_codes = self.get_stock_codes() if code.upper() in stock_codes.keys(): return True else: return False def get_quote(self, code, as_json=False): code = code.upper() if self.is_valid_code(code): url = self.build_url_for_quote(code) req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res = res.read() match = re.search( r'<div\s+id="responseDiv"\s+style="display:none">(.*?)</div>', res, re.S ) try: buffer = match.group(1).strip() response = self.clean_server_response(json.loads(buffer)['data'][0]) except SyntaxError as err: raise Exception('ill formatted response') else: return self.render_response(response, as_json) else: return None def get_top_gainers(self, as_json=False): url = self.top_gainer_url req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res_dict = json.load(res) res_list = [self.clean_server_response(item) for item in res_dict['data']] return self.render_response(res_list, as_json) def get_top_losers(self, as_json=False): url = self.top_loser_url req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res_dict = json.load(res) res_list = [self.clean_server_response(item) for item in res_dict['data']] return self.render_response(res_list, as_json) def get_top_fno_gainers(self, as_json=False): url = self.top_fno_gainer_url req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res_dict = json.load(res) res_list = [self.clean_server_response(item) for item in res_dict['data']] return self.render_response(res_list, as_json) def get_top_fno_losers(self, as_json=False): url = self.top_fno_loser_url req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res_dict = json.load(res) res_list = [self.clean_server_response(item) for item in res_dict['data']] return self.render_response(res_list, as_json) def get_advances_declines(self, as_json=False): url = self.advances_declines_url req = Request(url, None, self.headers) resp = self.opener.open(req) resp = byte_adaptor(resp) resp_dict = json.load(resp) resp_list = [self.clean_server_response(item) for item in resp_dict['data']] return self.render_response(resp_list, as_json) def get_index_list(self, as_json=False): url = self.index_url req = Request(url, None, self.headers) resp = self.opener.open(req) resp = byte_adaptor(resp) resp_list = json.load(resp)['data'] index_list = [str(item['name']) for item in resp_list] return self.render_response(index_list, as_json) def get_active_monthly(self, as_json=False): return self._get_json_response_from_url(self.active_equity_monthly_url, as_json) def get_year_high(self, as_json=False): return self._get_json_response_from_url(self.year_high_url, as_json) def get_year_low(self, as_json=False): return self._get_json_response_from_url(self.year_low_url, as_json) def get_preopen_nifty(self, as_json=False): return self._get_json_response_from_url(self.preopen_nifty_url, as_json) def get_preopen_niftybank(self, as_json=False): return self._get_json_response_from_url(self.preopen_niftybank_url, as_json) def get_preopen_fno(self, as_json=False): return self._get_json_response_from_url(self.preopen_fno_url, as_json) def _get_json_response_from_url(self, url, as_json): req = Request(url, None, self.headers) res = self.opener.open(req) res = byte_adaptor(res) res_dict = json.load(res) res_list = [self.clean_server_response(item) for 
item in res_dict['data']] return self.render_response(res_list, as_json) def is_valid_index(self, code): index_list = self.get_index_list() return True if code.upper() in index_list else False def get_index_quote(self, code, as_json=False): url = self.index_url if self.is_valid_index(code): req = Request(url, None, self.headers) resp = self.opener.open(req) resp = byte_adaptor(resp) resp_list = json.load(resp)['data'] resp_list = [self.clean_server_response(item) for item in resp_list] search_flag = False for item in resp_list: if item['name'] == code.upper(): search_flag = True break return self.render_response(item, as_json) if search_flag else None def nse_headers(self): return {'Accept': '*/*', 'Accept-Language': 'en-US,en;q=0.5', 'Host': 'www1.nseindia.com', 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0', 'X-Requested-With': 'XMLHttpRequest' } def nse_opener(self): cj = CookieJar() return build_opener(HTTPCookieProcessor(cj)) def build_url_for_quote(self, code): if code is not None and type(code) is str: encoded_args = urlencode([('symbol', code), ('illiquid', '0'), ('smeFlag', '0'), ('itpFlag', '0')]) return self.get_quote_url + encoded_args else: raise Exception('code must be string') def clean_server_response(self, resp_dict): d = {} for key, value in resp_dict.items(): d[str(key)] = value resp_dict = d for key, value in resp_dict.items(): if type(value) is str or isinstance(value, six.string_types): if re.match('-', value): try: if float(value) or int(value): dataType = True except ValueError: resp_dict[key] = None elif re.search(r'^[0-9,.]+$', value): resp_dict[key] = float(re.sub(',', '', value)) else: resp_dict[key] = str(value) return resp_dict def render_response(self, data, as_json=False): if as_json is True: return json.dumps(data) else: return data
MIT License
gigalixir/gigalixir-cli
gigalixir/__init__.py
ps_migrate
python
def ps_migrate(ctx, app_name, migration_app_name, ssh_opts, ssh_cmd):
    gigalixir_app.migrate(ctx.obj['host'], app_name, migration_app_name, ssh_opts, ssh_cmd)
Run Ecto Migrations on a production node.
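A hedged sketch of exercising this command through click's test runner. It assumes a logged-in gigalixir account, a working SSH key, and an app named my_app (a hypothetical name), since the command ultimately runs migrations over SSH against a real node.

from click.testing import CliRunner

runner = CliRunner()
# Equivalent to running `gigalixir ps:migrate -a my_app` from a shell.
result = runner.invoke(cli, ['ps:migrate', '-a', 'my_app'])
print(result.output)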
https://github.com/gigalixir/gigalixir-cli/blob/e45992394a0b332252d6be7e1e64ea4bff4a8829/gigalixir/__init__.py#L426-L430
from .shell import cast, call from .routers.linux import LinuxRouter from .routers.darwin import DarwinRouter from .routers.windows import WindowsRouter from .openers.linux import LinuxOpener from .openers.darwin import DarwinOpener from .openers.windows import WindowsOpener from . import observer as gigalixir_observer from . import user as gigalixir_user from . import mfa as gigalixir_mfa from . import app as gigalixir_app from . import config as gigalixir_config from . import permission as gigalixir_permission from . import release as gigalixir_release from . import app_activity as gigalixir_app_activity from . import api_key as gigalixir_api_key from . import ssh_key as gigalixir_ssh_key from . import log_drain as gigalixir_log_drain from . import payment_method as gigalixir_payment_method from . import domain as gigalixir_domain from . import invoice as gigalixir_invoice from . import usage as gigalixir_usage from . import database as gigalixir_database from . import free_database as gigalixir_free_database from . import canary as gigalixir_canary from . import git import click import requests import getpass import stripe import subprocess import sys import re import uuid import rollbar import logging import json import netrc import os import platform from functools import wraps import pkg_resources def _show_usage_error(self, file=None): if file is None: file = click._compat.get_text_stderr() color = None if self.ctx is not None: color = self.ctx.color click.echo(self.ctx.get_help() + '\n', file=file, color=color) click.echo('Error: %s' % self.format_message(), file=file, color=color) click.exceptions.UsageError.show = _show_usage_error ROLLBAR_POST_CLIENT_ITEM = "40403cdd48904a12b6d8d27050b12343" env = os.environ.get("GIGALIXIR_ENV", "prod") if env == "prod": rollbar.init(ROLLBAR_POST_CLIENT_ITEM, 'production', enabled=True, allow_logging_basic_config=False) elif env == "dev": rollbar.init(ROLLBAR_POST_CLIENT_ITEM, 'development', enabled=False, allow_logging_basic_config=False) elif env == "test": rollbar.init(ROLLBAR_POST_CLIENT_ITEM, 'test', enabled=False, allow_logging_basic_config=False) else: raise Exception("Invalid GIGALIXIR_ENV") def detect_app_name(f): @wraps(f) def wrapper(*args, **kwds): app_name = kwds['app_name'] if app_name is None: app_name = detect_app() kwds['app_name'] = app_name f(*args, **kwds) return wrapper def report_errors(f): @wraps(f) def wrapper(*args, **kwds): try: f(*args, **kwds) except: version = pkg_resources.get_distribution("gigalixir").version rollbar.report_exc_info(sys.exc_info(), payload_data={"version": version}) logging.getLogger("gigalixir-cli").error(sys.exc_info()[1]) sys.exit(1) return wrapper def rollbar_fingerprint(e): return e[1].__str__() CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) def CatchAllExceptions(cls, handler): class Cls(cls): _original_args = None def make_context(self, info_name, args, parent=None, **extra): self._original_args = ' '.join(args) try: return super(Cls, self).make_context( info_name, args, parent=parent, **extra) except Exception as exc: handler(self, info_name, exc) raise def invoke(self, ctx): try: return super(Cls, self).invoke(ctx) except Exception as exc: handler(self, ctx.info_name, exc) raise return Cls def handle_exception(cmd, info_name, exc): msg = 'command:{} {} error:{}'.format(info_name, cmd._original_args, exc) rollbar.report_message(msg, 'warning') class AliasedGroup(click.Group): def get_command(self, ctx, cmd_name): rv = click.Group.get_command(self, ctx, cmd_name) if rv is not 
None: return rv aliases = { "configs": "config", "set_config": "deprecated:set_config", "databases": "pg", "scale_database": "pg:scale", "delete_database": "pg:destroy", "status": "ps", "scale": "ps:scale", "rollback": "releases:rollback", "remote_console": "ps:remote_console", "ssh": "ps:ssh", "migrate": "ps:migrate", "set_payment_method": "account:payment_method:set", "payment_method": "account:payment_method", "set_password": "account:password:set", "change_password": "account:password:change", "reset_api_key": "account:api_key:reset", "upgrade": "account:upgrade", "log_drains": "drains", "delete_log_drain": "drains:remove", "ssh_keys": "account:ssh_keys", "add_log_drain": "drains:add", "add_ssh_key": "account:ssh_keys:add", "delete_ssh_key": "account:ssh_keys:remove", "add_domain": "domains:add", "send_email_confirmation_token": "account:confirmation:resend", "send_reset_password_token": "account:password:reset", "delete_app": "apps:destroy", "delete_permission": "access:remove", "permissions": "access", "delete_free_database": "deprecated:delete_free_database", "free_databases": "deprecated:free_databases", "create_free_database": "deprecated:create_free_database", "delete_domain": "domains:remove", "delete_config": "config:unset", "add_permission": "access:add", "create_database": "pg:create", "set_git_remote": "git:remote", "invoices": "account:invoices", "current_period_usage": "account:usage", "observer": "ps:observer", "create": "apps:create", "restart": "ps:restart", } if cmd_name not in aliases: return None else: return click.Group.get_command(self, ctx, aliases[cmd_name]) def detect_app(): try: git.check_for_git() remote = call("git remote -v") repo_name = re.search('git.gigalixir.com/(.*) ', remote).group(1) git_pos = repo_name.find(".git") if git_pos >= 0: repo_name = repo_name[:git_pos] return repo_name except (AttributeError, subprocess.CalledProcessError): raise Exception("Could not detect app name. 
Try passing the app name explicitly with the `-a` flag or create an app with `gigalixir create`.") @click.group(cls=AliasedGroup, context_settings=CONTEXT_SETTINGS) @click.option('--env', envvar='GIGALIXIR_ENV', default='prod', help="GIGALIXIR environment [prod, dev, test].") @click.pass_context def cli(ctx, env): ctx.obj = {} logging.basicConfig(format='%(message)s') logging.getLogger("gigalixir-cli").setLevel(logging.INFO) if env == "prod": stripe.api_key = 'pk_live_45dmSl66k4xLy4X4yfF3RVpd' host = "https://api.gigalixir.com" elif env == "dev": stripe.api_key = 'pk_test_6tMDkFKTz4N0wIFQZHuzOUyW' host = "http://localhost:4000" elif env == "test": stripe.api_key = 'pk_test_6tMDkFKTz4N0wIFQZHuzOUyW' host = "https://api.gigalixir.com" else: raise Exception("Invalid GIGALIXIR_ENV") ctx.obj['host'] = host ctx.obj['env'] = env PLATFORM = platform.system().lower() if PLATFORM == "linux": ctx.obj['router'] = LinuxRouter() ctx.obj['opener'] = LinuxOpener() elif PLATFORM == "darwin": ctx.obj['router'] = DarwinRouter() ctx.obj['opener'] = DarwinOpener() elif PLATFORM == "windows": try: os.environ['HOME'] except KeyError: os.environ['HOME'] = os.environ['USERPROFILE'] ctx.obj['router'] = WindowsRouter() ctx.obj['opener'] = WindowsOpener() else: raise Exception("Unknown platform: %s" % PLATFORM) def print_help(ctx, subcommand): if subcommand is None: click.echo(ctx.parent.get_help(), color=ctx.color) else: subcommand_obj = cli.get_command(ctx, subcommand) if subcommand_obj is None: click.echo("command %s not found" % subcommand) else: ctx.info_name = subcommand click.echo(subcommand_obj.get_help(ctx)) @cli.command() @click.argument('subcommand', required=False) @click.pass_context @report_errors def help(ctx, subcommand): print_help(ctx, subcommand) @cli.command(name='ps') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.pass_context @report_errors @detect_app_name def status(ctx, app_name): gigalixir_app.status(ctx.obj['host'], app_name) @cli.command(name='pg:scale') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.option('-d', '--database_id', required=True) @click.option('-s', '--size', type=float, default=0.6, help='Size of the database can be 0.6, 1.7, 4, 8, 16, 32, 64, or 128.') @click.pass_context @report_errors @detect_app_name def scale_database(ctx, app_name, database_id, size): gigalixir_database.scale(ctx.obj['host'], app_name, database_id, size) @cli.command(name='ps:scale') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.option('-r', '--replicas', type=int, help='Number of replicas to run.') @click.option('-s', '--size', type=float, help='Size of each replica between 0.5 and 128 in increments of 0.1.') @click.pass_context @report_errors @detect_app_name def scale(ctx, app_name, replicas, size): if not app_name: raise Exception("app_name is required") gigalixir_app.scale(ctx.obj['host'], app_name, replicas, size) @cli.command(name='releases:rollback') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.option('-r', '--version', default=None, help='The version of the release to revert to. Use gigalixir get releases to find the version. 
If omitted, this defaults to the second most recent release.') @click.pass_context @report_errors @detect_app_name def rollback(ctx, app_name, version): gigalixir_app.rollback(ctx.obj['host'], app_name, version) @cli.command(name='ps:remote_console') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.option('-o', '--ssh_opts', default="", help='Command-line options to pass to ssh.') @click.option('-c', '--ssh_cmd', default="ssh", help='Which ssh command to use.') @click.pass_context @report_errors @detect_app_name def remote_console(ctx, app_name, ssh_opts, ssh_cmd): gigalixir_app.remote_console(ctx.obj['host'], app_name, ssh_opts, ssh_cmd) @cli.command(name='ps:run') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.argument('command', nargs=-1) @click.option('-o', '--ssh_opts', default="", help='Command-line options to pass to ssh.') @click.option('-c', '--ssh_cmd', default="ssh", help='Which ssh command to use.') @click.pass_context @report_errors @detect_app_name def ps_run(ctx, app_name, ssh_opts, ssh_cmd, command): gigalixir_app.ps_run(ctx.obj['host'], app_name, ssh_opts, ssh_cmd, *command) @cli.command(name='ps:ssh') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.argument('command', nargs=-1) @click.option('-o', '--ssh_opts', default="", help='Command-line options to pass to ssh.') @click.option('-c', '--ssh_cmd', default="ssh", help='Which ssh command to use.') @click.pass_context @report_errors @detect_app_name def ssh(ctx, app_name, ssh_opts, ssh_cmd, command): gigalixir_app.ssh(ctx.obj['host'], app_name, ssh_opts, ssh_cmd, *command) @cli.command(name='ps:distillery') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.argument('distillery_command', nargs=-1) @click.option('-o', '--ssh_opts', default="", help='Command-line options to pass to ssh.') @click.option('-c', '--ssh_cmd', default="ssh", help='Which ssh command to use.') @click.pass_context @report_errors @detect_app_name def distillery(ctx, app_name, ssh_opts, ssh_cmd, distillery_command): gigalixir_app.distillery_command(ctx.obj['host'], app_name, ssh_opts, ssh_cmd, *distillery_command) @cli.command(name='ps:restart') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.pass_context @report_errors @detect_app_name def restart(ctx, app_name): gigalixir_app.restart(ctx.obj['host'], app_name) @cli.command() @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.argument('command', nargs=-1) @click.pass_context @report_errors @detect_app_name def run(ctx, app_name, command): gigalixir_app.run(ctx.obj['host'], app_name, command) @cli.command(name='ps:migrate') @click.option('-a', '--app_name', envvar="GIGALIXIR_APP") @click.option('-m', '--migration_app_name', default=None, help='For umbrella apps, specify which inner app to migrate.') @click.option('-o', '--ssh_opts', default="", help='Command-line options to pass to ssh.') @click.option('-c', '--ssh_cmd', default="ssh", help='Which ssh command to use.') @click.pass_context @report_errors @detect_app_name
MIT License
sodacookie/anime
anime/core/rubberband.py
RubberBand.set_speed
python
def set_speed(self, name, speed):
    self._filters[name].speed = speed
Override the speed of the filter registered under the given name.
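A hedged sketch of how set_speed is typically reached. The filter callable passed to set_filter is hypothetical; the exact signature the Filter class expects is not shown in this excerpt.

rb = RubberBand()
rb.x = 0.0                                  # registers the attribute and its reducer
rb.set_filter('x', lambda cur, dest: dest,  # hypothetical filter callable
              speed=1.0)
rb.set_speed('x', 2.5)                      # overrides the speed stored on that filter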
https://github.com/sodacookie/anime/blob/44546b5254a659a38bed744e81008d999a328fb2/anime/core/rubberband.py#L168-L170
from functools import reduce from types import FunctionType, MethodType from copy import deepcopy from anime.core.filter import Filter import anime.core.reducer as reducer class RubberBand(object): def __init__(self): super().__init__() object.__setattr__(self, "_reducers", {}) object.__setattr__(self, "_owner", None) object.__setattr__(self, "_children", []) object.__setattr__(self, "_dirty", False) object.__setattr__(self, "_dirtied", set()) object.__setattr__(self, "_filters", {}) object.__setattr__(self, "_dest", {}) def __setattr__(self, name, value): if hasattr(type(self), name) and isinstance(getattr(type(self), name), property): object.__setattr__(self, name, value) return if hasattr(self, name): if self._filters.get(name): self._set_dirty(name) self._dest[name] = value else: object.__setattr__(self, name, value) else: object.__setattr__(self, name, value) if isinstance(value, (int, float, complex)): self._reducers[name] = reducer.addition_reducer else: self._reducers[name] = reducer.top_level_reducer def set_owner(self, owner): object.__setattr__(self, '_owner', owner) owner._children.append(self) def get_owner(self): return self._owner def get_children(self): return self._children def get_dest(self, name): return self._dest.get(name) def set_filter(self, name, filter, speed=None, done=None): if not hasattr(self, name): raise ValueError("Does not contain the attribute %s" % name) if isinstance(filter, FunctionType): if speed is None: speed = 0 self._filters[name] = Filter(filter, done, speed) else: cpy_filter = deepcopy(filter) if speed is not None: cpy_filter.speed = speed self._filters[name] = cpy_filter def get_filter(self, name): return self._filters.get(name) def remove_filter(self, name): if self._dest.get(name): object.__setattr__(self, name, self._dest[name]) self._set_clean(name) del self._filters[name] def set_reducer(self, name, reducer): self._reducers[name] = reducer def force_set(self, name, value): object.__setattr__(self, name, value) if self.is_attr_dirty(name): self._set_clean(name) def group_set(self, _setter=None, _quiet=False, **kwarg): if not _setter: _setter = {} _setter.update(kwarg) for key, value in _setter.items(): if hasattr(self, key): setattr(self, key, value) else: if not _quiet: raise AttributeError("Object has no attribute '%s'"%key) def force_group_set(self, _setter=None, _quiet=False, **kwarg): if not _setter: _setter = {} _setter.update(kwarg) for key, value in _setter.items(): if hasattr(self, key): self.force_set(key, value) else: if not _quiet: raise AttributeError("Object has no attribute '%s'"%key) def get_speed(self, name): return self._filters[name].speed
MIT License
nipy/nipy
nipy/algorithms/graph/bipartite_graph.py
BipartiteGraph.__init__
python
def __init__(self, V, W, edges=None, weights=None):
    V = int(V)
    W = int(W)
    if (V < 1) or (W < 1):
        raise ValueError('cannot create graph with no vertex')
    self.V = V
    self.W = W
    self.E = 0
    if (edges is None) & (weights is None):
        self.edges = np.array([], np.int)
        self.weights = np.array([])
    else:
        if edges.shape[0] == np.size(weights):
            E = edges.shape[0]
            self.E = E
            self.edges = - np.ones((E, 2), np.int)
            self.set_edges(edges)
            self.set_weights(weights)
        else:
            raise ValueError('Incompatible size of the edges and weights matrices')
Constructor

Parameters
----------
V (int), the number of vertices of subset 1
W (int), the number of vertices of subset 2
edges=None: array of shape (self.E, 2)
    the edge array of the graph
weights=None: array of shape (self.E)
    the associated weights array
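A small construction sketch. It assumes set_edges and set_weights are defined elsewhere in the class (they are not shown in this excerpt); the edge list and weights below are example values.

import numpy as np

edges = np.array([[0, 0], [1, 1], [2, 0]])   # pairs (vertex in subset 1, vertex in subset 2)
weights = np.array([1.0, 0.5, 2.0])
g = BipartiteGraph(3, 2, edges=edges, weights=weights)
print(g.V, g.W, g.E)                         # 3 2 3

The cross_knn and cross_eps helpers in the context below build such graphs automatically from feature matrices.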
https://github.com/nipy/nipy/blob/d16d268938dcd5c15748ca051532c21f57cf8a22/nipy/algorithms/graph/bipartite_graph.py#L166-L197
from __future__ import absolute_import import numpy as np def check_feature_matrices(X, Y): if np.size(X) == X.shape[0]: X = np.reshape(X, (np.size(X), 1)) if np.size(Y) == Y.shape[0]: Y = np.reshape(Y, (np.size(Y), 1)) if X.shape[1] != Y.shape[1]: raise ValueError('X.shape[1] should = Y.shape[1]') def bipartite_graph_from_coo_matrix(x): i, j = x.nonzero() edges = np.vstack((i, j)).T weights = x.data wg = BipartiteGraph(x.shape[0], x.shape[1], edges, weights) return wg def bipartite_graph_from_adjacency(x): from scipy.sparse import coo_matrix return bipartite_graph_from_coo_matrix(coo_matrix(x)) def cross_eps(X, Y, eps=1.): from scipy.sparse import coo_matrix check_feature_matrices(X, Y) try: eps = float(eps) except: if np.isnan(eps): raise ValueError('eps is nan') if np.isinf(eps): raise ValueError('eps is inf') ij = np.zeros((0, 2)) data = np.zeros(0) for i, x in enumerate(X): dist = np.sum((Y - x) ** 2, 1) idx = np.asanyarray(np.where(dist < eps)) data = np.hstack((data, dist[idx.ravel()])) ij = np.vstack((ij, np.hstack(( i * np.ones((idx.size, 1)), idx.T)))).astype(np.int) data = np.maximum(data, 1.e-15) adj = coo_matrix((data, ij.T), shape=(X.shape[0], Y.shape[0])) return bipartite_graph_from_coo_matrix(adj) def cross_knn(X, Y, k=1): from scipy.sparse import coo_matrix check_feature_matrices(X, Y) try: k = int(k) except: if np.isnan(k): raise ValueError('k is nan') if np.isinf(k): raise ValueError('k is inf') k = min(k, Y.shape[0] -1) ij = np.zeros((0, 2)) data = np.zeros(0) for i, x in enumerate(X): dist = np.sum((Y - x) ** 2, 1) idx = np.argsort(dist)[:k] data = np.hstack((data, dist[idx])) ij = np.vstack((ij, np.hstack(( i * np.ones((k, 1)), np.reshape(idx, (k, 1)))))) data = np.maximum(data, 1.e-15) adj = coo_matrix((data, ij.T), shape=(X.shape[0], Y.shape[0])) return bipartite_graph_from_coo_matrix(adj) class BipartiteGraph(object):
BSD 3-Clause New or Revised License
microsoft/task_oriented_dialogue_as_dataflow_synthesis
src/dataflow/multiwoz/trade_dst/create_data.py
divideData
python
def divideData(data, output_dir):
    testListFile = []
    fin = open(os.path.join(output_dir, "multi-woz", "testListFile.json"), "r")
    for line in fin:
        testListFile.append(line[:-1])
    fin.close()
    valListFile = []
    fin = open(os.path.join(output_dir, "multi-woz", "valListFile.json"), "r")
    for line in fin:
        valListFile.append(line[:-1])
    fin.close()
    trainListFile = open(os.path.join(output_dir, "trainListFile"), "w")
    test_dials = []
    val_dials = []
    train_dials = []
    count_train, count_val, count_test = 0, 0, 0
    for dialogue_name in data:
        dial_item = data[dialogue_name]
        domains = []
        for dom_k, dom_v in dial_item["goal"].items():
            if (
                dom_v and dom_k not in IGNORE_KEYS_IN_GOAL
            ):
                domains.append(dom_k)
        dial = get_dial(data[dialogue_name])
        if dial:
            dialogue = {}
            dialogue["dialogue_idx"] = dialogue_name
            dialogue["domains"] = list(
                set(domains)
            )
            last_bs = []
            dialogue["dialogue"] = []
            for turn_i, turn in enumerate(dial):
                turn_dialog = {}
                turn_dialog["system_transcript"] = (
                    dial[turn_i - 1]["sys"] if turn_i > 0 else ""
                )
                turn_dialog["turn_idx"] = turn_i
                turn_dialog["belief_state"] = [
                    {"slots": [s], "act": "inform"} for s in turn["bvs"]
                ]
                turn_dialog["turn_label"] = [
                    bs["slots"][0]
                    for bs in turn_dialog["belief_state"]
                    if bs not in last_bs
                ]
                turn_dialog["transcript"] = turn["usr"]
                turn_dialog["system_acts"] = (
                    dial[turn_i - 1]["sys_a"] if turn_i > 0 else []
                )
                turn_dialog["domain"] = turn["domain"]
                last_bs = turn_dialog["belief_state"]
                dialogue["dialogue"].append(turn_dialog)
            if dialogue_name in testListFile:
                test_dials.append(dialogue)
                count_test += 1
            elif dialogue_name in valListFile:
                val_dials.append(dialogue)
                count_val += 1
            else:
                trainListFile.write(dialogue_name + "\n")
                train_dials.append(dialogue)
                count_train += 1
    print(
        "# of dialogues: Train {}, Val {}, Test {}".format(
            count_train, count_val, count_test
        )
    )
    with open(os.path.join(output_dir, "dev_dials.json"), "w") as f:
        json.dump(val_dials, f, indent=4)
    with open(os.path.join(output_dir, "test_dials.json"), "w") as f:
        json.dump(test_dials, f, indent=4)
    with open(os.path.join(output_dir, "train_dials.json"), "w") as f:
        json.dump(train_dials, f, indent=4)
Given the test and validation list files, divide the data into train, dev, and test sets.
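A hedged end-to-end sketch. It assumes the MultiWOZ archive can be downloaded and that output_dir (a hypothetical path) is writable; createData is defined in the context below and returns the normalized dialogue dictionary this function expects.

output_dir = "multiwoz_out"            # hypothetical output directory
delex_data = createData(output_dir)    # downloads, normalizes and annotates dialogues
divideData(delex_data, output_dir)     # writes train/dev/test_dials.json splits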
https://github.com/microsoft/task_oriented_dialogue_as_dataflow_synthesis/blob/e848243ff88f4b0d1a383b8df2612a1f3edb5300/src/dataflow/multiwoz/trade_dst/create_data.py#L516-L607
import argparse import json import os import re import shutil from io import BytesIO from zipfile import ZipFile import numpy as np import requests from tqdm import tqdm np.set_printoptions(precision=3) np.random.seed(2) MAX_LENGTH = 50 IGNORE_KEYS_IN_GOAL = ["eod", "topic", "messageLen", "message"] MULTIWOZ_2_0_URL = "https://www.repository.cam.ac.uk/bitstream/handle/1810/280608/MULTIWOZ2.zip?sequence=3&isAllowed=y" MULTIWOZ_2_1_URL = "https://www.repository.cam.ac.uk/bitstream/handle/1810/294507/MULTIWOZ2.1.zip?sequence=1&isAllowed=y" def _create_replacement_pair(line): tok_from, tok_to = line.replace("\n", "").split("\t") return " " + tok_from + " ", " " + tok_to + " " REPLACEMENTS = [ _create_replacement_pair(line) for line in open(os.path.join(os.path.dirname(__file__), "mapping.pair"), "r") ] def is_ascii(s): return all(ord(c) < 128 for c in s) def insertSpace(token, text): sidx = 0 while True: sidx = text.find(token, sidx) if sidx == -1: break if ( sidx + 1 < len(text) and re.match("[0-9]", text[sidx - 1]) and re.match("[0-9]", text[sidx + 1]) ): sidx += 1 continue if text[sidx - 1] != " ": text = text[:sidx] + " " + text[sidx:] sidx += 1 if sidx + len(token) < len(text) and text[sidx + len(token)] != " ": text = text[: sidx + 1] + " " + text[sidx + 1 :] sidx += 1 return text def normalize(text): text = text.lower() text = re.sub(r"^\s*|\s*$", "", text) text = re.sub(r"b&b", "bed and breakfast", text) text = re.sub(r"b and b", "bed and breakfast", text) text = re.sub("(\u2018|\u2019)", "'", text) text = text.replace(";", ",") text = re.sub(r"$\/", "", text) text = text.replace("/", " and ") text = text.replace("-", " ") text = re.sub(r'["\<>@\(\)]', "", text) for token in ["?", ".", ",", "!"]: text = insertSpace(token, text) text = insertSpace("'s", text) text = re.sub(r"^'", "", text) text = re.sub(r"'$", "", text) text = re.sub(r"'\s", " ", text) text = re.sub(r"\s'", " ", text) for fromx, tox in REPLACEMENTS: text = " " + text + " " text = text.replace(fromx, tox)[1:-1] text = re.sub(" +", " ", text) tokens = text.split() i = 1 while i < len(tokens): if re.match(r"^\d+$", tokens[i]) and re.match(r"\d+$", tokens[i - 1]): tokens[i - 1] += tokens[i] del tokens[i] else: i += 1 text = " ".join(tokens) return text def fixDelex(filename, data, data2, idx, idx_acts): try: turn = data2[filename.strip(".json")][str(idx_acts)] except: return data if not isinstance(turn, str): for k, _act in turn.items(): if "Attraction" in k: if "restaurant_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "restaurant", "attraction" ) if "hotel_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "hotel", "attraction" ) if "Hotel" in k: if "attraction_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "attraction", "hotel" ) if "restaurant_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "restaurant", "hotel" ) if "Restaurant" in k: if "attraction_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "attraction", "restaurant" ) if "hotel_" in data["log"][idx]["text"]: data["log"][idx]["text"] = data["log"][idx]["text"].replace( "hotel", "restaurant" ) return data def getDialogueAct( filename, data, data2, idx, idx_acts ): acts = [] try: turn = data2[filename.strip(".json")][str(idx_acts)] except: return acts if not isinstance(turn, str): for k in turn.keys(): if k.split("-")[1].lower() == "request": for a 
in turn[k]: acts.append(a[0].lower()) elif k.split("-")[1].lower() == "inform": for a in turn[k]: acts.append([a[0].lower(), normalize(a[1].lower())]) return acts def get_summary_bstate(bstate, get_domain=False): domains = [ "taxi", "restaurant", "hospital", "hotel", "attraction", "train", "police", ] summary_bstate = [] summary_bvalue = [] active_domain = [] for domain in domains: domain_active = False booking = [] for slot in sorted(bstate[domain]["book"].keys()): if slot == "booked": if len(bstate[domain]["book"]["booked"]) != 0: booking.append(1) else: booking.append(0) else: if bstate[domain]["book"][slot] != "": booking.append(1) summary_bvalue.append( [ "{}-book {}".format(domain, slot.strip().lower()), normalize(bstate[domain]["book"][slot].strip().lower()), ] ) else: booking.append(0) if domain == "train": if "people" not in bstate[domain]["book"].keys(): booking.append(0) if "ticket" not in bstate[domain]["book"].keys(): booking.append(0) summary_bstate += booking for slot in bstate[domain]["semi"]: slot_enc = [0, 0, 0] if bstate[domain]["semi"][slot] == "not mentioned": slot_enc[0] = 1 elif bstate[domain]["semi"][slot] in [ "dont care", "dontcare", "don't care", "do not care", ]: slot_enc[1] = 1 summary_bvalue.append( ["{}-{}".format(domain, slot.strip().lower()), "dontcare"] ) elif bstate[domain]["semi"][slot]: summary_bvalue.append( [ "{}-{}".format(domain, slot.strip().lower()), normalize(bstate[domain]["semi"][slot].strip().lower()), ] ) if slot_enc != [0, 0, 0]: domain_active = True summary_bstate += slot_enc if domain_active: summary_bstate += [1] active_domain.append(domain) else: summary_bstate += [0] assert len(summary_bstate) == 94 if get_domain: return active_domain else: return summary_bstate, summary_bvalue def analyze_dialogue(dialogue, maxlen): d = dialogue if len(d["log"]) % 2 != 0: print("odd # of turns") return None d_pp = {} d_pp["goal"] = d["goal"] usr_turns = [] sys_turns = [] for i in range(len(d["log"])): if len(d["log"][i]["text"].split()) > maxlen: return None if i % 2 == 0: text = d["log"][i]["text"] if not is_ascii(text): return None usr_turns.append(d["log"][i]) else: text = d["log"][i]["text"] if not is_ascii(text): return None belief_summary, belief_value_summary = get_summary_bstate( d["log"][i]["metadata"] ) d["log"][i]["belief_summary"] = str(belief_summary) d["log"][i]["belief_value_summary"] = belief_value_summary sys_turns.append(d["log"][i]) d_pp["usr_log"] = usr_turns d_pp["sys_log"] = sys_turns return d_pp def get_dial(dialogue): dial = [] d_orig = analyze_dialogue(dialogue, MAX_LENGTH) if d_orig is None: return None usr = [t["text"] for t in d_orig["usr_log"]] sys = [t["text"] for t in d_orig["sys_log"]] sys_a = [t["dialogue_acts"] for t in d_orig["sys_log"]] bvs = [t["belief_value_summary"] for t in d_orig["sys_log"]] domain = [t["domain"] for t in d_orig["usr_log"]] for item in zip(usr, sys, sys_a, domain, bvs): dial.append( { "usr": item[0], "sys": item[1], "sys_a": item[2], "domain": item[3], "bvs": item[4], } ) return dial def download_data(url): r = requests.get(url, stream=True) total_size = int(r.headers.get("content-length", 0)) block_size = 1024 t = tqdm(total=total_size, unit="B", unit_scale=True) data = BytesIO() for block in r.iter_content(block_size): t.update(len(block)) data.write(block) t.close() if total_size != 0 and t.n != total_size: raise ValueError(f"Unexpected error during downloading {url}") return data def loadData(output_dir, use_multiwoz_2_1=False): data_url = os.path.join(output_dir, "multi-woz", "data.json") 
if not os.path.exists(output_dir): os.makedirs(output_dir) os.makedirs(os.path.join(output_dir, "multi-woz")) if not os.path.exists(data_url): print("Downloading and unzipping the MultiWOZ dataset") if not use_multiwoz_2_1: data = download_data(MULTIWOZ_2_0_URL) subdir_name = "MULTIWOZ2 2" else: data = download_data(MULTIWOZ_2_1_URL) subdir_name = "MULTIWOZ2.1" zip_ref = ZipFile(data) zip_ref.extractall(os.path.join(output_dir, "multi-woz")) zip_ref.close() shutil.copy( os.path.join(output_dir, "multi-woz", subdir_name, "data.json"), os.path.join(output_dir, "multi-woz"), ) shutil.copy( os.path.join(output_dir, "multi-woz", subdir_name, "valListFile.json"), os.path.join(output_dir, "multi-woz"), ) shutil.copy( os.path.join(output_dir, "multi-woz", subdir_name, "testListFile.json"), os.path.join(output_dir, "multi-woz"), ) shutil.copy( os.path.join(output_dir, "multi-woz", subdir_name, "dialogue_acts.json"), os.path.join(output_dir, "multi-woz"), ) def getDomain(idx, log, domains, last_domain): if idx == 1: active_domains = get_summary_bstate(log[idx]["metadata"], True) crnt_doms = active_domains[0] if len(active_domains) != 0 else domains[0] return crnt_doms else: ds_diff = get_ds_diff(log[idx - 2]["metadata"], log[idx]["metadata"]) if len(ds_diff.keys()) == 0: crnt_doms = last_domain else: crnt_doms = list(ds_diff.keys()) return crnt_doms[0] def get_ds_diff(prev_d, crnt_d): diff = {} if not prev_d or not crnt_d: return diff for ((k1, v1), (k2, v2)) in zip(prev_d.items(), crnt_d.items()): assert k1 == k2 if v1 != v2: diff[k2] = v2 return diff def createData(output_dir, use_multiwoz_2_1=False): loadData(output_dir, use_multiwoz_2_1) delex_data = {} fin1 = open(os.path.join(output_dir, "multi-woz", "data.json"), "r") data = json.load(fin1) fin2 = open(os.path.join(output_dir, "multi-woz", "dialogue_acts.json"), "r") data2 = json.load(fin2) print("Processing dialogues ...") for _didx, dialogue_name in tqdm(enumerate(data), unit=" dialogues"): dialogue = data[dialogue_name] domains = [] for dom_k, dom_v in dialogue["goal"].items(): if ( dom_v and dom_k not in IGNORE_KEYS_IN_GOAL ): domains.append(dom_k) idx_acts = 1 last_domain, _last_slot_fill = "", [] for idx, turn in enumerate(dialogue["log"]): origin_text = normalize(turn["text"]) dialogue["log"][idx]["text"] = origin_text if idx % 2 == 1: cur_domain = getDomain(idx, dialogue["log"], domains, last_domain) last_domain = [cur_domain] dialogue["log"][idx - 1]["domain"] = cur_domain dialogue["log"][idx]["dialogue_acts"] = getDialogueAct( dialogue_name, dialogue, data2, idx, idx_acts ) idx_acts += 1 dialogue = fixDelex(dialogue_name, dialogue, data2, idx, idx_acts) delex_data[dialogue_name] = dialogue return delex_data
MIT License
castagnait/plugin.video.netflix
resources/lib/navigation/library.py
LibraryActionExecutor.check_autoupdate_device
python
def check_autoupdate_device(self, pathitems):
    uuid = G.SHARED_DB.get_value('auto_update_device_uuid')
    if uuid is None:
        msg = common.get_local_string(30212)
    else:
        client_uuid = G.LOCAL_DB.get_value('client_uuid')
        msg = common.get_local_string(30210 if client_uuid == uuid else 30211)
    ui.show_notification(msg, time=8000)
Check if the current device manages the auto-updates of the shared library (MySQL)
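A hedged sketch: this method only works inside Kodi with the add-on's globals (G) initialized, since it reads the shared and local databases before showing a notification.

executor = LibraryActionExecutor({})   # the params dict is only logged on init
executor.check_autoupdate_device([])   # pathitems is not used by this method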
https://github.com/castagnait/plugin.video.netflix/blob/1c68c7d4c399603a5dcbeef1e7637de7a9036a72/resources/lib/navigation/library.py#L129-L137
import resources.lib.common as common import resources.lib.kodi.ui as ui import resources.lib.kodi.library_utils as lib_utils from resources.lib.globals import G from resources.lib.kodi.library import get_library_cls from resources.lib.utils.logging import LOG class LibraryActionExecutor: def __init__(self, params): LOG.debug('Initializing "LibraryActionExecutor" with params: {}', params) self.params = params @common.inject_video_id(path_offset=1) def export(self, videoid): get_library_cls().export_to_library(videoid) common.container_refresh() @common.inject_video_id(path_offset=1) def remove(self, videoid): if not ui.ask_for_confirmation(common.get_local_string(30030), common.get_local_string(30124)): return get_library_cls().remove_from_library(videoid) common.container_refresh(use_delay=True) @common.inject_video_id(path_offset=1) def update(self, videoid): get_library_cls().update_library(videoid) common.container_refresh() def sync_mylist(self, pathitems): if not ui.ask_for_confirmation(common.get_local_string(30122), common.get_local_string(30123)): return get_library_cls().sync_library_with_mylist() def auto_upd_run_now(self, pathitems): if not ui.ask_for_confirmation(common.get_local_string(30065), common.get_local_string(30231)): return get_library_cls().auto_update_library(False) def sync_mylist_sel_profile(self, pathitems): if _check_auto_update_running(): return preselect_guid = G.SHARED_DB.get_value('sync_mylist_profile_guid', G.LOCAL_DB.get_guid_owner_profile()) selected_guid = ui.show_profiles_dialog(title=common.get_local_string(30228), preselect_guid=preselect_guid) if not selected_guid: return G.SHARED_DB.set_value('sync_mylist_profile_guid', selected_guid) def purge(self, pathitems): if _check_auto_update_running(): return if not ui.ask_for_confirmation(common.get_local_string(30125), common.get_local_string(30126)): return get_library_cls().clear_library() def import_library(self, pathitems): if _check_auto_update_running(): return path = ui.show_browse_dialog(common.get_local_string(651), default_path=G.DATA_PATH) if path: if not ui.ask_for_confirmation(common.get_local_string(30140), common.get_local_string(20135)): return get_library_cls().import_library(path) @common.inject_video_id(path_offset=1) def export_new_episodes(self, videoid): get_library_cls().export_to_library_new_episodes(videoid) @common.inject_video_id(path_offset=1) def exclude_from_auto_update(self, videoid): lib_utils.set_show_excluded_from_auto_update(videoid, True) common.container_refresh() @common.inject_video_id(path_offset=1) def include_in_auto_update(self, videoid): lib_utils.set_show_excluded_from_auto_update(videoid, False) common.container_refresh() def mysql_test(self, pathitems): def set_autoupdate_device(self, pathitems): if _check_auto_update_running(): return random_uuid = common.get_random_uuid() G.LOCAL_DB.set_value('client_uuid', random_uuid) G.SHARED_DB.set_value('auto_update_device_uuid', random_uuid) ui.show_notification(common.get_local_string(30209), time=8000)
MIT License
aws-samples/aws-media-services-simple-live-workflow
CloudFormation/mediapackage_channel.py
create_channel
python
def create_channel(mediapackage, event, context, auto_id=True):
    if auto_id:
        channel_id = "%s-%s" % (resource_tools.stack_name(event), event["LogicalResourceId"])
    else:
        channel_id = event["PhysicalResourceId"]
    channel = {
        "Id": channel_id,
        "Description": "CloudFormation Stack ID %s" % event["StackId"],
        "HlsIngest": {}
    }
    try:
        response = mediapackage.create_channel(
            Id=channel_id,
            Description="CloudFormation Stack ID %s" % event["StackId"]
        )
        print(json.dumps(response))
        print(response["Arn"])
        attributes = {
            "Arn": response["Arn"],
            "PrimaryUrl": response["HlsIngest"]["IngestEndpoints"][0]["Url"],
            "PrimaryUsername": response["HlsIngest"]["IngestEndpoints"][0]["Username"],
            "PrimaryPassword": response["HlsIngest"]["IngestEndpoints"][0]["Password"],
            "SecondaryUrl": response["HlsIngest"]["IngestEndpoints"][1]["Url"],
            "SecondaryUsername": response["HlsIngest"]["IngestEndpoints"][1]["Username"],
            "SecondaryPassword": response["HlsIngest"]["IngestEndpoints"][1]["Password"]
        }
        print(attributes)
        result = {
            'Status': 'SUCCESS',
            'Data': attributes,
            'ResourceId': channel_id
        }
    except Exception as ex:
        print(ex)
        result = {
            'Status': 'FAILED',
            'Data': {"Exception": str(ex)},
            'ResourceId': channel_id
        }
    return result
Create a MediaPackage channel.
Return the channel URL, username and password generated by MediaPackage.
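A hedged invocation sketch, assuming valid AWS credentials with MediaPackage permissions. The event fields mirror what CloudFormation passes to the Lambda handler shown in the context; the resource and stack identifiers are example values, and the Lambda context is unused by this function, so None is passed.

import boto3

mediapackage = boto3.client('mediapackage')
event = {                                   # hypothetical custom-resource event
    "RequestType": "Create",
    "LogicalResourceId": "MediaPackageChannel",
    "StackId": "arn:aws:cloudformation:us-west-2:123456789012:stack/demo/abc",
}
result = create_channel(mediapackage, event, context=None)
print(result["Status"], result["ResourceId"])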
https://github.com/aws-samples/aws-media-services-simple-live-workflow/blob/924375b8148dc4f3f521b5c463ee1d7ed39a13d2/CloudFormation/mediapackage_channel.py#L45-L93
from botocore.vendored import requests import boto3 import json import string import random import resource_tools def event_handler(event, context): print("Event Input: %s" % json.dumps(event)) try: mediapackage = boto3.client('mediapackage') if event["RequestType"] == "Create": result = create_channel(mediapackage, event, context) elif event["RequestType"] == "Update": result = update_channel(mediapackage, event, context) elif event["RequestType"] == "Delete": result = delete_channel(mediapackage, event, context) except Exception as exp: print("Exception: %s" % exp) result = { 'Status': 'FAILED', 'Data': {"Exception": str(exp)}, 'ResourceId': None } resource_tools.send(event, context, result['Status'], result['Data'], result['ResourceId']) return
Apache License 2.0
aristoteleo/dynamo-release
dynamo/tools/recipes.py
velocity_N
python
def velocity_N(
    adata,
    group=None,
    recalculate_pca=True,
    recalculate_umap=True,
    del_2nd_moments=None,
):
    del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY)

    var_columns = adata.var.columns
    layer_keys = adata.layers.keys()

    if not np.all([i in layer_keys for i in ["X_new", "X_total"]]):
        raise Exception(f"The `X_new`, `X_total` has to exist in your data before running velocity_N function.")

    for i in ["M_t", "M_tt", "M_n", "M_tn", "M_nn", "velocity_N", "velocity_T"]:
        if i in layer_keys:
            del adata.layers[i]

    for i in [
        "alpha", "beta", "gamma", "half_life", "alpha_b", "alpha_r2", "gamma_b",
        "gamma_r2", "gamma_logLL", "delta_b", "delta_r2", "bs", "bf", "uu0", "ul0",
        "su0", "sl0", "U0", "S0", "total0", "beta_k", "gamma_k",
    ]:
        if i in var_columns:
            del adata.var[i]

    if group is not None:
        group_prefixes = [group + "_" + str(i) + "_" for i in adata.obs[group].unique()]
        for i in group_prefixes:
            for j in [
                "alpha", "beta", "gamma", "half_life", "alpha_b", "alpha_r2", "gamma_b",
                "gamma_r2", "gamma_logLL", "delta_b", "delta_r2", "bs", "bf", "uu0", "ul0",
                "su0", "sl0", "U0", "S0", "total0", "beta_k", "gamma_k",
            ]:
                if i + j in var_columns:
                    del adata.var[i + j]

    if recalculate_pca:
        pca(adata, np.log1p(adata[:, adata.var.use_for_pca].layers["X_new"]), pca_key="X_pca")

    for i in ["spliced", "unspliced", "X_spliced", "X_unspliced"]:
        if i in layer_keys:
            del adata.layers[i]

    if adata.uns["pp"]["experiment_type"] == "one-shot":
        dynamics(
            adata,
            one_shot_method="sci_fate",
            model="deterministic",
            group=group,
            del_2nd_moments=del_2nd_moments,
        )
    elif adata.uns["pp"]["experiment_type"] == "kin":
        dynamics(
            adata,
            model="deterministic",
            est_method="twostep",
            group=group,
            del_2nd_moments=del_2nd_moments,
        )
    else:
        raise Exception(
            f"velocity_N function only supports either the one-shot or kinetics (kin) metabolic labeling "
            f"experiment."
        )

    if recalculate_umap:
        reduceDimension(adata, enforce=True)

    cell_velocities(
        adata,
        basis="pca",
        X=adata.layers["M_n"],
        V=adata.layers["velocity_N"],
        enforce=True,
    )
    cell_velocities(
        adata,
        basis="umap",
        X=adata.layers["M_n"],
        V=adata.layers["velocity_N"],
        enforce=True,
    )
Use new-RNA based pca and umap for velocity calculation and projection in a kinetics or one-shot experiment.
Note that currently the velocity_N function only considers labeling data and removes splicing data if they exist.

Parameters
----------
adata: :class:`~anndata.AnnData`
    AnnData object that stores data for the kinetics or one-shot experiment; must include `X_new, X_total` layers.
group: `str` or None (default: None)
    The cell group that will be used to calculate velocity in each separate group. This is useful if your data
    comes from different labeling conditions, etc.
recalculate_pca: `bool` (default: True)
    Whether to recalculate pca with the new RNA data. If set to False, you need to make sure the pca was already
    generated from new RNA.
recalculate_umap: `bool` (default: True)
    Whether to recalculate umap with the new RNA data. If set to False, you need to make sure the umap was already
    generated from new RNA.
del_2nd_moments: `None` or `bool` (default: None)
    Whether to remove second moments or covariances. Argument passed to the `dynamics` function.

Returns
-------
Nothing, but the adata object is updated with the low dimensional (umap or pca) velocity projections based on the
new RNA or pca based RNA velocities.
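A hedged usage sketch. The AnnData object must already carry X_new and X_total layers from a labeling experiment and must have been preprocessed so that adata.uns['pp']['experiment_type'] is 'one-shot' or 'kin'; the file name below is hypothetical.

import anndata as ad

adata = ad.read_h5ad("labeled_kinetics.h5ad")   # hypothetical preprocessed dataset
velocity_N(adata, group=None, recalculate_pca=True, recalculate_umap=True)
# adata now holds new-RNA based PCA/UMAP embeddings and projected labeling velocities.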
https://github.com/aristoteleo/dynamo-release/blob/4e201df59e4d69ac642bc9de5a853cd038da891f/dynamo/tools/recipes.py#L728-L887
import numpy as np from .moments import moments from .connectivity import neighbors, normalize_knn_graph from .dynamics import dynamics from .dimension_reduction import reduceDimension from .cell_velocities import cell_velocities from .utils import set_transition_genes from ..preprocessing.utils import pca from ..configuration import DynamoAdataConfig def recipe_kin_data( adata, tkey=None, reset_X=True, X_total_layers=False, splicing_total_layers=False, n_top_genes=1000, keep_filtered_cells=None, keep_filtered_genes=None, keep_raw_layers=None, del_2nd_moments=None, ekey="M_t", vkey="velocity_T", basis="umap", rm_kwargs={}, ): from ..preprocessing import recipe_monocle from ..preprocessing.utils import pca, detect_datatype keep_filtered_cells = DynamoAdataConfig.check_config_var( keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY ) keep_filtered_genes = DynamoAdataConfig.check_config_var( keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY ) keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY) del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY) has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata) if has_splicing and has_labeling and splicing_labeling: layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"] elif has_labeling: layers = ["X_new", "X_total"] if not has_labeling: raise Exception( "This recipe is only applicable to kinetics experiment datasets that have " "labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` " "layers." ) if has_splicing and has_labeling: recipe_monocle( adata, tkey=tkey, experiment_type="kin", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) tkey = adata.uns["pp"]["tkey"] moments(adata, group=tkey, layers=layers) CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A) cm_genesums = CM.sum(axis=0) valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0) valid_ind = np.array(valid_ind).flatten() pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca") neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced") conn = normalize_knn_graph(adata.obsp["connectivities"] > 0) moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"]) dynamics( adata, model="deterministic", est_method="twostep", del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) else: recipe_monocle( adata, tkey=tkey, experiment_type="kin", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) dynamics( adata, model="deterministic", est_method="twostep", del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, basis=basis) return adata def recipe_deg_data( adata, tkey=None, reset_X=True, X_total_layers=False, splicing_total_layers=False, n_top_genes=1000, keep_filtered_cells=None, keep_filtered_genes=None, keep_raw_layers=None, del_2nd_moments=True, fraction_for_deg=False, 
ekey="M_s", vkey="velocity_S", basis="umap", rm_kwargs={}, ): from ..preprocessing import recipe_monocle from ..preprocessing.utils import pca, detect_datatype keep_filtered_cells = DynamoAdataConfig.check_config_var( keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY ) keep_filtered_genes = DynamoAdataConfig.check_config_var( keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY ) keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY) has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata) if has_splicing and has_labeling and splicing_labeling: layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"] elif has_labeling: layers = ["X_new", "X_total"] if not has_labeling: raise Exception( "This recipe is only applicable to kinetics experiment datasets that have " "labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` " "layers." ) if has_splicing and has_labeling: recipe_monocle( adata, tkey=tkey, experiment_type="deg", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) tkey = adata.uns["pp"]["tkey"] moments(adata, layers=["X_spliced", "X_unspliced"]) CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_total"].A) cm_genesums = CM.sum(axis=0) valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0) valid_ind = np.array(valid_ind).flatten() pca(adata, CM[:, valid_ind], pca_key="X_total_pca") neighbors(adata, X_data=adata.obsm["X_total_pca"], layer="X_total") conn = normalize_knn_graph(adata.obsp["connectivities"] > 0) moments(adata, conn=conn, group=tkey, layers=layers) dynamics( adata, model="deterministic", est_method="twostep", del_2nd_moments=del_2nd_moments, fraction_for_deg=fraction_for_deg, ) reduceDimension(adata, reduction_method=basis) try: set_transition_genes(adata) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) except BaseException: cell_velocities( adata, min_r2=adata.var.gamma_r2.min(), enforce=True, vkey=vkey, ekey=ekey, basis=basis, ) else: recipe_monocle( adata, tkey=tkey, experiment_type="deg", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) dynamics( adata, model="deterministic", del_2nd_moments=del_2nd_moments, fraction_for_deg=fraction_for_deg, ) reduceDimension(adata, reduction_method=basis) return adata def recipe_mix_kin_deg_data( adata, tkey=None, reset_X=True, X_total_layers=False, splicing_total_layers=False, n_top_genes=1000, keep_filtered_cells=None, keep_filtered_genes=None, keep_raw_layers=None, del_2nd_moments=None, ekey="M_t", vkey="velocity_T", basis="umap", rm_kwargs={}, ): from ..preprocessing import recipe_monocle from ..preprocessing.utils import pca, detect_datatype keep_filtered_cells = DynamoAdataConfig.check_config_var( keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY ) keep_filtered_genes = DynamoAdataConfig.check_config_var( keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY ) keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY) 
del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY) has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata) if has_splicing and has_labeling and splicing_labeling: layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"] elif has_labeling: layers = ["X_new", "X_total"] if not has_labeling: raise Exception( "This recipe is only applicable to kinetics experiment datasets that have " "labeling data (at least either with `'uu', 'ul', 'su', 'sl'` or `'new', 'total'` " "layers." ) if has_splicing and has_labeling: recipe_monocle( adata, tkey=tkey, experiment_type="mix_pulse_chase", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) tkey = adata.uns["pp"]["tkey"] moments(adata, group=tkey, layers=layers) CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A) cm_genesums = CM.sum(axis=0) valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0) valid_ind = np.array(valid_ind).flatten() pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca") neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced") conn = normalize_knn_graph(adata.obsp["connectivities"] > 0) moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"]) dynamics( adata, model="deterministic", est_method="twostep", del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) else: recipe_monocle( adata, tkey=tkey, experiment_type="mix_pulse_chase", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) dynamics( adata, model="deterministic", est_method="twostep", del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) return adata def recipe_onde_shot_data( adata, tkey=None, reset_X=True, X_total_layers=False, splicing_total_layers=False, n_top_genes=1000, keep_filtered_cells=None, keep_filtered_genes=None, keep_raw_layers=None, one_shot_method="sci-fate", del_2nd_moments=None, ekey="M_t", vkey="velocity_T", basis="umap", rm_kwargs={}, ): from ..preprocessing import recipe_monocle from ..preprocessing.utils import pca, detect_datatype keep_filtered_cells = DynamoAdataConfig.check_config_var( keep_filtered_cells, DynamoAdataConfig.RECIPE_KEEP_FILTERED_CELLS_KEY ) keep_filtered_genes = DynamoAdataConfig.check_config_var( keep_filtered_genes, DynamoAdataConfig.RECIPE_KEEP_FILTERED_GENES_KEY ) keep_raw_layers = DynamoAdataConfig.check_config_var(keep_raw_layers, DynamoAdataConfig.RECIPE_KEEP_RAW_LAYERS_KEY) del_2nd_moments = DynamoAdataConfig.check_config_var(del_2nd_moments, DynamoAdataConfig.RECIPE_DEL_2ND_MOMENTS_KEY) has_splicing, has_labeling, splicing_labeling, _ = detect_datatype(adata) if has_splicing and has_labeling and splicing_labeling: layers = ["X_new", "X_total", "X_uu", "X_ul", "X_su", "X_sl"] elif has_labeling: layers = ["X_new", "X_total"] if not has_labeling: raise Exception( "This recipe is only applicable to kinetics experiment datasets that have " "labeling data (at least either with 
`'uu', 'ul', 'su', 'sl'` or `'new', 'total'` " "layers." ) if has_splicing and has_labeling: recipe_monocle( adata, tkey=tkey, experiment_type="one-shot", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) tkey = adata.uns["pp"]["tkey"] moments(adata, group=tkey, layers=layers) CM = np.log1p(adata[:, adata.var.use_for_pca].layers["X_spliced"].A) cm_genesums = CM.sum(axis=0) valid_ind = np.logical_and(np.isfinite(cm_genesums), cm_genesums != 0) valid_ind = np.array(valid_ind).flatten() pca(adata, CM[:, valid_ind], pca_key="X_spliced_pca") neighbors(adata, X_data=adata.obsm["X_spliced_pca"], layer="X_spliced") conn = normalize_knn_graph(adata.obsp["connectivities"] > 0) moments(adata, conn=conn, layers=["X_spliced", "X_unspliced"]) dynamics( adata, model="deterministic", one_shot_method=one_shot_method, del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) else: recipe_monocle( adata, tkey=tkey, experiment_type="one-shot", reset_X=reset_X, X_total_layers=X_total_layers, splicing_total_layers=splicing_total_layers, n_top_genes=n_top_genes, total_layers=True, keep_filtered_cells=keep_filtered_cells, keep_filtered_genes=keep_filtered_genes, keep_raw_layers=keep_raw_layers, **rm_kwargs, ) dynamics( adata, model="deterministic", one_shot_method=one_shot_method, del_2nd_moments=del_2nd_moments, ) reduceDimension(adata, reduction_method=basis) cell_velocities(adata, enforce=True, vkey=vkey, ekey=ekey, basis=basis) return adata
BSD 3-Clause New or Revised License
cutright/imrt-qa-data-miner
IQDM/utilities.py
moving_avg
python
def moving_avg(xyw, avg_len):
    cumsum, moving_aves, x_final = [0], [], []
    for i, y in enumerate(xyw['y'], 1):
        cumsum.append(cumsum[i - 1] + y / xyw['w'][i - 1])
        if i >= avg_len:
            moving_ave = (cumsum[i] - cumsum[i - avg_len]) / avg_len
            moving_aves.append(moving_ave)
    x_final = [xyw['x'][i] for i in range(avg_len - 1, len(xyw['x']))]
    return x_final, moving_aves
Calculate a moving average for a given averaging length

:param xyw: output from collapse_into_single_dates
:type xyw: dict
:param avg_len: average of these number of points, i.e., look-back window
:type avg_len: int
:return: list of x values, list of y values
:rtype: tuple
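A small worked example combining collapse_into_single_dates (defined in the context below) with moving_avg; the x/y values are made up to keep the arithmetic easy to follow.

x = [1, 1, 2, 3]                          # e.g. ordinal dates, with two reports on day 1
y = [2.0, 4.0, 6.0, 8.0]
xyw = collapse_into_single_dates(x, y)    # {'x': [1, 2, 3], 'y': [6.0, 6.0, 8.0], 'w': [2, 1, 1]}
dates, averages = moving_avg(xyw, avg_len=2)
print(dates, averages)                    # [2, 3] [4.5, 7.0]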
https://github.com/cutright/imrt-qa-data-miner/blob/85abf9dc66a139c02574c386377f46f0944c5893/IQDM/utilities.py#L120-L139
from os.path import isdir, join, splitext, normpath from os import walk, listdir import zipfile from datetime import datetime from dateutil.parser import parse as date_parser import numpy as np import codecs DELIMITER = ',' ALTERNATE = '^' def are_all_strings_in_text(text, list_of_strings): for str_to_find in list_of_strings: if str_to_find not in text: return False return True def get_csv(data, columns): clean_csv = [str(data[column]).replace(DELIMITER, ALTERNATE) for column in columns] return DELIMITER.join(clean_csv) def load_csv_file(file_path): with codecs.open(file_path, 'r', encoding='utf-8', errors='ignore') as doc: return [line.split(',') for line in doc] def import_csv(file_path, day_first=False): raw_data = load_csv_file(file_path) keys = raw_data.pop(0) keys = [key.strip() for key in keys if key.strip()] + ['file_name'] data = {key: [] for key in keys} for row in raw_data: for col, key in enumerate(keys): data[key].append(row[col]) sorted_data = {key: [] for key in keys} sorted_data['date_time_obj'] = [] date_time_objs = get_date_times(data, day_first=day_first) for i in get_sorted_indices(date_time_objs): for key in keys: sorted_data[key].append(data[key][i]) sorted_data['date_time_obj'].append(date_time_objs[i]) return sorted_data def get_file_names_from_csv_file(file_path): raw_data = load_csv_file(file_path) column_headers = raw_data.pop(0) fp_start = len(column_headers) file_names = [] for row in raw_data: file_name_fields = [value for value in row[fp_start:]] file_name = ','.join(file_name_fields) file_names.append(normpath(file_name.strip())) return file_names def collapse_into_single_dates(x, y): x_collapsed = [x[0]] y_collapsed = [y[0]] w_collapsed = [1] for n in range(1, len(x)): if x[n] == x_collapsed[-1]: y_collapsed[-1] = (y_collapsed[-1] + y[n]) w_collapsed[-1] += 1 else: x_collapsed.append(x[n]) y_collapsed.append(y[n]) w_collapsed.append(1) return {'x': x_collapsed, 'y': y_collapsed, 'w': w_collapsed}
MIT License
nastools/homeassistant
homeassistant/components/light/demo.py
DemoLight.__init__
python
def __init__(self, name, state, rgb=None, ct=None, brightness=180,
             xy_color=(.5, .5), white=200, effect_list=None, effect=None):
    self._name = name
    self._state = state
    self._rgb = rgb
    self._ct = ct or random.choice(LIGHT_TEMPS)
    self._brightness = brightness
    self._xy_color = xy_color
    self._white = white
    self._effect_list = effect_list
    self._effect = effect
Initialize the light.
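Instantiation mirroring setup_platform in the context below; it only requires the homeassistant package to be importable, since the constructor just stores its arguments.

bed = DemoLight("Bed Light", False,
                effect_list=LIGHT_EFFECT_LIST, effect=LIGHT_EFFECT_LIST[0])
kitchen = DemoLight("Kitchen Lights", True, LIGHT_COLORS[1], LIGHT_TEMPS[0])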
https://github.com/nastools/homeassistant/blob/7ca1180bd42713f2d77bbc3f0b27b231ba8784aa/homeassistant/components/light/demo.py#L41-L53
import random from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_WHITE_VALUE, ATTR_XY_COLOR, SUPPORT_BRIGHTNESS, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_RGB_COLOR, SUPPORT_WHITE_VALUE, Light) LIGHT_COLORS = [ [237, 224, 33], [255, 63, 111], ] LIGHT_EFFECT_LIST = ['rainbow', 'none'] LIGHT_TEMPS = [240, 380] SUPPORT_DEMO = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_RGB_COLOR | SUPPORT_WHITE_VALUE) def setup_platform(hass, config, add_devices_callback, discovery_info=None): add_devices_callback([ DemoLight("Bed Light", False, effect_list=LIGHT_EFFECT_LIST, effect=LIGHT_EFFECT_LIST[0]), DemoLight("Ceiling Lights", True, LIGHT_COLORS[0], LIGHT_TEMPS[1]), DemoLight("Kitchen Lights", True, LIGHT_COLORS[1], LIGHT_TEMPS[0]) ]) class DemoLight(Light):
MIT License
minio/minio-py
minio/helpers.py
check_non_empty_string
python
def check_non_empty_string(string):
    try:
        if not string.strip():
            raise ValueError()
    except AttributeError as exc:
        raise TypeError() from exc
Check whether given string is not empty.
https://github.com/minio/minio-py/blob/77c3e4086f4d4f3fc5f09b52efe03797ef418926/minio/helpers.py#L216-L222
from __future__ import absolute_import, division, unicode_literals import base64 import errno import hashlib import math import os import re import urllib.parse from queue import Queue from threading import BoundedSemaphore, Thread from .sse import Sse, SseCustomerKey from .time import to_iso8601utc MAX_MULTIPART_COUNT = 10000 MAX_MULTIPART_OBJECT_SIZE = 5 * 1024 * 1024 * 1024 * 1024 MAX_PART_SIZE = 5 * 1024 * 1024 * 1024 MIN_PART_SIZE = 5 * 1024 * 1024 _VALID_BUCKETNAME_REGEX = re.compile( '^[A-Za-z0-9][A-Za-z0-9\\.\\-\\_\\:]{1,61}[A-Za-z0-9]$') _VALID_BUCKETNAME_STRICT_REGEX = re.compile( '^[a-z0-9][a-z0-9\\.\\-]{1,61}[a-z0-9]$') _VALID_IP_ADDRESS = re.compile( r'^(\d+\.){3}\d+$') _ALLOWED_HOSTNAME_REGEX = re.compile( '^((?!-)(?!_)[A-Z_\\d-]{1,63}(?<!-)(?<!_)\\.)*((?!_)(?!-)' + '[A-Z_\\d-]{1,63}(?<!-)(?<!_))$', re.IGNORECASE) _EXTRACT_REGION_REGEX = re.compile('s3[.-]?(.+?).amazonaws.com') def quote(resource, safe='/', encoding=None, errors=None): return urllib.parse.quote( resource, safe=safe, encoding=encoding, errors=errors, ).replace("%7E", "~") def queryencode(query, safe='', encoding=None, errors=None): return quote(query, safe, encoding, errors) def headers_to_strings(headers, titled_key=False): def _get_key(key): return key.title() if titled_key else key def _get_value(value): return re.sub( r"Credential=([^/]+)", "Credential=*REDACTED*", re.sub( r"Signature=([0-9a-f]+)", "Signature=*REDACTED*", value if isinstance(value, str) else str(value), ), ) if titled_key else value return "\n".join( [ f"{_get_key(key)}: {_get_value(value)}" for key, value in headers.items() ] ) def _validate_sizes(object_size, part_size): if part_size > 0: if part_size < MIN_PART_SIZE: raise ValueError( f"part size {part_size} is not supported; minimum allowed 5MiB", ) if part_size > MAX_PART_SIZE: raise ValueError( f"part size {part_size} is not supported; minimum allowed 5GiB", ) if object_size >= 0: if object_size > MAX_MULTIPART_OBJECT_SIZE: raise ValueError( f"object size {object_size} is not supported; " f"maximum allowed 5TiB" ) elif part_size <= 0: raise ValueError( "valid part size must be provided when object size is unknown", ) def _get_part_info(object_size, part_size): _validate_sizes(object_size, part_size) if object_size < 0: return part_size, -1 if part_size > 0: part_size = min(part_size, object_size) return part_size, math.ceil(object_size / part_size) if part_size else 1 part_size = math.ceil( math.ceil(object_size / MAX_MULTIPART_COUNT) / MIN_PART_SIZE, ) * MIN_PART_SIZE return part_size, math.ceil(object_size / part_size) if part_size else 1 def get_part_info(object_size, part_size): part_size, part_count = _get_part_info(object_size, part_size) if part_count > MAX_MULTIPART_COUNT: raise ValueError( f"object size {object_size} and part size {part_size} " f"make more than {MAX_MULTIPART_COUNT} parts for upload" ) return part_size, part_count def read_part_data(stream, size, part_data=b'', progress=None): size -= len(part_data) while size: data = stream.read(size) if not data: break if not isinstance(data, bytes): raise ValueError("read() must return 'bytes' object") part_data += data size -= len(data) if progress: progress.update(len(data)) return part_data def makedirs(path): try: if path: os.makedirs(path) except OSError as exc: if exc.errno != errno.EEXIST: raise if not os.path.isdir(path): raise ValueError(f"path {path} is not a directory") from exc def check_bucket_name(bucket_name, strict=False): bucket_name = str(bucket_name).strip() if not bucket_name: raise ValueError('Bucket 
name cannot be empty.') if len(bucket_name) < 3: raise ValueError('Bucket name cannot be less than' ' 3 characters.') if len(bucket_name) > 63: raise ValueError('Bucket name cannot be greater than' ' 63 characters.') match = _VALID_IP_ADDRESS.match(bucket_name) if match: raise ValueError('Bucket name cannot be an ip address') unallowed_successive_chars = ['..', '.-', '-.'] if any(x in bucket_name for x in unallowed_successive_chars): raise ValueError('Bucket name contains invalid ' 'successive chars ' + str(unallowed_successive_chars) + '.') if strict: match = _VALID_BUCKETNAME_STRICT_REGEX.match(bucket_name) if (not match) or match.end() != len(bucket_name): raise ValueError('Bucket name contains invalid ' 'characters (strictly enforced).') match = _VALID_BUCKETNAME_REGEX.match(bucket_name) if (not match) or match.end() != len(bucket_name): raise ValueError( f"Bucket name does not follow S3 standards. Bucket: {bucket_name}", )
Apache License 2.0
aceinna/python-openimu
tools/bootloader.py
OpenIMU.set_fields
python
def set_fields(self, field_value_pairs, ws=False): self.set_quiet() num_fields = len(field_value_pairs) C = [0x55, 0x55, ord('S'), ord('F'), num_fields * 4 + 1, num_fields] FIELD = 0 VALUE = 1 for field_value in field_value_pairs: if field_value[FIELD] == 1: self.odr_setting = field_value[VALUE] field_msb = (field_value[FIELD] & 0xFF00) >> 8 field_lsb = field_value[FIELD] & 0x00FF value_msb = (field_value[VALUE] & 0xFF00) >> 8 value_lsb = field_value[VALUE] & 0x0FF C.insert(len(C), field_msb) C.insert(len(C), field_lsb) C.insert(len(C), value_msb) C.insert(len(C), value_lsb) crc = self.calc_crc(C[2:C[4]+5]) crc_msb = (crc & 0xFF00) >> 8 crc_lsb = (crc & 0x00FF) C.insert(len(C), crc_msb) C.insert(len(C), crc_lsb) self.write(C) R = self.read(num_fields * 2 + 1 + 7) data = [] if R[0] == 85 and R[1] == 85: packet_crc = 256 * R[-2] + R[-1] if self.calc_crc(R[2:R[4]+5]) == packet_crc: if R[2] == 0 and R[3] == 0: print('SET FIELD ERROR/FAILURE') return else: self.packet_type = '{0:1c}'.format( R[2]) + '{0:1c}'.format(R[3]) data = self.parse_packet(R[5:R[4]+5], ws) return data
Executes the 380 SF command for an array of (field, value) pairs. The SF command sets temporary settings for fields on the 380.
https://github.com/aceinna/python-openimu/blob/5cb6d35fb683d5847c0d268c71cdf63c895c5d34/tools/bootloader.py#L340-L378
import serial import math import string import time import sys import collections import glob class OpenIMU: def __init__(self, ws=False): self.ws = ws self.ser = None self.synced = 0 self.stream_mode = 0 self.device_id = 0 self.connected = 0 self.odr_setting = 0 self.logging = 0 self.logger = None self.packet_size = 0 self.packet_type = 0 self.elapsed_time_sec = 0 self.data = {} self.filename = sys.argv[1] self.ID = '' self.port = 'none' self.boot = 0 def find_device(self): while not self.autobaud(self.find_ports()): time.sleep(2) def find_ports(self): print('scanning ports') if sys.platform.startswith('win'): ports = ['COM%s' % (i + 1) for i in range(256)] elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'): ports = glob.glob('/dev/tty[A-Za-z]*') elif sys.platform.startswith('darwin'): ports = glob.glob('/dev/tty.*') else: raise EnvironmentError('Unsupported platform') result = [] for port in ports: try: print('Trying: ' + port) s = serial.Serial(port) s.close() result.append(port) except (OSError, serial.SerialException): pass return result def autobaud(self, ports): for port in ports: for baud in [230400, 115200, 57600, 38400]: self.open(port, baud) self.sync() if self.stream_mode: print('Connected Stream Mode ' + '{0:d}'.format(baud) + ' ' + port) break else: self.ser.close() if self.stream_mode == 0: for baud in [230400, 115200, 57600, 38400]: self.open(port, baud) self.device_id = self.get_id_str() if self.device_id: print('Connected Polled Mode ' + '{0:d}'.format(baud)) odr = self.read_fields([0x0001], 1) if odr: print('Saved ODR: ' + '{0:d}'.format(odr[0][1])) self.odr_setting = odr[0][1] self.connected = 1 self.port = port return True else: self.close() else: odr = self.read_fields([0x0001], 1) if odr: print('Current ODR: ' + '{0:d}'.format(odr[0][1])) self.odr_setting = odr[0][1] self.device_id = self.get_id_str() self.restore_odr() self.connected = 1 return True else: print('failed to get id string') return False return False def get_latest(self): if self.stream_mode == 1: return self.data else: return {'error': 'not streaming'} def start_log(self, data): self.logging = 1 self.logger = file_storage.LogIMU380Data(self, data) if self.ws == False and self.odr_setting != 0: self.connect() def stop_log(self): self.logging = 0 self.logger.close() self.logger = None def ping_test(self): self.stream_mode = 0 C = [0x55, 0x55, 0x50, 0x4B, 0x00] crc = self.calc_crc(C[2:4] + [0x00]) crc_msb = (crc & 0xFF00) >> 8 crc_lsb = (crc & 0x00FF) C.insert(len(C), crc_msb) C.insert(len(C), crc_lsb) self.reset_buffer() self.write(C) R = self.read(7) if R == bytearray(C): return True else: return False def get_fields(self, fields, ws=False): self.set_quiet() num_fields = len(fields) C = [0x55, 0x55, ord('G'), ord('F'), num_fields * 2 + 1, num_fields] for field in fields: field_msb = (field & 0xFF00) >> 8 field_lsb = field & 0x00FF C.insert(len(C), field_msb) C.insert(len(C), field_lsb) crc = self.calc_crc(C[2:C[4]+5]) crc_msb = (crc & 0xFF00) >> 8 crc_lsb = (crc & 0x00FF) C.insert(len(C), crc_msb) C.insert(len(C), crc_lsb) self.write(C) R = self.read(num_fields * 4 + 1 + 7) data = [] if R and R[0] == 85 and R[1] == 85: packet_crc = 256 * R[-2] + R[-1] calc_crc = self.calc_crc(R[2:R[4]+5]) if packet_crc == calc_crc: self.packet_type = '{0:1c}'.format( R[2]) + '{0:1c}'.format(R[3]) data = self.parse_packet(R[5:R[4]+5], ws) return data def read_fields(self, fields, ws=False): self.set_quiet() num_fields = len(fields) C = [0x55, 0x55, ord('R'), ord('F'), num_fields * 2 + 1, 
num_fields] for field in fields: field_msb = (field & 0xFF00) >> 8 field_lsb = field & 0x00FF C.insert(len(C), field_msb) C.insert(len(C), field_lsb) crc = self.calc_crc(C[2:C[4]+5]) crc_msb = (crc & 0xFF00) >> 8 crc_lsb = (crc & 0x00FF) C.insert(len(C), crc_msb) C.insert(len(C), crc_lsb) self.write(C) R = self.read(num_fields * 4 + 1 + 7) data = [] if len(R) and R[0] == 85 and R[1] == 85: packet_crc = 256 * R[-2] + R[-1] calc_crc = self.calc_crc(R[2:R[4]+5]) if packet_crc == calc_crc: self.packet_type = '{0:1c}'.format( R[2]) + '{0:1c}'.format(R[3]) data = self.parse_packet(R[5:R[4]+5], ws) return data def write_fields(self, field_value_pairs, ws=False): self.set_quiet() num_fields = len(field_value_pairs) C = [0x55, 0x55, ord('W'), ord('F'), num_fields * 4 + 1, num_fields] FIELD = 0 VALUE = 1 for field_value in field_value_pairs: field_msb = (field_value[FIELD] & 0xFF00) >> 8 field_lsb = field_value[FIELD] & 0x00FF if isinstance(field_value[VALUE], int): value_msb = (field_value[VALUE] & 0xFF00) >> 8 value_lsb = field_value[VALUE] & 0x0FF elif isinstance(field_value[VALUE], str): value_msb = ord(field_value[VALUE][0]) value_lsb = ord(field_value[VALUE][1]) C.insert(len(C), field_msb) C.insert(len(C), field_lsb) C.insert(len(C), value_msb) C.insert(len(C), value_lsb) crc = self.calc_crc(C[2:C[4]+5]) crc_msb = (crc & 0xFF00) >> 8 crc_lsb = (crc & 0x00FF) C.insert(len(C), crc_msb) C.insert(len(C), crc_lsb) self.write(C) time.sleep(1.0) R = self.read(num_fields * 2 + 1 + 7) print(R) data = [] if R[0] == 85 and R[1] == 85: packet_crc = 256 * R[-2] + R[-1] if self.calc_crc(R[2:R[4]+5]) == packet_crc: if R[2] == 0 and R[3] == 0: print('SET FIELD ERROR/FAILURE') return else: self.packet_type = '{0:1c}'.format( R[2]) + '{0:1c}'.format(R[3]) data = self.parse_packet(R[5:R[4]+5], ws) return data
Apache License 2.0
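A minimal usage sketch for the OpenIMU.set_fields entry above. It is only illustrative: it assumes a device is attached, that the script is launched with a filename argument (the constructor reads sys.argv[1]), and that the module import name matches the file path; the pair (0x0001, 100) is just an example ODR setting.

from bootloader import OpenIMU   # module name assumed from the file path above

imu = OpenIMU()                  # note: the constructor reads sys.argv[1] for a filename
imu.find_device()                # scan serial ports and connect
# Temporarily set field 0x0001 (the ODR register used elsewhere in this file) to 100.
data = imu.set_fields([(0x0001, 100)])
print(data)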
itisfoundation/osparc-simcore
packages/postgres-database/src/simcore_postgres_database/cli.py
info
python
def info(): click.echo("Using alembic {}.{}.{}".format(*alembic_version)) cfg = _load_cache() click.echo(f"Saved config: {hide_dict_pass(cfg)} @ {discovered_cache}") config = _get_alembic_config_from_cache(cfg) if config: click.echo("Revisions history ------------") alembic.command.history(config) click.echo("Current version: -------------") alembic.command.current(config, verbose=True)
Displays the discovered config and other Alembic information.
https://github.com/itisfoundation/osparc-simcore/blob/a50b61735381231abba2cfcd57f3314785c656b0/packages/postgres-database/src/simcore_postgres_database/cli.py#L233-L244
import json import json.decoder import logging import os import sys from copy import deepcopy from logging.config import fileConfig from pathlib import Path from typing import Dict, Optional import alembic.command import click import docker from alembic import __version__ as __alembic_version__ from alembic.config import Config as AlembicConfig from simcore_postgres_database.models import * from simcore_postgres_database.utils import ( build_url, hide_dict_pass, hide_url_pass, raise_if_not_responsive, ) from tenacity import Retrying, after_log, wait_fixed alembic_version = tuple(int(v) for v in __alembic_version__.split(".")[0:3]) current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).parent.resolve() default_ini = current_dir / "alembic.ini" migration_dir = current_dir / "migration" discovered_cache = os.path.expanduser("~/.simcore_postgres_database_cache.json") log = logging.getLogger("root") if __name__ == "__main__": fileConfig(default_ini) def safe(if_fails_return=False): def decorate(func): def safe_func(*args, **kargs): try: res = func(*args, **kargs) return res except RuntimeError as err: log.info( "%s failed: %s", func.__name__, str(err), exc_info=True, stack_info=True, ) except Exception: log.info( "%s failed unexpectedly", func.__name__, exc_info=True, stack_info=True, ) return deepcopy(if_fails_return) return safe_func return decorate @safe(if_fails_return=None) def _get_service_published_port(service_name: str) -> int: client = docker.from_env() services = [x for x in client.services.list() if service_name in x.name] if not services: raise RuntimeError( "Cannot find published port for service '%s'. Probably services still not up" % service_name ) service_endpoint = services[0].attrs["Endpoint"] if "Ports" not in service_endpoint or not service_endpoint["Ports"]: raise RuntimeError( "Cannot find published port for service '%s' in endpoint. 
Probably services still not up" % service_name ) published_port = service_endpoint["Ports"][0]["PublishedPort"] return int(published_port) def _get_alembic_config_from_cache( force_cfg: Optional[Dict] = None, ) -> Optional[AlembicConfig]: try: if force_cfg: cfg = force_cfg else: cfg = _load_cache(raise_if_error=True) url = build_url(**cfg) except Exception: log.debug( "Cannot open cache or cannot build URL", exc_info=True, stack_info=True ) click.echo("Invalid database config, please run discover first", err=True) _reset_cache() return None config = AlembicConfig(default_ini) config.set_main_option("script_location", str(migration_dir)) config.set_main_option("sqlalchemy.url", str(url)) return config def _load_cache(*, raise_if_error=False) -> Dict: try: with open(discovered_cache) as fh: cfg = json.load(fh) except (FileNotFoundError, json.decoder.JSONDecodeError): if raise_if_error: raise return {} return cfg def _reset_cache(): if os.path.exists(discovered_cache): os.remove(discovered_cache) click.echo("Removed %s" % discovered_cache) DEFAULT_HOST = "postgres" DEFAULT_PORT = 5432 DEFAULT_DB = "simcoredb" @click.group() def main(): @main.command() @click.option("--user", "-u") @click.option("--password", "-p") @click.option("--host") @click.option("--port", type=int) @click.option("--database", "-d") def discover(**cli_inputs) -> Optional[Dict]: click.echo("Discovering database ...") cli_cfg = {key: value for key, value in cli_inputs.items() if value is not None} def _test_cached() -> Dict: cfg = _load_cache(raise_if_error=True) cfg.update(cli_cfg) return cfg def _test_env() -> Dict: cfg = { "user": os.getenv("POSTGRES_USER"), "password": os.getenv("POSTGRES_PASSWORD"), "host": os.getenv("POSTGRES_HOST", DEFAULT_HOST), "port": int(os.getenv("POSTGRES_PORT") or DEFAULT_PORT), "database": os.getenv("POSTGRES_DB", DEFAULT_DB), } cfg.update(cli_cfg) return cfg def _test_swarm() -> Dict: cfg = _test_env() cfg["host"] = "127.0.0.1" cfg["port"] = _get_service_published_port(cli_cfg.get("host", DEFAULT_HOST)) cfg.setdefault("database", DEFAULT_DB) return cfg for test in [_test_cached, _test_env, _test_swarm]: try: click.echo("-> {0.__name__}: {0.__doc__}".format(test)) cfg: Dict = test() cfg.update(cli_cfg) url = build_url(**cfg) click.echo(f"ping {test.__name__}: {hide_url_pass(url)} ...") raise_if_not_responsive(url, verbose=False) print("Saving config ") click.echo(f"Saving config at {discovered_cache}: {hide_dict_pass(cfg)}") with open(discovered_cache, "wt") as fh: json.dump(cfg, fh, sort_keys=True, indent=4) print("Saving config at ") click.secho( f"{test.__name__} succeeded: {hide_url_pass(url)} is online", blink=False, bold=True, fg="green", ) return cfg except Exception as err: inline_msg = str(err).replace("\n", ". ") click.echo(f"<- {test.__name__} failed : {inline_msg}") _reset_cache() click.secho("Sorry, database not found !!", blink=False, bold=True, fg="red") return None @main.command()
MIT License
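As a sketch of how the click commands in the entry above can be exercised from Python, the `main` group can be driven with click's test runner; the import path is assumed from the file location, and a database must be reachable for `discover` to succeed.

from click.testing import CliRunner
from simcore_postgres_database.cli import main  # import path assumed from the file path above

runner = CliRunner()
# `discover` caches the connection settings; `info` then prints the Alembic history.
runner.invoke(main, ["discover", "--user", "postgres", "--password", "secret"])
result = runner.invoke(main, ["info"])
print(result.output)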
phype/purei9_unofficial
src/purei9_unofficial/common.py
AbstractRobot.stopclean
python
def stopclean(self): raise Exception("Not implemented")
Tell the Robot to stop cleaning
https://github.com/phype/purei9_unofficial/blob/fd07b1d637f9310a394e3290d9f9478078581155/src/purei9_unofficial/common.py#L92-L94
from enum import Enum BatteryStatus = { 1: "Dead", 2: "CriticalLow", 3: "Low", 4: "Medium", 5: "Normal", 6: "High", } RobotStates = { 1: "Cleaning", 2: "Paused Cleaning", 3: "Spot Cleaning", 4: "Paused Spot Cleaning", 5: "Return", 6: "Paused Return", 7: "Return for Pitstop", 8: "Paused Return for Pitstop", 9: "Charging", 10: "Sleeping", 11: "Error", 12: "Pitstop", 13: "Manual Steering", 14: "Firmware Upgrade" } class PowerMode(Enum): LOW = 1 MEDIUM = 2 HIGH = 3 class ZoneType(Enum): clean = 0 avoid = 1 class CleaningSessionResult(Enum): unknown0 = 0 unknown1 = 1 unknown2 = 2 unknown3 = 3 endedNotFindingCharger = 4 unknown5 = 4 unknown6 = 5 unknown7 = 6 unknown8 = 7 cleaningFinishedSuccessfulInCharger = 9 cleaningFinishedSuccessfulInStartPose = 10 abortedByUser = 11 unknown12 = 12 unknown13 = 13 unknown14 = 14 unknown15 = 15 unknown16 = 16 unknown17 = 17 class CleaningSession: def __init__(self, starttime, duration, cleandearea, endstatus=None, imageurl=None, mapid=None, error=None): self.starttime = starttime self.duration = duration self.cleandearea = cleandearea self.endstatus = endstatus self.imageurl = imageurl self.mapid = mapid self.error = error class AbstractRobot: def __init__(self): pass def getid(self): raise Exception("Not implemented") def getstatus(self): raise Exception("Not implemented") def getfirmware(self): raise Exception("Not implemented") def getname(self): raise Exception("Not implemented") def startclean(self): raise Exception("Not implemented")
MIT License
edisonleeeee/graphgallery
graphgallery/gallery/trainer.py
Trainer.build
python
def build(self, **kwargs): if self._graph is None: raise RuntimeError("Please call 'trainer.setup_graph(graph)' first.") use_tfn = kwargs.get("use_tfn", True) if self.backend == "tensorflow": with self.backend.device(self.device): self.model, kwargs = gf.wrapper(self.model_step)(**kwargs) if use_tfn: self.model.use_tfn() else: kwargs.pop("use_tfn", None) model, kwargs = gf.wrapper(self.model_step)(**kwargs) self.model = model.to(self.device) self.cfg.model.merge_from_dict(kwargs) return self
This method is used to build your model; it accepts only keyword arguments defined in your method 'model_step'. Note: ----- This method should be called after `process`. Commonly used keyword arguments: -------------------------------- hids: int or a list of them, hidden units for each hidden layer. acts: string or a list of them, activation functions for each layer. dropout: float scalar, dropout used in the model. lr: float scalar, learning rate used for the model. weight_decay: float scalar, weight decay used for the model weights. bias: bool, whether to use bias in each layer. use_tfn: bool, this argument is only used for the TensorFlow backend; if `True`, it will decorate the model training and testing with `tf.function` (See `graphgallery.nn.modes.tf_engine.TFEngine`). By default, it is `True`, which can accelerate training and inference, but it may cause several errors. Other arguments (if any) will be passed into your method 'model_step'.
https://github.com/edisonleeeee/graphgallery/blob/4eec9c5136bda14809bd22584b26cc346cdb633b/graphgallery/gallery/trainer.py#L161-L204
import os import sys import warnings import importlib import inspect import os.path as osp import numpy as np from graphgallery.gallery.callbacks import ModelCheckpoint, CallbackList, EarlyStopping, ProgbarLogger, History from torch.utils.data import DataLoader, Dataset import graphgallery as gg from graphgallery import functional as gf from graphgallery.data.io import makedirs_from_filepath from graphgallery.gallery import Model from graphgallery.utils import Progbar warnings.filterwarnings( 'ignore', message='.*Converting sparse IndexedSlices to a dense Tensor of unknown shape.*') warnings.filterwarnings( 'ignore', message='.*to a dense Tensor of unknown shape.*') def format_doc(d): msg = "" for i, (k, v) in enumerate(d.items()): if v != "UNSPECIDIED": msg += f"({i + 1}) `{k}`, Default is `{v}` \n" else: msg += f"({i + 1}) `{k}`, UNSPECIDIED position argument\n" return msg def get_doc_dict(func): ArgSpec = inspect.getfullargspec(func) args = ArgSpec.args if ArgSpec.args else [] args = args[1:] if args[0] == "self" else args defaults = ArgSpec.defaults if ArgSpec.defaults else [] delta_l = len(args) - len(defaults) defaults = ["UNSPECIDIED"] * delta_l + list(defaults) d = dict(zip(args, defaults)) return d def make_docs(*func): d = {} for f in func: d.update(get_doc_dict(f)) return format_doc(d) def unravel_batch(batch): inputs = labels = out_index = None if isinstance(batch, (list, tuple)): inputs = batch[0] labels = batch[1] if len(batch) > 2: out_index = batch[-1] else: inputs = batch if isinstance(labels, (list, tuple)) and len(labels) == 1: labels = labels[0] if isinstance(out_index, (list, tuple)) and len(out_index) == 1: out_index = out_index[0] return inputs, labels, out_index class Trainer(Model): def setup_cfg(self): try: task_module = self.__module__.split('.')[2] gallery_module = '.'.join(__name__.split('.')[:-1]) except: default_setup = gg.gallery.nodeclas.default default_setup.default_cfg_setup(self.cfg) print('Something error when finding default setup file. Using Node Classificatioon config as default', file=sys.stderr) return try: default_setup = importlib.import_module(f".{task_module}.default", gallery_module) except ModuleNotFoundError: raise RuntimeError(f"default setup function `{gallery_module}.{task_module}.default.default_cfg_setup` not found!") default_setup.default_cfg_setup(self.cfg) @np.deprecate(old_name="make_data", message=("the method `trainer.make_data` is currently deprecated from 0.9.0," " please use `trainer.setup_graph` instead.")) def make_data(self, *args, **kwargs): return self.setup_graph(*args, **kwargs) def setup_graph(self, graph, graph_transform=None, device=None, **kwargs): self.empty_cache() model = self.model if model is not None and hasattr(model, 'empty_cache'): model.empty_cache() self.graph = gf.get(graph_transform)(graph) cfg = self.cfg.data if device is not None: self.data_device = gf.device(device, self.backend) else: self.data_device = self.device cfg.device = device _, kwargs = gf.wrapper(self.data_step)(**kwargs) kwargs['graph_transform'] = graph_transform cfg.merge_from_dict(kwargs) for k, v in kwargs.items(): if k.endswith("transform"): setattr(self.transform, k, gf.get(v)) return self def data_step(self, *args, **kwargs): raise NotImplementedError
MIT License
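A hedged sketch of the build call documented above. It assumes a concrete Trainer subclass (the `GCN` name and its constructor are hypothetical) and an already-loaded `graph`; only the keyword names are taken from the docstring of `build`.

# Hypothetical concrete subclass of Trainer; `GCN` and its constructor are assumptions.
trainer = GCN(device="cpu")
trainer.setup_graph(graph)           # must be called before build()
trainer.build(hids=[32], acts="relu", dropout=0.5,
              lr=0.01, weight_decay=5e-4, bias=True)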
ascribe/pyspool
spool/spool.py
Spool.__init__
python
def __init__(self, testnet=False, service='blockr', username='', password='', host='', port='', fee=None, token=None): self.testnet = testnet self._netcode = 'XTN' if testnet else 'BTC' self._t = Transactions(service=service, testnet=testnet, username=username, password=password, host=host, port=port) self._spents = Queue(maxsize=self.SPENTS_QUEUE_MAXSIZE) self.fee = fee or self.FEE self.token = token or self.TOKEN
Args: testnet (bool): Whether to use the mainnet or testnet. Defaults to the mainnet (:const:`False`). service (str): Bitcoin communication interface: ``'blockr'``, ``'daemon'``, or ``'regtest'``. ``'blockr'`` refers to the public api, whereas ``'daemon'`` and ``'regtest'`` refer to the jsonrpc interface. Defaults to ``'blockr'``. username (str): username for jsonrpc communications password (str): password for jsonrpc communications host (str): hostname of the bitcoin node when using jsonrpc port (str): port number of the bitcoin node when using jsonrpc fee (int): transaction fee token (int): token
https://github.com/ascribe/pyspool/blob/f8b10df1e7d2ea7950dde433c1cb6d5225112f4f/spool/spool.py#L71-L96
from __future__ import absolute_import, division, unicode_literals from future import standard_library standard_library.install_aliases() from builtins import object, range from past.utils import old_div from queue import Queue from transactions import Transactions from .spoolverb import Spoolverb from .utils import dispatch class SpoolFundsError(Exception): def __init__(self, message): self.message = message def __str__(self): return self.message class Spool(object): FEE = 30000 TOKEN = 3000 SPENTS_QUEUE_MAXSIZE = 50
Apache License 2.0
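A small constructor sketch for the Spool entry above; the import path is assumed from the file location and the jsonrpc credentials are placeholders.

from spool.spool import Spool  # import path assumed from the file path above

# Talk to a local bitcoin daemon on testnet instead of the default 'blockr' service.
spool = Spool(testnet=True, service='daemon',
              username='rpcuser', password='rpcpass',
              host='localhost', port='18332')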
f1ashhimself/uisoup
uisoup/interfaces/i_element.py
IElement.is_checked
python
def is_checked(self):
Indicates checked state.
https://github.com/f1ashhimself/uisoup/blob/c2e800ebabdbcef2dea8eecad1d01450bc636332/uisoup/interfaces/i_element.py#L100-L103
__author__ = 'f1ashhimself@gmail.com' import re from inspect import ismethod from types import FunctionType from abc import ABCMeta, abstractmethod, abstractproperty import xml.dom.minidom from ..utils.common import CommonUtils if CommonUtils.is_python_3(): unicode = str class IElement(object): __metaclass__ = ABCMeta @abstractmethod def click(self, x_offset=None, y_offset=None): @abstractmethod def right_click(self, x_offset=None, y_offset=None): @abstractmethod def double_click(self, x_offset=None, y_offset=None): @abstractmethod def drag_to(self, x, y, x_offset=None, y_offset=None, smooth=True): @abstractproperty def proc_id(self): @abstractproperty def is_top_level_window(self): @abstractproperty def is_selected(self): @abstractproperty
Apache License 2.0
nervanasystems/ngraph-neon
src/neon/util/generics.py
TypeMethods.get_handler
python
def get_handler(self, dispatch_type): try: return self.type_cache[dispatch_type] except KeyError: methods = [] for t in dispatch_type.__mro__: f = self while True: method, next_method_arg = f.methods.get(t, (None, None)) if next_method_arg is None: next_method_arg = f.next_method_arg if method: methods.append((next_method_arg, method)) extends = f.extends if extends is not None: f = extends.type_methods else: break def make_wrapped_method(next_method_arg, next_method, method): @wraps(method) def wrapped_method(*args, **kwargs): new_args = list(args[:next_method_arg]) new_args.append(next_method) new_args.extend(args[next_method_arg:]) return method(*new_args, **kwargs) return wrapped_method def next_method(*args, **kwargs): raise ValueError() for next_method_arg, method in reversed(methods): if next_method_arg is not None: next_method = make_wrapped_method(next_method_arg, next_method, method) else: next_method = method self.type_cache[dispatch_type] = next_method return next_method
Returns the method to handle dispatch_type. Args: dispatch_type: The type that controls method selection. Returns: A callable.
https://github.com/nervanasystems/ngraph-neon/blob/8988ab90ee81c8b219ea5c374702e56d7f383302/src/neon/util/generics.py#L114-L179
from functools import wraps _use_default = object() class TypeMethods(object): def __init__(self, dispatch_type_fun, next_method_arg=None, dispatch_base_type=object, extends=None, **kvargs): super(TypeMethods, self).__init__(**kvargs) self.methods = {} self.type_cache = {} self.extends = extends self.super_type_methods = set() if extends is not None: self.extends.type_methods.super_type_methods.add(self) self.dispatch_base_type = dispatch_base_type self.dispatch_type_fun = dispatch_type_fun self.next_method_arg = next_method_arg def on_type_wrapper(self, generic_function, dispatch_type, next_method_arg): if next_method_arg is _use_default: next_method_arg = self.next_method_arg def add_method(method): if not issubclass(dispatch_type, self.dispatch_base_type): raise ValueError("Dispatch type {} must be a subclass of `{}`" .format(dispatch_type, self.dispatch_base_type)) self.methods[dispatch_type] = (method, next_method_arg) self.clear_cache() return generic_function return add_method def clear_cache(self): self.type_cache = {} for sup in self.super_type_methods: sup.clear_cache() self.__all_methods = None
Apache License 2.0
bemmu/pynamecheap
namecheap.py
Api.domains_getList
python
def domains_getList(self, ListType=None, SearchTerm=None, PageSize=None, SortBy=None): extra_payload = {'Page': 1} if ListType: extra_payload['ListType'] = ListType if SearchTerm: extra_payload['SearchTerm'] = SearchTerm if PageSize: extra_payload['PageSize'] = PageSize if SortBy: extra_payload['SortBy'] = SortBy payload, extra_payload = self._payload('namecheap.domains.getList', extra_payload) return self.LazyGetListIterator(self, payload)
Returns an iterable of dicts. Each dict represents one domain name the user has registered, for example { 'Name': 'coolestfriends.com', 'Created': '04/11/2012', 'Expires': '04/11/2018', 'ID': '8385859', 'AutoRenew': 'false', 'IsLocked': 'false', 'User': 'Bemmu', 'IsExpired': 'false', 'WhoisGuard': 'NOTPRESENT' }
https://github.com/bemmu/pynamecheap/blob/1657852993bb0a7bfdc07c44977c88409bc7753a/namecheap.py#L442-L471
import sys import time import requests from xml.etree.ElementTree import fromstring inPy3k = sys.version_info[0] == 3 ENDPOINTS = { 'sandbox': 'https://api.sandbox.namecheap.com/xml.response', 'production': 'https://api.namecheap.com/xml.response', } NAMESPACE = "http://api.namecheap.com/xml.response" DEFAULT_ATTEMPTS_COUNT = 1 DEFAULT_ATTEMPTS_DELAY = 0.1 class ApiError(Exception): def __init__(self, number, text): Exception.__init__(self, '%s - %s' % (number, text)) self.number = number self.text = text class Api(object): def __init__(self, ApiUser, ApiKey, UserName, ClientIP, sandbox=True, debug=True, attempts_count=DEFAULT_ATTEMPTS_COUNT, attempts_delay=DEFAULT_ATTEMPTS_DELAY): self.ApiUser = ApiUser self.ApiKey = ApiKey self.UserName = UserName self.ClientIP = ClientIP self.endpoint = ENDPOINTS['sandbox' if sandbox else 'production'] self.debug = debug self.payload_limit = 10 self.attempts_count = attempts_count self.attempts_delay = attempts_delay def domains_create( self, DomainName, FirstName, LastName, Address1, City, StateProvince, PostalCode, Country, Phone, EmailAddress, Address2=None, years=1, WhoisGuard=False ): contact_types = ['Registrant', 'Tech', 'Admin', 'AuxBilling'] extra_payload = { 'DomainName': DomainName, 'years': years } if WhoisGuard: extra_payload.update({ 'AddFreeWhoisguard': 'yes', 'WGEnabled': 'yes', }) for contact_type in contact_types: extra_payload.update({ '%sFirstName' % contact_type: FirstName, '%sLastName' % contact_type: LastName, '%sAddress1' % contact_type: Address1, '%sCity' % contact_type: City, '%sStateProvince' % contact_type: StateProvince, '%sPostalCode' % contact_type: PostalCode, '%sCountry' % contact_type: Country, '%sPhone' % contact_type: Phone, '%sEmailAddress' % contact_type: EmailAddress, }) if Address2: extra_payload['%sAddress2' % contact_type] = Address2 self._call('namecheap.domains.create', extra_payload) def _payload(self, Command, extra_payload={}): payload = { 'ApiUser': self.ApiUser, 'ApiKey': self.ApiKey, 'UserName': self.UserName, 'ClientIP': self.ClientIP, 'Command': Command, } if len(extra_payload) < self.payload_limit: payload.update(extra_payload) extra_payload = {} return payload, extra_payload def _fetch_xml(self, payload, extra_payload = None): attempts_left = self.attempts_count while attempts_left > 0: if extra_payload: r = requests.post(self.endpoint, params=payload, data=extra_payload) else: r = requests.post(self.endpoint, params=payload) if 200 <= r.status_code <= 299: break if attempts_left <= 1: raise ApiError('1', 'Did not receive 200 (Ok) response') if self.debug: print('Received status %d ... retrying ...' 
% (r.status_code)) time.sleep(self.attempts_delay) attempts_left -= 1 if self.debug: print("--- Request ---") print(r.url) print(extra_payload) print("--- Response ---") print(r.text) xml = fromstring(r.text) if xml.attrib['Status'] == 'ERROR': xpath = './/{%(ns)s}Errors/{%(ns)s}Error' % {'ns': NAMESPACE} error = xml.find(xpath) raise ApiError(error.attrib['Number'], error.text) return xml def _call(self, Command, extra_payload={}): payload, extra_payload = self._payload(Command, extra_payload) xml = self._fetch_xml(payload, extra_payload) return xml class LazyGetListIterator(object): def _get_more_results(self): xml = self.api._fetch_xml(self.payload) xpath = './/{%(ns)s}CommandResponse/{%(ns)s}DomainGetListResult/{%(ns)s}Domain' % {'ns': NAMESPACE} domains = xml.findall(xpath) for domain in domains: self.results.append(domain.attrib) self.payload['Page'] += 1 def __init__(self, api, payload): self.api = api self.payload = payload self.results = [] self.i = -1 def __iter__(self): return self def __next__(self): self.i += 1 if self.i >= len(self.results): self._get_more_results() if self.i >= len(self.results): raise StopIteration else: return self.results[self.i] next = __next__ def domains_dns_setDefault(self, domain): sld, tld = domain.split(".") self._call("namecheap.domains.dns.setDefault", { 'SLD': sld, 'TLD': tld }) def domains_check(self, domains): if not inPy3k: if isinstance(domains, basestring): return self.domains_check([domains]).items()[0][1] else: if isinstance(domains, str): return list(self.domains_check([domains]).items())[0][1] extra_payload = {'DomainList': ",".join(domains)} xml = self._call('namecheap.domains.check', extra_payload) xpath = './/{%(ns)s}CommandResponse/{%(ns)s}DomainCheckResult' % {'ns': NAMESPACE} results = {} for check_result in xml.findall(xpath): results[check_result.attrib['Domain']] = check_result.attrib['Available'] == 'true' return results @classmethod def _tag_without_namespace(cls, element): return element.tag.replace("{%s}" % NAMESPACE, "") @classmethod def _list_of_dictionaries_to_numbered_payload(cls, l): return dict(sum([ [(k + str(i + 1), v) for k, v in d.items()] for i, d in enumerate(l) ], [])) @classmethod def _elements_names_fix(self, host_record): conversion_map = [ ("Name", "HostName"), ("Type", "RecordType") ] for field in conversion_map: if field[0] in host_record: host_record[field[1]] = host_record[field[0]] del(host_record[field[0]]) return host_record def domains_getContacts(self, DomainName): xml = self._call('namecheap.domains.getContacts', {'DomainName': DomainName}) xpath = './/{%(ns)s}CommandResponse/{%(ns)s}DomainContactsResult/*' % {'ns': NAMESPACE} results = {} for contact_type in xml.findall(xpath): fields_for_one_contact_type = {} for contact_detail in contact_type.findall('*'): fields_for_one_contact_type[self._tag_without_namespace(contact_detail)] = contact_detail.text results[self._tag_without_namespace(contact_type)] = fields_for_one_contact_type return results def domains_dns_setHosts(self, domain, host_records): extra_payload = self._list_of_dictionaries_to_numbered_payload(host_records) sld, tld = domain.split(".") extra_payload.update({ 'SLD': sld, 'TLD': tld }) self._call("namecheap.domains.dns.setHosts", extra_payload) def domains_dns_setCustom(self, domain, host_records): extra_payload = host_records sld, tld = domain.split(".") extra_payload['SLD'] = sld extra_payload['TLD'] = tld self._call("namecheap.domains.dns.setCustom", extra_payload) def domains_dns_getHosts(self, domain): sld, tld = 
domain.split(".") extra_payload = { 'SLD': sld, 'TLD': tld } xml = self._call("namecheap.domains.dns.getHosts", extra_payload) xpath = './/{%(ns)s}CommandResponse/{%(ns)s}DomainDNSGetHostsResult/*' % {'ns': NAMESPACE} results = [] for host in xml.findall(xpath): results.append(host.attrib) return results def domains_dns_addHost(self, domain, host_record): host_records_remote = self.domains_dns_getHosts(domain) print("Remote: %i" % len(host_records_remote)) host_records_remote.append(host_record) host_records_remote = [self._elements_names_fix(x) for x in host_records_remote] print("To set: %i" % len(host_records_remote)) extra_payload = self._list_of_dictionaries_to_numbered_payload(host_records_remote) sld, tld = domain.split(".") extra_payload.update({ 'SLD': sld, 'TLD': tld }) self._call("namecheap.domains.dns.setHosts", extra_payload) def domains_dns_delHost(self, domain, host_record): host_records_remote = self.domains_dns_getHosts(domain) print("Remote: %i" % len(host_records_remote)) host_records_new = [] for r in host_records_remote: cond_type = r["Type"] == host_record["Type"] cond_name = r["Name"] == host_record["Name"] cond_addr = r["Address"] == host_record["Address"] if cond_type and cond_name and cond_addr: pass else: host_records_new.append(r) host_records_new = [self._elements_names_fix(x) for x in host_records_new] print("To set: %i" % len(host_records_new)) if len(host_records_remote) != len(host_records_new) + 1: sys.stderr.write( "Something went wrong while removing host record, delta > 1: %i -> %i, aborting API call.\n" % ( len(host_records_remote), len(host_records_new) ) ) return False extra_payload = self._list_of_dictionaries_to_numbered_payload(host_records_new) sld, tld = domain.split(".") extra_payload.update({ 'SLD': sld, 'TLD': tld }) self._call("namecheap.domains.dns.setHosts", extra_payload)
MIT License
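A usage sketch for the domains_getList entry above, using the Api constructor shown in the surrounding context; the credentials and IP are placeholders.

from namecheap import Api  # the class lives in namecheap.py per the path above

api = Api(ApiUser='user', ApiKey='key', UserName='user',
          ClientIP='203.0.113.1', sandbox=True)
# Lazily pages through the account's registered domains.
for domain in api.domains_getList(PageSize=100):
    print(domain['Name'], domain['Expires'])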
rigetti/reference-qvm
referenceqvm/qvm_stabilizer.py
QVM_Stabilizer._apply_phase
python
def _apply_phase(self, instruction): qubit_label = [value_get(x) for x in instruction.qubits][0] for i in range(2 * self.num_qubits): self.tableau[i, -1] = self.tableau[i, -1] ^ (self.tableau[i, qubit_label] * self.tableau[i, qubit_label + self.num_qubits]) self.tableau[i, qubit_label + self.num_qubits] = self.tableau[i, qubit_label + self.num_qubits] ^ self.tableau[i, qubit_label]
Apply the phase gate instruction to the tableau. :param instruction: pyquil abstract instruction.
https://github.com/rigetti/reference-qvm/blob/e4ca313928f72b3d2348a3f9abfec6607944c59e/referenceqvm/qvm_stabilizer.py#L412-L421
from functools import reduce from pyquil.quil import Program, get_classical_addresses_from_program from pyquil.quilbase import * from pyquil.paulis import PauliTerm, sI, sZ, sX, sY from pyquil.wavefunction import Wavefunction from referenceqvm.qam import QAM from referenceqvm.gates import stabilizer_gate_matrix from referenceqvm.unitary_generator import value_get, tensor_up from referenceqvm.stabilizer_utils import (project_stabilized_state, symplectic_inner_product, binary_stabilizer_to_pauli_stabilizer) class QVM_Stabilizer(QAM): def __init__(self, num_qubits=None, program=None, program_counter=None, classical_memory=None, gate_set=stabilizer_gate_matrix, defgate_set=None): super(QVM_Stabilizer, self).__init__(num_qubits=num_qubits, program=program, program_counter=program_counter, classical_memory=classical_memory, gate_set=gate_set, defgate_set=defgate_set) self.all_inst = False if num_qubits is None: self.tableau = None else: self.tableau = self._n_qubit_tableau(num_qubits) def load_program(self, pyquil_program): if not isinstance(pyquil_program, Program): raise TypeError("I can only generate from pyQuil programs") if self.all_inst is None: raise NotImplementedError("QAM needs to be subclassed in order to " "load program") invalid = False for instr in pyquil_program: if isinstance(instr, Gate): if not instr.name in self.gate_set.keys(): invalid = True break elif isinstance(instr, Measurement): pass else: invalid = True break if invalid is True and self.all_inst is False: raise TypeError("Some gates used are not allowed in this QAM") self.program = pyquil_program self.program_counter = 0 q_max, c_max = self.identify_bits() if c_max <= 512: c_max = 512 self.num_qubits = q_max self.classical_memory = np.zeros(c_max).astype(bool) def _n_qubit_tableau(self, num_qubits): tableau = np.zeros((2 * num_qubits, (2 * num_qubits) + 1), dtype=int) for ii in range(2 * self.num_qubits): tableau[ii, ii] = 1 return tableau def transition(self, instruction): self.pre() self._transition(instruction) self.post() return self.program_counter == len(self.program) def pre(self): pass def post(self): pass def _transition(self, instruction): if isinstance(instruction, Measurement): self._apply_measurement(instruction) self.program_counter += 1 elif isinstance(instruction, Gate): if instruction.name == 'H': self._apply_hadamard(instruction) elif instruction.name == 'S': self._apply_phase(instruction) elif instruction.name == 'CNOT': self._apply_cnot(instruction) elif instruction.name == 'I': pass else: raise TypeError("We checked for correct gate types previously" + " so the impossible has happened!") self.program_counter += 1 elif isinstance(instruction, Jump): self.program_counter = self.find_label(instruction.target) elif isinstance(instruction, JumpTarget): self.program_counter += 1 def run(self, pyquil_program, classical_addresses=None, trials=1): self.load_program(pyquil_program) if classical_addresses is None: classical_addresses = get_classical_addresses_from_program(pyquil_program) results = [] for _ in range(trials): self.tableau = self._n_qubit_tableau(self.num_qubits) self.kernel() results.append(list(map(int, self.classical_memory[classical_addresses]))) assert results[-1] == [int(b) for b in self.classical_memory[classical_addresses]] self.memory_reset() self.program_counter = 0 return results def density(self, pyquil_program): self.load_program(pyquil_program) self.tableau = self._n_qubit_tableau(self.num_qubits) self.kernel() stabilizers = 
binary_stabilizer_to_pauli_stabilizer(self.stabilizer_tableau()) pauli_ops = reduce(lambda x, y: x * y, [0.5 * (sI(0) + b) for b in stabilizers]) return tensor_up(pauli_ops, self.num_qubits) def wavefunction(self, program): self.load_program(program) self.tableau = self._n_qubit_tableau(self.num_qubits) self.kernel() stabilizers = binary_stabilizer_to_pauli_stabilizer(self.stabilizer_tableau()) stabilizer_state = project_stabilized_state(stabilizers) stabilizer_state = stabilizer_state.toarray() return Wavefunction(stabilizer_state.flatten()) def stabilizer_tableau(self): return self.tableau[self.num_qubits:, :] def destabilizer_tableau(self): return self.tableau[:self.num_qubits, :] def _rowsum(self, h, i): phase_accumulator = self._rowsum_phase_accumulator(h, i) if phase_accumulator % 4 == 0: self.tableau[h, -1] = 0 elif phase_accumulator % 4 == 2: self.tableau[h, -1] = 1 else: raise ValueError("An impossible value for the phase_accumulator has occurred") for j in range(self.num_qubits): self.tableau[h, j] = self.tableau[i, j] ^ self.tableau[h, j] self.tableau[h, j + self.num_qubits] = self.tableau[i, j + self.num_qubits] ^ self.tableau[h, j + self.num_qubits] def _rowsum_phase_accumulator(self, h, i): phase_accumulator = 0 for j in range(self.num_qubits): phase_accumulator += self._g_update(self.tableau[i, j], self.tableau[i, self.num_qubits + j], self.tableau[h, j], self.tableau[h, self.num_qubits + j]) phase_accumulator += 2 * self.tableau[h, -1] phase_accumulator += 2 * self.tableau[i, -1] return phase_accumulator % 4 def _g_update(self, x1, z1, x2, z2): if x1 == z1 == 0: return 0 if x1 == z1 == 1: return z2 - x2 if x1 == 1 and z1 == 0: return z2 * (2 * x2 - 1) if x1 == 0 and z1 == 1: return x2 * (1 - 2 * z2) raise ValueError("We were unable to multiply the pauli operators" + " together!") def _apply_cnot(self, instruction): a, b = [value_get(x) for x in instruction.qubits] for i in range(2 * self.num_qubits): self.tableau[i, -1] = self._cnot_phase_update(i, a, b) self.tableau[i, b] = self.tableau[i, b] ^ self.tableau[i, a] self.tableau[i, a + self.num_qubits] = self.tableau[i, a + self.num_qubits] ^ self.tableau[i, b + self.num_qubits] def _cnot_phase_update(self, i, c, t): return self.tableau[i, -1] ^ (self.tableau[i, c] * self.tableau[i, t + self.num_qubits]) * (self.tableau[i, t] ^ self.tableau[i, c + self.num_qubits] ^ 1) def _apply_hadamard(self, instruction): qubit_label = [value_get(x) for x in instruction.qubits][0] for i in range(2 * self.num_qubits): self.tableau[i, -1] = self.tableau[i, -1] ^ (self.tableau[i, qubit_label] * self.tableau[i, qubit_label + self.num_qubits]) self.tableau[i, [qubit_label, qubit_label + self.num_qubits]] = self.tableau[i, [qubit_label + self.num_qubits, qubit_label]]
Apache License 2.0
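The tableau update in _apply_phase above reduces, for each row and the target qubit's (x, z) columns, to r ^= x*z followed by z ^= x. The standalone numpy sketch below applies that rule to a single-qubit tableau (destabilizer X, stabilizer Z); it is an illustration of the rule, not a call into the class, and shows S mapping X to Y while leaving Z fixed.

import numpy as np

# Columns: [x, z, r]; rows: destabilizer X, stabilizer Z (1 qubit).
tableau = np.array([[1, 0, 0],
                    [0, 1, 0]], dtype=int)

for row in tableau:                     # same per-row rule as _apply_phase
    row[2] ^= row[0] * row[1]           # r ^= x * z
    row[1] ^= row[0]                    # z ^= x

print(tableau)   # [[1, 1, 0], [0, 1, 0]] -> X becomes Y (x=z=1), Z is unchanged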
tmm1/graphite
whisper/whisper.py
update_many
python
def update_many(path,points): if not points: return points = [ (int(t),float(v)) for (t,v) in points] points.sort(key=lambda p: p[0],reverse=True) fh = open(path,'r+b') return file_update_many(fh, points)
update_many(path,points): path is a string; points is a list of (timestamp,value) pairs.
https://github.com/tmm1/graphite/blob/8f17c2c48412270b8f4e849b16bf55866f5084cb/whisper/whisper.py#L529-L539
import os, struct, time, operator, itertools try: import fcntl CAN_LOCK = True except ImportError: CAN_LOCK = False LOCK = False CACHE_HEADERS = False AUTOFLUSH = False __headerCache = {} longFormat = "!L" longSize = struct.calcsize(longFormat) floatFormat = "!f" floatSize = struct.calcsize(floatFormat) valueFormat = "!d" valueSize = struct.calcsize(valueFormat) pointFormat = "!Ld" pointSize = struct.calcsize(pointFormat) metadataFormat = "!2LfL" metadataSize = struct.calcsize(metadataFormat) archiveInfoFormat = "!3L" archiveInfoSize = struct.calcsize(archiveInfoFormat) aggregationTypeToMethod = dict({ 1: 'average', 2: 'sum', 3: 'last', 4: 'max', 5: 'min' }) aggregationMethodToType = dict([[v,k] for k,v in aggregationTypeToMethod.items()]) aggregationMethods = aggregationTypeToMethod.values() debug = startBlock = endBlock = lambda *a,**k: None UnitMultipliers = { 'seconds' : 1, 'minutes' : 60, 'hours' : 3600, 'days' : 86400, 'weeks' : 86400 * 7, 'years' : 86400 * 365 } def getUnitString(s): if 'seconds'.startswith(s): return 'seconds' if 'minutes'.startswith(s): return 'minutes' if 'hours'.startswith(s): return 'hours' if 'days'.startswith(s): return 'days' if 'weeks'.startswith(s): return 'weeks' if 'years'.startswith(s): return 'years' raise ValueError("Invalid unit '%s'" % s) def parseRetentionDef(retentionDef): import re (precision, points) = retentionDef.strip().split(':') if precision.isdigit(): precision = int(precision) * UnitMultipliers[getUnitString('s')] else: precision_re = re.compile(r'^(\d+)([a-z]+)$') match = precision_re.match(precision) if match: precision = int(match.group(1)) * UnitMultipliers[getUnitString(match.group(2))] else: raise ValueError("Invalid precision specification '%s'" % precision) if points.isdigit(): points = int(points) else: points_re = re.compile(r'^(\d+)([a-z]+)$') match = points_re.match(points) if match: points = int(match.group(1)) * UnitMultipliers[getUnitString(match.group(2))] / precision else: raise ValueError("Invalid retention specification '%s'" % points) return (precision, points) class WhisperException(Exception): class InvalidConfiguration(WhisperException): class InvalidAggregationMethod(WhisperException): class InvalidTimeInterval(WhisperException): class TimestampNotCovered(WhisperException): class CorruptWhisperFile(WhisperException): def __init__(self, error, path): Exception.__init__(self, error) self.error = error self.path = path def __repr__(self): return "<CorruptWhisperFile[%s] %s>" % (self.path, self.error) def __str__(self): return "%s (%s)" % (self.error, self.path) def enableDebug(): global open, debug, startBlock, endBlock class open(file): def __init__(self,*args,**kwargs): file.__init__(self,*args,**kwargs) self.writeCount = 0 self.readCount = 0 def write(self,data): self.writeCount += 1 debug('WRITE %d bytes #%d' % (len(data),self.writeCount)) return file.write(self,data) def read(self,bytes): self.readCount += 1 debug('READ %d bytes #%d' % (bytes,self.readCount)) return file.read(self,bytes) def debug(message): print 'DEBUG :: %s' % message __timingBlocks = {} def startBlock(name): __timingBlocks[name] = time.time() def endBlock(name): debug("%s took %.5f seconds" % (name,time.time() - __timingBlocks.pop(name))) def __readHeader(fh): info = __headerCache.get(fh.name) if info: return info originalOffset = fh.tell() fh.seek(0) packedMetadata = fh.read(metadataSize) try: (aggregationType,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata) except: raise CorruptWhisperFile("Unable to read 
header", fh.name) archives = [] for i in xrange(archiveCount): packedArchiveInfo = fh.read(archiveInfoSize) try: (offset,secondsPerPoint,points) = struct.unpack(archiveInfoFormat,packedArchiveInfo) except: raise CorruptWhisperFile("Unable to read archive%d metadata" % i, fh.name) archiveInfo = { 'offset' : offset, 'secondsPerPoint' : secondsPerPoint, 'points' : points, 'retention' : secondsPerPoint * points, 'size' : points * pointSize, } archives.append(archiveInfo) fh.seek(originalOffset) info = { 'aggregationMethod' : aggregationTypeToMethod.get(aggregationType, 'average'), 'maxRetention' : maxRetention, 'xFilesFactor' : xff, 'archives' : archives, } if CACHE_HEADERS: __headerCache[fh.name] = info return info def setAggregationMethod(path, aggregationMethod): fh = open(path,'r+b') if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX ) packedMetadata = fh.read(metadataSize) try: (aggregationType,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata) except: raise CorruptWhisperFile("Unable to read header", fh.name) try: newAggregationType = struct.pack( longFormat, aggregationMethodToType[aggregationMethod] ) except KeyError: raise InvalidAggregationMethod("Unrecognized aggregation method: %s" % aggregationMethod) fh.seek(0) fh.write(newAggregationType) if AUTOFLUSH: fh.flush() os.fsync(fh.fileno()) if CACHE_HEADERS and fh.name in __headerCache: del __headerCache[fh.name] fh.close() return aggregationTypeToMethod.get(aggregationType, 'average') def validateArchiveList(archiveList): if not archiveList: raise InvalidConfiguration("You must specify at least one archive configuration!") archiveList.sort(key=lambda a: a[0]) for i,archive in enumerate(archiveList): if i == len(archiveList) - 1: break nextArchive = archiveList[i+1] if not archive[0] < nextArchive[0]: raise InvalidConfiguration("A Whisper database may not configured having" "two archives with the same precision (archive%d: %s, archive%d: %s)" % (i, archive, i + 1, nextArchive)) if nextArchive[0] % archive[0] != 0: raise InvalidConfiguration("Higher precision archives' precision " "must evenly divide all lower precision archives' precision " "(archive%d: %s, archive%d: %s)" % (i, archive[0], i + 1, nextArchive[0])) retention = archive[0] * archive[1] nextRetention = nextArchive[0] * nextArchive[1] if not nextRetention > retention: raise InvalidConfiguration("Lower precision archives must cover " "larger time intervals than higher precision archives " "(archive%d: %s seconds, archive%d: %s seconds)" % (i, archive[1], i + 1, nextArchive[1])) archivePoints = archive[1] pointsPerConsolidation = nextArchive[0] / archive[0] if not archivePoints >= pointsPerConsolidation: raise InvalidConfiguration("Each archive must have at least enough points " "to consolidate to the next archive (archive%d consolidates %d of " "archive%d's points but it has only %d total points)" % (i + 1, pointsPerConsolidation, i, archivePoints)) def create(path,archiveList,xFilesFactor=None,aggregationMethod=None,sparse=False): if xFilesFactor is None: xFilesFactor = 0.5 if aggregationMethod is None: aggregationMethod = 'average' validateArchiveList(archiveList) if os.path.exists(path): raise InvalidConfiguration("File %s already exists!" 
% path) fh = open(path,'wb') if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX ) aggregationType = struct.pack( longFormat, aggregationMethodToType.get(aggregationMethod, 1) ) oldest = sorted([secondsPerPoint * points for secondsPerPoint,points in archiveList])[-1] maxRetention = struct.pack( longFormat, oldest ) xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) ) archiveCount = struct.pack(longFormat, len(archiveList)) packedMetadata = aggregationType + maxRetention + xFilesFactor + archiveCount fh.write(packedMetadata) headerSize = metadataSize + (archiveInfoSize * len(archiveList)) archiveOffsetPointer = headerSize for secondsPerPoint,points in archiveList: archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points) fh.write(archiveInfo) archiveOffsetPointer += (points * pointSize) if sparse: fh.seek(archiveOffsetPointer - headerSize - 1) fh.write("\0") else: remaining = archiveOffsetPointer - headerSize chunksize = 16384 zeroes = '\x00' * chunksize while remaining > chunksize: fh.write(zeroes) remaining -= chunksize fh.write(zeroes[:remaining]) if AUTOFLUSH: fh.flush() os.fsync(fh.fileno()) fh.close() def __aggregate(aggregationMethod, knownValues): if aggregationMethod == 'average': return float(sum(knownValues)) / float(len(knownValues)) elif aggregationMethod == 'sum': return float(sum(knownValues)) elif aggregationMethod == 'last': return knownValues[len(knownValues)-1] elif aggregationMethod == 'max': return max(knownValues) elif aggregationMethod == 'min': return min(knownValues) else: raise InvalidAggregationMethod("Unrecognized aggregation method %s" % aggregationMethod) def __propagate(fh,header,timestamp,higher,lower): aggregationMethod = header['aggregationMethod'] xff = header['xFilesFactor'] lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint']) lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint'] fh.seek(higher['offset']) packedPoint = fh.read(pointSize) (higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint) if higherBaseInterval == 0: higherFirstOffset = higher['offset'] else: timeDistance = lowerIntervalStart - higherBaseInterval pointDistance = timeDistance / higher['secondsPerPoint'] byteDistance = pointDistance * pointSize higherFirstOffset = higher['offset'] + (byteDistance % higher['size']) higherPoints = lower['secondsPerPoint'] / higher['secondsPerPoint'] higherSize = higherPoints * pointSize relativeFirstOffset = higherFirstOffset - higher['offset'] relativeLastOffset = (relativeFirstOffset + higherSize) % higher['size'] higherLastOffset = relativeLastOffset + higher['offset'] fh.seek(higherFirstOffset) if higherFirstOffset < higherLastOffset: seriesString = fh.read(higherLastOffset - higherFirstOffset) else: higherEnd = higher['offset'] + higher['size'] seriesString = fh.read(higherEnd - higherFirstOffset) fh.seek(higher['offset']) seriesString += fh.read(higherLastOffset - higher['offset']) byteOrder,pointTypes = pointFormat[0],pointFormat[1:] points = len(seriesString) / pointSize seriesFormat = byteOrder + (pointTypes * points) unpackedSeries = struct.unpack(seriesFormat, seriesString) neighborValues = [None] * points currentInterval = lowerIntervalStart step = higher['secondsPerPoint'] for i in xrange(0,len(unpackedSeries),2): pointTime = unpackedSeries[i] if pointTime == currentInterval: neighborValues[i/2] = unpackedSeries[i+1] currentInterval += step knownValues = [v for v in neighborValues if v is not None] if not knownValues: return False knownPercent = 
float(len(knownValues)) / float(len(neighborValues)) if knownPercent >= xff: aggregateValue = __aggregate(aggregationMethod, knownValues) myPackedPoint = struct.pack(pointFormat,lowerIntervalStart,aggregateValue) fh.seek(lower['offset']) packedPoint = fh.read(pointSize) (lowerBaseInterval,lowerBaseValue) = struct.unpack(pointFormat,packedPoint) if lowerBaseInterval == 0: fh.seek(lower['offset']) fh.write(myPackedPoint) else: timeDistance = lowerIntervalStart - lowerBaseInterval pointDistance = timeDistance / lower['secondsPerPoint'] byteDistance = pointDistance * pointSize lowerOffset = lower['offset'] + (byteDistance % lower['size']) fh.seek(lowerOffset) fh.write(myPackedPoint) return True else: return False def update(path,value,timestamp=None): value = float(value) fh = open(path,'r+b') return file_update(fh, value, timestamp) def file_update(fh, value, timestamp): if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX ) header = __readHeader(fh) now = int( time.time() ) if timestamp is None: timestamp = now timestamp = int(timestamp) diff = now - timestamp if not ((diff < header['maxRetention']) and diff >= 0): raise TimestampNotCovered("Timestamp not covered by any archives in " "this database.") for i,archive in enumerate(header['archives']): if archive['retention'] < diff: continue lowerArchives = header['archives'][i+1:] break myInterval = timestamp - (timestamp % archive['secondsPerPoint']) myPackedPoint = struct.pack(pointFormat,myInterval,value) fh.seek(archive['offset']) packedPoint = fh.read(pointSize) (baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint) if baseInterval == 0: fh.seek(archive['offset']) fh.write(myPackedPoint) baseInterval,baseValue = myInterval,value else: timeDistance = myInterval - baseInterval pointDistance = timeDistance / archive['secondsPerPoint'] byteDistance = pointDistance * pointSize myOffset = archive['offset'] + (byteDistance % archive['size']) fh.seek(myOffset) fh.write(myPackedPoint) higher = archive for lower in lowerArchives: if not __propagate(fh, header, myInterval, higher, lower): break higher = lower if AUTOFLUSH: fh.flush() os.fsync(fh.fileno()) fh.close()
Apache License 2.0
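The round-robin offset arithmetic in the whisper code above (time distance → point distance → byte distance, wrapped modulo the archive size) is what keeps every archive a fixed size on disk. Below is a minimal standalone sketch of that wrap-around calculation; the point packing, header size and archive parameters are illustrative assumptions, not values taken from the record.

import struct

POINT_FORMAT = "!Ld"                       # assumed packing: one timestamp plus one double per point
POINT_SIZE = struct.calcsize(POINT_FORMAT)

def point_offset(archive_offset, archive_size, seconds_per_point, base_interval, interval):
    # Map an interval timestamp onto its byte offset inside a fixed-size archive.
    time_distance = interval - base_interval
    point_distance = time_distance // seconds_per_point
    byte_distance = point_distance * POINT_SIZE
    return archive_offset + (byte_distance % archive_size)

# Illustrative archive: 60s per point, 1440 points, placed right after a 28-byte header.
print(point_offset(28, 1440 * POINT_SIZE, 60, 1_700_000_000, 1_700_000_060))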
blacklight/platypush
platypush/plugins/covid19/__init__.py
Covid19Plugin.__init__
python
def __init__(self, country: Union[str, List[str]] = 'world', **kwargs):
    super().__init__(**kwargs)
    self.country = []
    self.all_countries = requests.get('{}/countries'.format(self.base_url)).json()
    self.country = self._get_countries(country)
:param country: Default country (or list of countries) to retrieve the stats for.
    It can either be the full country name or the country code. Special values:

    - ``world``: Get worldwide stats (default).
    - ``all``: Get all the available stats.
https://github.com/blacklight/platypush/blob/a5f1dc2638d7c6308325e0ca39dc7d5e262836aa/platypush/plugins/covid19/__init__.py#L15-L26
from typing import Optional, Union, List, Dict, Any import requests from platypush.plugins import Plugin, action class Covid19Plugin(Plugin): base_url = 'https://api.covid19api.com'
MIT License
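As a hedged illustration of what the constructor above does at startup, the sketch below performs the same countries lookup against the plugin's base URL. The endpoint path and base URL come from the snippet itself; the response shape and the continued availability of the service are assumptions.

import requests

BASE_URL = 'https://api.covid19api.com'   # value of Covid19Plugin.base_url in the record

def fetch_countries(base_url=BASE_URL):
    # Same request the constructor issues before resolving the default country.
    response = requests.get('{}/countries'.format(base_url))
    response.raise_for_status()
    return response.json()

# countries = fetch_countries()  # network call; each entry is assumed to carry a name and a code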
pg8wood/discord-voice-assistant-bot
bot/music.py
Music.set_next_song_ready
python
def set_next_song_ready(self):
    self.bot.loop.call_soon_threadsafe(self.advance_queue_event.set)
Tells the queue the next song is ready to be played by setting the advance queue event's internal flag
https://github.com/pg8wood/discord-voice-assistant-bot/blob/a6e6eecc57be9616d85e8a8454d2a190e1b65a8c/bot/music.py#L29-L33
from asyncio import Queue, Event import discord from discord import ChannelType from discord.ext import commands from discord.voice_client import ProcessPlayer import util import validators class Music: def __init__(self, bot): self.bot = bot self.voice_channel = self.bot.voice_clients[0] if len(self.bot.voice_clients) > 0 else None self.current_song = None self.advance_queue_event = Event() self.queue = Queue() self.playlist_task_loop = self.bot.loop.create_task(self.playlist_task())
MIT License
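The one-liner above is the standard way to set an asyncio event from a non-asyncio thread (here, the audio player's completion callback). A self-contained sketch of that pattern, independent of discord.py; the timer delay and function names are illustrative.

import asyncio
import threading

async def advance_queue(event: asyncio.Event):
    await event.wait()                       # the queue loop blocks here until the player signals
    print("song finished, advancing queue")

def player_finished(loop: asyncio.AbstractEventLoop, event: asyncio.Event):
    # Runs on the player's thread, like set_next_song_ready above.
    loop.call_soon_threadsafe(event.set)

async def main():
    event = asyncio.Event()
    loop = asyncio.get_running_loop()
    threading.Timer(0.1, player_finished, args=(loop, event)).start()
    await advance_queue(event)

asyncio.run(main())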
rdflib/pyshacl
pyshacl/shapes_graph.py
ShapesGraph.__init__
python
def __init__(self, graph, logger=None):
    assert isinstance(graph, (rdflib.Dataset, rdflib.ConjunctiveGraph, rdflib.Graph))
    self.graph = graph
    if isinstance(self.graph, rdflib.Dataset):
        self.graph.default_union = True
    if logger is None:
        logger = logging.getLogger(__name__)
    self.logger = logger
    self._node_shape_cache = {}
    self._shapes = None
    self._custom_constraints = None
    self._shacl_functions = {}
    self._shacl_target_types = {}
    self._use_js = False
    self._add_system_triples()
ShapesGraph

:param graph:
:type graph: rdflib.Graph
:param logger:
:type logger: logging.Logger|None
https://github.com/rdflib/pyshacl/blob/48155756d14d607f4fe0ea049aa28f42c874e68d/pyshacl/shapes_graph.py#L36-L57
import logging import warnings import rdflib from .constraints.constraint_component import CustomConstraintComponentFactory from .constraints.core.logical_constraints import SH_and, SH_not, SH_or, SH_xone from .constraints.core.shape_based_constraints import SH_qualifiedValueShape from .consts import ( SH, OWL_Class, OWL_DatatypeProperty, RDF_Property, RDF_type, RDFS_Class, RDFS_subClassOf, SH_ConstraintComponent, SH_node, SH_NodeShape, SH_path, SH_property, SH_PropertyShape, SH_targetClass, SH_targetNode, SH_targetObjectsOf, SH_targetSubjectsOf, ) from .errors import ShapeLoadError from .shape import Shape class ShapesGraph(object): system_triples = [(OWL_Class, RDFS_subClassOf, RDFS_Class), (OWL_DatatypeProperty, RDFS_subClassOf, RDF_Property)]
Apache License 2.0
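A small usage sketch for the constructor above: parse a Turtle shapes document with rdflib and wrap it. The import path is inferred from the record's function_path, the Turtle content is illustrative, and treating ShapesGraph as directly constructible outside pyshacl's own validator is an assumption.

import logging
import rdflib
from pyshacl.shapes_graph import ShapesGraph   # import path inferred from the record

SHAPES_TTL = """
@prefix sh: <http://www.w3.org/ns/shacl#> .
@prefix ex: <http://example.org/> .
ex:PersonShape a sh:NodeShape ; sh:targetClass ex:Person .
"""

graph = rdflib.Graph().parse(data=SHAPES_TTL, format="turtle")
shapes_graph = ShapesGraph(graph, logger=logging.getLogger("shacl"))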
nuagenetworks/vspk-python
vspk/v5_0/nushuntlink.py
NUShuntLink.description
python
def description(self):
    return self._description
Get description value.

Notes:
    Extra information entered by the operator to define the Shunt Link.
https://github.com/nuagenetworks/vspk-python/blob/375cce10ae144ad6017104e57fcd3630898cc2a6/vspk/v5_0/nushuntlink.py#L315-L323
from .fetchers import NUMetadatasFetcher from .fetchers import NUAlarmsFetcher from .fetchers import NUGlobalMetadatasFetcher from bambou import NURESTObject class NUShuntLink(NURESTObject): __rest_name__ = "shuntlink" __resource_name__ = "shuntlinks" CONST_PERMITTED_ACTION_USE = "USE" CONST_PERMITTED_ACTION_READ = "READ" CONST_PERMITTED_ACTION_ALL = "ALL" CONST_PERMITTED_ACTION_DEPLOY = "DEPLOY" CONST_PERMITTED_ACTION_EXTEND = "EXTEND" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" CONST_PERMITTED_ACTION_INSTANTIATE = "INSTANTIATE" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" def __init__(self, **kwargs): super(NUShuntLink, self).__init__() self._vlan_peer1_id = None self._vlan_peer2_id = None self._name = None self._last_updated_by = None self._gateway_peer1_id = None self._gateway_peer2_id = None self._permitted_action = None self._description = None self._entity_scope = None self._external_id = None self.expose_attribute(local_name="vlan_peer1_id", remote_name="VLANPeer1ID", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="vlan_peer2_id", remote_name="VLANPeer2ID", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="gateway_peer1_id", remote_name="gatewayPeer1ID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="gateway_peer2_id", remote_name="gatewayPeer2ID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="permitted_action", remote_name="permittedAction", attribute_type=str, is_required=False, is_unique=False, choices=[u'ALL', u'DEPLOY', u'EXTEND', u'INSTANTIATE', u'READ', u'USE']) self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.alarms = NUAlarmsFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs) @property def vlan_peer1_id(self): return self._vlan_peer1_id @vlan_peer1_id.setter def vlan_peer1_id(self, value): self._vlan_peer1_id = value @property def vlan_peer2_id(self): return self._vlan_peer2_id @vlan_peer2_id.setter def vlan_peer2_id(self, value): self._vlan_peer2_id = value @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def last_updated_by(self): return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): self._last_updated_by = value @property def gateway_peer1_id(self): return self._gateway_peer1_id @gateway_peer1_id.setter def gateway_peer1_id(self, value): self._gateway_peer1_id = value @property def gateway_peer2_id(self): return self._gateway_peer2_id @gateway_peer2_id.setter def gateway_peer2_id(self, value): self._gateway_peer2_id = value @property def 
permitted_action(self): return self._permitted_action @permitted_action.setter def permitted_action(self, value): self._permitted_action = value @property
BSD 3-Clause New or Revised License
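The getter above is one half of the property pattern the bambou-backed models use: every exposed attribute is a private field behind a property pair. A dependency-free sketch of that pattern follows; the class name and the assigned value are illustrative, not from the record.

class ShuntLinkSketch:
    def __init__(self, description=None):
        self._description = description

    @property
    def description(self):
        # Mirrors NUShuntLink.description above: read the backing field.
        return self._description

    @description.setter
    def description(self, value):
        self._description = value

link = ShuntLinkSketch()
link.description = "Operator note about this shunt link"
print(link.description)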
nervanasystems/neon
neon/callbacks/callbacks.py
MultiLabelStatsCallback.on_epoch_end
python
def on_epoch_end(self, callback_data, model, epoch):
    if (epoch + 1) % self.epoch_freq == 0:
        self.eval_set.reset()
        running_stats = np.zeros_like(self.metric.outputs.get(), dtype=np.float32)
        nbatch = 0
        for x, t in self.eval_set:
            x = model.fprop(x, inference=True)
            self.metric(x, t)
            running_stats += self.metric.outputs.get()
            nbatch += 1
        running_stats /= nbatch
        for i, label in enumerate(self.labels):
            metric_text = "["
            for k, metric in enumerate(self.metric.metric_names):
                metric_text += "%s: %d%% " % (metric, running_stats[i][k] * 100.0)
            metric_text += "] -> %s\n" % label
            sys.stdout.write(metric_text)
        sys.stdout.flush()
Called when an epoch is about to end

Arguments:
    callback_data (HDF5 dataset): shared data between callbacks
    model (Model): model object
    epoch (int): index of epoch that is ending
https://github.com/nervanasystems/neon/blob/8c3fb8a93b4a89303467b25817c60536542d08bd/neon/callbacks/callbacks.py#L904-L937
from __future__ import division from builtins import map, str, zip from future.utils import native from collections import deque import h5py import inspect import logging import numpy as np import os import signal import sys import time import math from timeit import default_timer import weakref from neon import NervanaObject, logger as neon_logger from neon.data import NervanaDataIterator, Ticker from neon.util.compat import PY3 from neon.util.persist import load_obj, save_obj, load_class from neon.layers import Convolution, BatchNorm, Multicost from neon.transforms.cost import Metric logger = logging.getLogger(__name__) class Callbacks(NervanaObject): def __init__(self, model, train_set=None, output_file=None, eval_freq=None, progress_bar=True, save_path=None, serialize=0, history=1, model_file=None, eval_set=None, metric=None, log_token=None, multicost=False): if train_set is not None: logger.warning("Deprecation warning. Callbacks class no longer " "accepts train_set as a parameter. This argument will " "be removed soon update your code.") super(Callbacks, self).__init__(name=None) self.callbacks = list() self.epoch_marker = 0 self.output_file = output_file if output_file is None: if hasattr(self, 'callback_data'): del self.callback_data self.callback_data = h5py.File(self.name, driver='core', backing_store=False) else: if os.path.isfile(output_file): logger.warn("Overwriting output file %s", output_file) os.remove(output_file) self.callback_data = h5py.File(output_file, "w") self.model = weakref.ref(model) self.model_file = model_file if multicost is True: self.add_callback(TrainMulticostCallback()) else: self.add_callback(TrainCostCallback()) if progress_bar: self.add_callback(ProgressBarCallback()) if eval_freq: if not eval_set: err_msg = 'Evaluation frequency specified but no eval set provided!' 
logger.exception(err_msg) raise ValueError(err_msg) ecb = LossCallback(eval_set, eval_freq) self.add_callback(ecb, insert_pos=0) if metric: ecb = MetricCallback(eval_set, metric, eval_freq) self.add_callback(ecb, insert_pos=None) self.save_path = save_path if save_path: serialize_interval = serialize if serialize > 1 else 1 scb = SerializeModelCallback(save_path, serialize_interval, history) self.add_callback(scb) self.add_callback(TrainLoggerCallback()) self.add_callback(RunTimerCallback()) try: from cloud_metrics import CloudMetricsCallback self.add_callback(CloudMetricsCallback(log_token, eval_freq, metric)) except ImportError: pass def __del__(self): try: self.callback_data.close() except Exception: pass def serialize(self): return self.get_description() def get_description(self): cdict = {} cdict['epoch_marker'] = self.epoch_marker cdict['output_file'] = self.output_file cdict['callbacks'] = [] for callback in self.callbacks: cdict['callbacks'].append(callback.get_description()) return cdict @classmethod def load_callbacks(cls, cdict, model, data=[]): if type(native(cdict)) is str: cdict = load_obj(cdict) callbacks = cls(model, output_file=cdict['output_file']) callbacks.epoch_marker = cdict['epoch_marker'] callbacks.callbacks = [] for cb in cdict['callbacks']: module = load_class(cb['type']) callbacks.callbacks.append(module(**cb['config'])) return callbacks def add_deconv_callback(self, train_set, valid_set, max_fm=16, dataset_pct=25): self.add_callback(DeconvCallback(train_set, valid_set, max_fm=max_fm, dataset_pct=dataset_pct)) def add_save_best_state_callback(self, path): self.add_callback(SaveBestStateCallback(path)) def add_watch_ticker_callback(self, valid): self.callbacks.append(WatchTickerCallback(self.model, valid)) def add_early_stop_callback(self, stop_func): self.add_callback(EarlyStopCallback(stop_func)) def add_hist_callback(self, plot_per_mini=False, filter_key=['W']): self.callbacks.append(HistCallback(plot_per_mini=plot_per_mini, filter_key=filter_key)) def add_callback(self, callback, insert_pos=None): if insert_pos is None: self.callbacks.append(callback) else: self.callbacks.insert(insert_pos, callback) def on_train_begin(self, epochs): config = self.callback_data.create_group('config') total_minibatches = math.ceil(self.model().ndata / self.be.bsz) * epochs config.attrs['total_minibatches'] = total_minibatches config.attrs['total_epochs'] = epochs time_markers = self.callback_data.create_group("time_markers") time_markers.create_dataset("minibatch", (epochs,)) if self.model_file: self.model().load_params(self.model_file) signal.signal(signal.SIGINT, self.on_sigint_catch) for c in self.callbacks: c.on_train_begin(self.callback_data, self.model(), epochs) def on_train_end(self): signal.signal(signal.SIGINT, signal.SIG_DFL) for c in self.callbacks: c.on_train_end(self.callback_data, self.model()) self.callback_data.close() def on_epoch_begin(self, epoch): for c in self.callbacks: if c.should_fire(self.callback_data, self.model(), epoch, c.epoch_freq): c.on_epoch_begin(self.callback_data, self.model(), epoch) def on_epoch_end(self, epoch): for c in self.callbacks: if c.should_fire(self.callback_data, self.model(), epoch, c.epoch_freq): c.on_epoch_end(self.callback_data, self.model(), epoch) self.epoch_marker += self.epoch_minibatches self.callback_data['time_markers/minibatch'][epoch] = self.epoch_marker self.callback_data['time_markers'].attrs['epochs_complete'] = epoch + 1 self.callback_data['time_markers'].attrs['minibatches_complete'] = self.epoch_marker 
self.callback_data.flush() def on_minibatch_begin(self, epoch, minibatch): for c in self.callbacks: if c.should_fire(self.callback_data, self.model(), minibatch, c.minibatch_freq): c.on_minibatch_begin(self.callback_data, self.model(), epoch, minibatch) def on_minibatch_end(self, epoch, minibatch): for c in self.callbacks: if c.should_fire(self.callback_data, self.model(), minibatch, c.minibatch_freq): c.on_minibatch_end(self.callback_data, self.model(), epoch, minibatch) self.epoch_minibatches = minibatch + 1 def on_sigint_catch(self, epoch, minibatch): signal.signal(signal.SIGINT, signal.SIG_DFL) if self.save_path is not None: save_obj(self.model().serialize(keep_states=True), self.save_path) raise KeyboardInterrupt('Checkpoint file saved to {0}'.format(self.save_path)) else: raise KeyboardInterrupt class Callback(NervanaObject): def __init__(self, epoch_freq=1, minibatch_freq=1): self.epoch_freq = epoch_freq self.minibatch_freq = minibatch_freq self.costnm = None def get_description(self): keys = inspect.getargspec(self.__init__)[0] keys.remove('self') skip = [] for key in keys: if isinstance(getattr(self, key), NervanaDataIterator): skip.append(key) pdict = super(Callback, self).get_description(skip=skip) for datap in skip: pdict['config'][datap] = {'type': 'Data', 'name': getattr(self, datap).name} return pdict def on_train_begin(self, callback_data, model, epochs): pass def on_train_end(self, callback_data, model): pass def on_epoch_begin(self, callback_data, model, epoch): pass def on_epoch_end(self, callback_data, model, epoch): pass def on_minibatch_begin(self, callback_data, model, epoch, minibatch): pass def on_minibatch_end(self, callback_data, model, epoch, minibatch): pass def should_fire(self, callback_data, model, time, freq): t, f = time, freq if ((type(f) is int and (t + 1) % f == 0) or (type(f) is list and t in f)): return True return False def _get_cached_epoch_loss(self, callback_data, model, epoch, label): if self.costnm is None: self.costnm = "Loss" if model.cost and hasattr(model.cost, 'costfunc'): self.costnm = model.cost.costfunc.__class__.__name__ + " " + self.costnm cost_key = 'cost/' + label time_key = 'time/' + label if cost_key not in callback_data: return None eval_freq = callback_data[cost_key].attrs['epoch_freq'] if (epoch + 1) % eval_freq == 0: return dict(cost=callback_data[cost_key][epoch // eval_freq], time=callback_data[time_key][epoch // eval_freq], costnm=self.costnm) class SerializeModelCallback(Callback): def __init__(self, save_path, epoch_freq=1, history=1): super(SerializeModelCallback, self).__init__(epoch_freq=epoch_freq) self.save_path = save_path self.history = history self.checkpoint_files = deque() def on_epoch_end(self, callback_data, model, epoch): if self.history > 1: self.save_history(epoch, model) else: save_obj(model.serialize(keep_states=True), self.save_path) def save_history(self, epoch, model): if len(self.checkpoint_files) > self.history: fn = self.checkpoint_files.popleft() try: os.remove(fn) logger.info('removed old checkpoint %s' % fn) except OSError: logger.warn('Could not delete old checkpoint file %s' % fn) path_split = os.path.splitext(self.save_path) save_path = '%s_%d%s' % (path_split[0], epoch, path_split[1]) self.checkpoint_files.append(save_path) save_obj(model.serialize(keep_states=True), save_path) try: if os.path.islink(self.save_path): os.remove(self.save_path) os.symlink(os.path.split(save_path)[-1], self.save_path) except OSError: logger.warn('Could not create latest model symlink %s -> %s' % (self.save_path, 
save_path)) class RunTimerCallback(Callback): def __init__(self): super(RunTimerCallback, self).__init__() def on_train_begin(self, callback_data, model, epochs): timing = callback_data.create_group("time/train") timing.create_dataset("start_time", (1,), dtype='float64') timing.create_dataset("end_time", (1,), dtype='float64') timing['start_time'][0] = time.time() timing['start_time'].attrs['units'] = 'seconds' def on_train_end(self, callback_data, model): callback_data['time/train/end_time'][0] = time.time() callback_data['time/train/end_time'].attrs['units'] = 'seconds' class TrainCostCallback(Callback): def __init__(self, wsz=10): super(TrainCostCallback, self).__init__(epoch_freq=1) self.wsz = wsz def on_train_begin(self, callback_data, model, epochs): points = callback_data['config'].attrs['total_minibatches'] callback_data.create_dataset("cost/train", (points,)) self.wsz = min(points, self.wsz) self.cost_history = deque([], maxlen=int(self.wsz)) callback_data['cost/train'].attrs['time_markers'] = 'minibatch' def on_minibatch_end(self, callback_data, model, epoch, minibatch): self.cost_history.append(model.cost.cost) mean_cost = sum(self.cost_history) / len(self.cost_history) mbstart = callback_data['time_markers/minibatch'][epoch - 1] if epoch > 0 else 0 callback_data['cost/train'][mbstart + minibatch] = mean_cost class TrainMulticostCallback(Callback): def __init__(self, wsz=10): super(TrainMulticostCallback, self).__init__(epoch_freq=1) self.wsz = wsz def on_train_begin(self, callback_data, model, epochs): assert isinstance(model.cost, Multicost), "Cost must be a Multicost" self.ncosts = len(model.cost.costs) self.ncosts_allbranches = sum([self.recursive_multicost_len(c) for c in model.cost.costs]) points = callback_data['config'].attrs['total_minibatches'] callback_data.create_dataset("multicost/train", (points, self.ncosts), dtype='float64') callback_data.create_dataset("multicost/train_allbranches", (points, self.ncosts_allbranches), dtype='float64') self.wsz = min(points, self.wsz) self.cost_history = deque([], maxlen=int(self.wsz)) callback_data['multicost/train'].attrs['time_markers'] = 'minibatch' callback_data['multicost/train_allbranches'].attrs['time_markers'] = 'minibatch' def on_minibatch_end(self, callback_data, model, epoch, minibatch): costs = np.array([c.cost for c in model.cost.costs]) self.cost_history.append(costs) mean_cost = sum(self.cost_history) / len(self.cost_history) mbstart = callback_data['time_markers/minibatch'][epoch-1] if epoch > 0 else 0 callback_data['multicost/train'][mbstart + minibatch, :] = mean_cost.squeeze() costs_allbranches = np.array([self.multicost_recurse(c) for c in model.cost.costs]) costs_allbranches = self.separate_branch_costs(costs_allbranches) callback_data['multicost/train_allbranches'][mbstart + minibatch, :] = costs_allbranches.squeeze() def multicost_recurse(self, x): if type(x) == Multicost: return [z for z in map(self.multicost_recurse, x.costs)] else: return x.cost def separate_branch_costs(self, x): x[0] -= np.sum([c[0] if type(c) == list else c for c in x[1:]]) for branch in x: if type(branch) == list: self.separate_branch_costs(branch) return np.array([item for sublist in x for item in sublist]) def recursive_multicost_len(self, item): if type(item) == Multicost: return sum(self.recursive_multicost_len(subitem) for subitem in item.costs) else: return 1 class LossCallback(Callback): def __init__(self, eval_set, epoch_freq=1): super(LossCallback, self).__init__(epoch_freq=epoch_freq) self.eval_set = eval_set self.loss = 
self.be.zeros((1, 1), dtype=np.float32) def on_train_begin(self, callback_data, model, epochs): callback_data.create_dataset("cost/loss", (epochs // self.epoch_freq,)) callback_data.create_dataset("time/loss", (epochs // self.epoch_freq,)) callback_data["cost/loss"].attrs['time_markers'] = 'epoch_freq' callback_data["cost/loss"].attrs['epoch_freq'] = self.epoch_freq def on_epoch_end(self, callback_data, model, epoch): start_loss = default_timer() nprocessed = 0 self.loss[:] = 0 self.eval_set.reset() for x, t in self.eval_set: x = model.fprop(x, inference=True) bsz = min(self.eval_set.ndata - nprocessed, self.be.bsz) model.cost.get_cost(x, t) nsteps = x.shape[1] // self.be.bsz if not isinstance(x, list) else x[0].shape[1] // self.be.bsz costbuf = model.cost.outputs[:, :bsz * nsteps] nprocessed += bsz self.loss[:] = self.loss + self.be.sum(costbuf, axis=1) / nsteps mean_cost = float(self.loss.get() / nprocessed) callback_data["time/loss"][epoch // self.epoch_freq] = (default_timer() - start_loss) callback_data["cost/loss"][epoch // self.epoch_freq] = mean_cost class MetricCallback(Callback): def __init__(self, eval_set, metric, epoch_freq=1): super(MetricCallback, self).__init__(epoch_freq=epoch_freq) if isinstance(metric, type) and issubclass(metric, Metric): raise ValueError(( 'metric passed in was the class {}. Pass an instance ' 'of this class instead.' ).format(metric)) self.eval_set = eval_set self.metric = metric self.metric_cnt = len(self.metric.metric_names) self.metric_desc = ", ".join(self.metric.metric_names) def on_train_begin(self, callback_data, model, epochs): if 'metrics' not in callback_data: callback_data.create_group("metrics") for met in self.metric.metric_names: group_name = "metrics/%s" % met callback_data.create_dataset(group_name, (epochs // self.epoch_freq,)) callback_data[group_name].attrs['time_markers'] = 'epoch_freq' callback_data[group_name].attrs['epoch_freq'] = self.epoch_freq def on_epoch_end(self, callback_data, model, epoch): if (epoch + 1) % self.epoch_freq == 0: self.eval_set.reset() stats = model.eval(self.eval_set, metric=self.metric) logger.info('%s: %s', self.metric_desc, ", ".join(map(str, stats.flatten()))) for ind, met in enumerate(self.metric.metric_names): callback_data["metrics/%s" % met][epoch // self.epoch_freq] = stats[ind] class MultiLabelStatsCallback(Callback): def __init__(self, eval_set, labels, metric, epoch_freq=1): super(MultiLabelStatsCallback, self).__init__(epoch_freq=epoch_freq) self.eval_set = eval_set self.metric = metric self.labels = labels self.metric_desc = ", ".join(self.metric.metric_names)
Apache License 2.0
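The callback above accumulates per-batch metric outputs, normalizes by the batch count, and prints one bracketed line per label. A NumPy-only sketch of that accumulate-and-report loop, using made-up labels, metric names, and batch statistics in place of the neon objects.

import numpy as np

labels = ["cat", "dog", "bird"]                        # illustrative labels
metric_names = ["precision", "recall"]                 # illustrative metric names
batch_stats = [np.random.rand(len(labels), len(metric_names)) for _ in range(4)]

running_stats = np.zeros((len(labels), len(metric_names)), dtype=np.float32)
for stats in batch_stats:                              # accumulate, then normalize by batch count
    running_stats += stats
running_stats /= len(batch_stats)

for i, label in enumerate(labels):
    line = " ".join("%s: %d%%" % (m, running_stats[i][k] * 100.0)
                    for k, m in enumerate(metric_names))
    print("[%s ] -> %s" % (line, label))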
yannforget/osmxtract
osmxtract/overpass.py
_bbox
python
def _bbox(lat_min, lon_min, lat_max, lon_max):
    return f'({lat_min},{lon_min},{lat_max},{lon_max})'
Format bounding box as a string as expected by the Overpass API.
https://github.com/yannforget/osmxtract/blob/e8659d8a62c72220711e1454896ed3642b78e276/osmxtract/overpass.py#L33-L35
import requests import geojson from .errors import ( OverpassBadRequest, OverpassMoved, OverpassTooManyRequests, OverpassGatewayTimeout ) def _make_case_insensitive(value): return f'[{ value[0].lower() }{ value[0].upper() }]{ value[1:] }'
MIT License
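Since the helper above is private to the module, the sketch below simply reimplements its formatting to show what the Overpass API expects: a south, west, north, east tuple in parentheses. The coordinates are illustrative.

def bbox(lat_min, lon_min, lat_max, lon_max):
    # Same formatting as _bbox above.
    return f'({lat_min},{lon_min},{lat_max},{lon_max})'

print(bbox(41.86, 12.46, 41.93, 12.53))   # (41.86,12.46,41.93,12.53)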
facelessuser/sublime-markdown-popups
st3/mdpopups/mdx/highlight.py
Highlight.get_extended_language
python
def get_extended_language(self, language):
    return self.extend_pygments_lang.get(language, (language, {}))
Get extended language.
https://github.com/facelessuser/sublime-markdown-popups/blob/aeb7586da26fe46b7764cf1e2832336bc306195d/st3/mdpopups/mdx/highlight.py#L376-L379
import re from ..markdown import Extension from ..markdown.treeprocessors import Treeprocessor import xml.etree.ElementTree as etree import copy from collections import OrderedDict try: from ..pygments import highlight from ..pygments.lexers import get_lexer_by_name, guess_lexer from ..pygments.formatters import find_formatter_class HtmlFormatter = find_formatter_class('html') pygments = True except ImportError: pygments = False CODE_WRAP = '<pre%s><code%s%s%s>%s</code></pre>' CODE_WRAP_ON_PRE = '<pre%s%s%s><code>%s</code></pre>' CLASS_ATTR = ' class="%s"' ID_ATTR = ' id="%s"' DEFAULT_CONFIG = { 'use_pygments': [ True, 'Use Pygments to highlight code blocks. ' 'Disable if using a JavaScript library. ' 'Default: True' ], 'guess_lang': [ False, "Automatic language detection - Default: False" ], 'css_class': [ 'highlight', "CSS class to apply to wrapper element." ], 'pygments_style': [ 'default', 'Pygments HTML Formatter Style ' '(color scheme) - Default: default' ], 'noclasses': [ False, 'Use inline styles instead of CSS classes - ' 'Default false' ], 'linenums': [ None, 'Display line numbers in block code output (not inline) - Default: False' ], 'linenums_style': [ 'table', 'Line number style -Default: "table"' ], 'linenums_special': [ -1, 'Globally make nth line special - Default: -1' ], 'linenums_class': [ "linenums", "Control the linenums class name when not using Pygments - Default: 'linenums'" ], 'extend_pygments_lang': [ [], 'Extend pygments language with special language entry - Default: {}' ], 'legacy_no_wrap_code': [ False, 'Do not wrap block code under pre elements with code elements - Default: False' ], 'language_prefix': [ 'language-', 'Controls the language prefix for non-Pygments code blocks. - Defaults: "language-"' ], 'code_attr_on_pre': [ False, "Attach attribute list values on pre element instead of code element - Default: False" ], '_enabled': [ True, 'Used internally to communicate if extension has been explicitly enabled - Default: False' ] } multi_space = re.compile(r'(?<= ) {2,}') def replace_nbsp(m): return '&nbsp;' * len(m.group(0)) if pygments: html_re = re.compile( r'''(?x) (?P<start><span [^<>]+>)|(?P<content>[^<>]+)|(?P<end></span>) ''' ) class InlineHtmlFormatter(HtmlFormatter): def wrap(self, source, outfile): return self._wrap_code(source) def _wrap_code(self, source): yield 0, '' for i, t in source: yield i, t.strip() yield 0, '' class BlockHtmlFormatter(HtmlFormatter): RE_SPAN_NUMS = re.compile(r'(<span[^>]*?)(class="[^"]*\blinenos?\b[^"]*)"([^>]*)>([^<]+)(</span>)') RE_TABLE_NUMS = re.compile(r'(<pre[^>]*>)(?!<span></span>)') def __init__(self, **options): self.pymdownx_inline = options.get('linenos', False) == 'pymdownx-inline' if self.pymdownx_inline: options['linenos'] = 'inline' HtmlFormatter.__init__(self, **options) def _format_custom_line(self, m): return ( m.group(1) + m.group(2) + '"' + m.group(3) + ' data-linenos="' + m.group(4).rstrip() + ' ">' + m.group(5) ) def _wrap_customlinenums(self, inner): for t, line in inner: if t: line = self.RE_SPAN_NUMS.sub(self._format_custom_line, line) yield t, line def wrap(self, source, outfile): if self.linenos == 2 and self.pymdownx_inline: source = self._wrap_customlinenums(source) return HtmlFormatter.wrap(self, source, outfile) def _wrap_tablelinenos(self, inner): for t, line in HtmlFormatter._wrap_tablelinenos(self, inner): yield t, self.RE_TABLE_NUMS.sub(r'\1<span></span>', line) class SublimeInlineHtmlFormatter(HtmlFormatter): def wrap(self, source, outfile): return self._wrap_code(source) def 
_wrap_code(self, source): yield 0, '' for i, t in source: text = '' matched = False for m in html_re.finditer(t): matched = True if m.group(1): text += m.group(1) elif m.group(3): text += m.group(3) else: text += multi_space.sub( replace_nbsp, m.group(2).replace('\t', ' ' * 4) ) if not matched: text = multi_space.sub( replace_nbsp, t.replace('\t', ' ' * 4) ) yield i, text yield 0, '' class SublimeBlockFormatter(BlockHtmlFormatter): def wrap(self, source, outfile): if self.linenos == 2 and self.pymdownx_inline: source = self._wrap_customlinenums(source) return self._wrap_code(source) def _wrap_code(self, source): yield 0, '<div class="%s"><pre>' % self.cssclass for i, t in source: text = '' matched = False for m in html_re.finditer(t): matched = True if m.group(1): text += m.group(1) elif m.group(3): text += m.group(3) else: text += m.group(2).replace('\t', ' ' * 4).replace(' ', '&nbsp;') if not matched: text = t.replace('\t', ' ' * 4).replace(' ', '&nbsp;') if i == 1: text += '<br>' yield i, text yield 0, '</pre></div>' class SublimeWrapBlockFormatter(BlockHtmlFormatter): def wrap(self, source, outfile): if self.linenos == 2 and self.pymdownx_inline: source = self._wrap_customlinenums(source) return self._wrap_code(source) def _wrap_code(self, source): yield 0, '<div class="%s"><pre>' % self.cssclass for i, t in source: text = '' matched = False for m in html_re.finditer(t): matched = True if m.group(1): text += m.group(1) elif m.group(3): text += m.group(3) else: text += multi_space.sub( replace_nbsp, m.group(2).replace('\t', ' ' * 4) ) if not matched: text = multi_space.sub( replace_nbsp, t.replace('\t', ' ' * 4) ) if i == 1: text += '<br>' yield i, text yield 0, '</pre></div>' class Highlight(object): def __init__( self, guess_lang=False, pygments_style='default', use_pygments=True, noclasses=False, extend_pygments_lang=None, linenums=None, linenums_special=-1, linenums_style='table', linenums_class='linenums', wrapcode=True, language_prefix='language-', code_attr_on_pre=False ): self.guess_lang = guess_lang self.pygments_style = pygments_style self.use_pygments = use_pygments self.noclasses = noclasses self.linenums = linenums self.linenums_style = linenums_style self.linenums_special = linenums_special self.linenums_class = linenums_class self.wrapcode = wrapcode self.language_prefix = language_prefix self.code_attr_on_pre = code_attr_on_pre self.sublime_hl = Highlight.sublime_hl self.sublime_wrap = Highlight.sublime_wrap if extend_pygments_lang is None: extend_pygments_lang = [] self.extend_pygments_lang = {} for language in extend_pygments_lang: if isinstance(language, (dict, OrderedDict)): name = language.get('name') if name is not None and name not in self.extend_pygments_lang: self.extend_pygments_lang[name] = [ language.get('lang'), language.get('options', {}) ] @classmethod def set_sublime_vars(cls, sublime_hl, sublime_wrap, plugin_map): cls.sublime_hl = sublime_hl cls.sublime_wrap = sublime_wrap cls.plugin_map = plugin_map
MIT License
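The method above is a plain dictionary lookup with a two-tuple fallback. A standalone sketch of that behaviour, with an illustrative mapping (the "console" → "pycon" entry is not taken from the record):

extend_pygments_lang = {
    "console": ("pycon", {"python3": True}),   # illustrative extension entry
}

def get_extended_language(language):
    # Fall back to the language itself with empty options, as the method above does.
    return extend_pygments_lang.get(language, (language, {}))

print(get_extended_language("console"))   # ('pycon', {'python3': True})
print(get_extended_language("rust"))      # ('rust', {})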
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1_csi_node.py
V1CSINode.spec
python
def spec(self):
    return self._spec
Gets the spec of this V1CSINode.  # noqa: E501

:return: The spec of this V1CSINode.  # noqa: E501
:rtype: V1CSINodeSpec
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1_csi_node.py#L137-L144
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1CSINode(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'api_version': 'str', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'spec': 'V1CSINodeSpec' } attribute_map = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'spec': 'spec' } def __init__(self, api_version=None, kind=None, metadata=None, spec=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._api_version = None self._kind = None self._metadata = None self._spec = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata self.spec = spec @property def api_version(self): return self._api_version @api_version.setter def api_version(self, api_version): self._api_version = api_version @property def kind(self): return self._kind @kind.setter def kind(self, kind): self._kind = kind @property def metadata(self): return self._metadata @metadata.setter def metadata(self, metadata): self._metadata = metadata @property
Apache License 2.0
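A hedged usage sketch for the generated model above: build a node with a spec and read it back through the property. It assumes the kubernetes_asyncio client package is installed, that the models are re-exported from the client package, and that V1CSINodeSpec accepts a drivers list, none of which is shown in the record.

from kubernetes_asyncio import client   # assumes the package is installed and re-exports the models

node = client.V1CSINode(spec=client.V1CSINodeSpec(drivers=[]))
print(node.spec)   # returned through the spec property shown above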
jest-community/jest-pytest
src/__tests__/integration/home-assistant/tests/components/binary_sensor/test_nx584.py
TestNX584SensorSetup.tearDown
python
def tearDown(self):
    self.hass.stop()
    self._mock_client.stop()
Stop everything that was started.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/tests/components/binary_sensor/test_nx584.py#L39-L42
import requests import unittest from unittest import mock from nx584 import client as nx584_client from homeassistant.components.binary_sensor import nx584 from homeassistant.setup import setup_component from tests.common import get_test_home_assistant class StopMe(Exception): pass class TestNX584SensorSetup(unittest.TestCase): def setUp(self): self.hass = get_test_home_assistant() self._mock_client = mock.patch.object(nx584_client, 'Client') self._mock_client.start() self.fake_zones = [ {'name': 'front', 'number': 1}, {'name': 'back', 'number': 2}, {'name': 'inside', 'number': 3}, ] client = nx584_client.Client.return_value client.list_zones.return_value = self.fake_zones client.get_version.return_value = '1.1'
MIT License
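The teardown above undoes what setUp started: it stops Home Assistant and the mock.patch patcher. A library-free sketch of that start-in-setUp / stop-in-tearDown pairing, patching an illustrative target instead of the nx584 client:

import json
import unittest
from unittest import mock

class PatchLifecycleTest(unittest.TestCase):
    def setUp(self):
        self._patcher = mock.patch.object(json, "loads")   # illustrative patch target
        self._patcher.start()

    def tearDown(self):
        # Mirror of tearDown above: stop everything that was started.
        self._patcher.stop()

    def test_target_is_patched(self):
        self.assertIsInstance(json.loads, mock.MagicMock)

if __name__ == "__main__":
    unittest.main()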
lithops-cloud/lithops
lithops/invokers.py
FaaSInvoker._invoke_job
python
def _invoke_job(self, job):
    if self.remote_invoker:
        return self._invoke_job_remote(job)

    if self.should_run is False:
        self.running_workers = 0
        self.should_run = True
        self._start_async_invokers()

    if self.running_workers < self.max_workers:
        free_workers = self.max_workers - self.running_workers
        total_direct = free_workers * job.chunksize
        callids = range(job.total_calls)
        callids_to_invoke_direct = callids[:total_direct]
        callids_to_invoke_nondirect = callids[total_direct:]

        ci = len(callids_to_invoke_direct)
        cz = job.chunksize
        consumed_workers = ci // cz + (ci % cz > 0)
        self.running_workers += consumed_workers

        logger.debug('ExecutorID {} | JobID {} - Free workers:'
                     ' {} - Going to run {} activations in {} workers'
                     .format(job.executor_id, job.job_id, free_workers,
                             len(callids_to_invoke_direct), consumed_workers))

        def _callback(future):
            future.result()

        invoke_futures = []
        for call_ids_range in iterchunks(callids_to_invoke_direct, job.chunksize):
            future = self.executor.submit(self._invoke_task, job, call_ids_range)
            future.add_done_callback(_callback)
            invoke_futures.append(future)

        if self.sync:
            [f.result() for f in invoke_futures]

        if callids_to_invoke_nondirect:
            logger.debug('ExecutorID {} | JobID {} - Putting remaining '
                         '{} function activations into pending queue'
                         .format(job.executor_id, job.job_id,
                                 len(callids_to_invoke_nondirect)))
            for call_ids_range in iterchunks(callids_to_invoke_nondirect, job.chunksize):
                self.pending_calls_q.put((job, call_ids_range))
    else:
        logger.debug('ExecutorID {} | JobID {} - Reached maximum {} '
                     'workers, queuing {} function activations'
                     .format(job.executor_id, job.job_id,
                             self.max_workers, job.total_calls))
        for call_ids_range in iterchunks(range(job.total_calls), job.chunksize):
            self.pending_calls_q.put((job, call_ids_range))
Normal Invocation
Use local threads to perform all the function invocations
https://github.com/lithops-cloud/lithops/blob/a274a0bc423e22b9a68834cac5d63130666a4ee8/lithops/invokers.py#L408-L464
import os import sys import time import random import queue import shutil import logging import threading from concurrent.futures import ThreadPoolExecutor from lithops.future import ResponseFuture from lithops.config import extract_storage_config from lithops.version import __version__ as lithops_version from lithops.utils import version_str, is_lithops_worker, iterchunks from lithops.constants import LOGGER_LEVEL, LOGS_DIR, LOCALHOST, SERVERLESS, STANDALONE from lithops.util.metrics import PrometheusExporter logger = logging.getLogger(__name__) def create_invoker(config, executor_id, internal_storage, compute_handler, job_monitor): if compute_handler.get_backend_type() == 'batch': return BatchInvoker( config, executor_id, internal_storage, compute_handler, job_monitor ) elif compute_handler.get_backend_type() == 'faas': return FaaSInvoker( config, executor_id, internal_storage, compute_handler, job_monitor ) class Invoker: def __init__(self, config, executor_id, internal_storage, compute_handler, job_monitor): log_level = logger.getEffectiveLevel() self.log_active = log_level != logging.WARNING self.log_level = LOGGER_LEVEL if not self.log_active else log_level self.config = config self.executor_id = executor_id self.storage_config = extract_storage_config(self.config) self.internal_storage = internal_storage self.compute_handler = compute_handler self.is_lithops_worker = is_lithops_worker() self.job_monitor = job_monitor prom_enabled = self.config['lithops'].get('telemetry', False) prom_config = self.config.get('prometheus', {}) self.prometheus = PrometheusExporter(prom_enabled, prom_config) self.mode = self.config['lithops']['mode'] self.backend = self.config['lithops']['backend'] self.customized_runtime = self.config['lithops'].get('customized_runtime', False) self.runtime_name = self.config[self.backend]['runtime'] self.max_workers = self.config[self.backend].get('max_workers') logger.debug(f'ExecutorID {self.executor_id} - Invoker initialized.' f' Max workers: {self.max_workers}') def select_runtime(self, job_id, runtime_memory): if self.mode == SERVERLESS: runtime_memory = runtime_memory or self.config[self.backend].get('runtime_memory') runtime_timeout = self.config[self.backend].get('runtime_timeout') elif self.mode == STANDALONE: runtime_memory = None runtime_timeout = self.config[STANDALONE]['hard_dismantle_timeout'] elif self.mode == LOCALHOST: runtime_memory = None runtime_timeout = None msg = ('ExecutorID {} | JobID {} - Selected Runtime: {} ' .format(self.executor_id, job_id, self.runtime_name)) msg = msg+'- {}MB'.format(runtime_memory) if runtime_memory else msg logger.info(msg) runtime_key = self.compute_handler.get_runtime_key(self.runtime_name, runtime_memory) runtime_meta = self.internal_storage.get_runtime_meta(runtime_key) if not runtime_meta: msg = 'Runtime {}'.format(self.runtime_name) msg = msg+' with {}MB'.format(runtime_memory) if runtime_memory else msg logger.info(msg+' is not yet installed') runtime_meta = self.compute_handler.create_runtime(self.runtime_name, runtime_memory, runtime_timeout) runtime_meta['runtime_timeout'] = runtime_timeout self.internal_storage.put_runtime_meta(runtime_key, runtime_meta) if lithops_version != runtime_meta['lithops_version']: raise Exception("Lithops version mismatch. 
Host version: {} - Runtime version: {}" .format(lithops_version, runtime_meta['lithops_version'])) py_local_version = version_str(sys.version_info) py_remote_version = runtime_meta['python_version'] if py_local_version != py_remote_version: raise Exception(("The indicated runtime '{}' is running Python {} and it " "is not compatible with the local Python version {}") .format(self.runtime_name, py_remote_version, py_local_version)) return runtime_meta def _create_payload(self, job): payload = {'config': self.config, 'chunksize': job.chunksize, 'log_level': self.log_level, 'func_key': job.func_key, 'data_key': job.data_key, 'extra_env': job.extra_env, 'total_calls': job.total_calls, 'execution_timeout': job.execution_timeout, 'data_byte_ranges': job.data_byte_ranges, 'executor_id': job.executor_id, 'job_id': job.job_id, 'job_key': job.job_key, 'max_workers': self.max_workers, 'call_ids': None, 'host_submit_tstamp': time.time(), 'lithops_version': lithops_version, 'runtime_name': job.runtime_name, 'runtime_memory': job.runtime_memory, 'worker_processes': job.worker_processes} return payload def _run_job(self, job): if self.customized_runtime: logger.debug('ExecutorID {} | JobID {} - Customized runtime activated' .format(job.executor_id, job.job_id)) job.runtime_name = self.runtime_name extend_runtime(job, self.compute_handler, self.internal_storage) self.runtime_name = job.runtime_name logger.info('ExecutorID {} | JobID {} - Starting function ' 'invocation: {}() - Total: {} activations' .format(job.executor_id, job.job_id, job.function_name, job.total_calls)) logger.debug('ExecutorID {} | JobID {} - Worker processes: {} - Chunksize: {}' .format(job.executor_id, job.job_id, job.worker_processes, job.chunksize)) self.prometheus.send_metric( name='job_total_calls', value=job.total_calls, type='counter', labels=( ('job_id_', job.job_key), ('function_name', job.function_name) ) ) self.prometheus.send_metric( name='job_runtime_memory', value=job.runtime_memory or 0, type='counter', labels=( ('job_id_', job.job_key), ('function_name', job.function_name) ) ) try: job.runtime_name = self.runtime_name self._invoke_job(job) except (KeyboardInterrupt, Exception) as e: self.stop() raise e log_file = os.path.join(LOGS_DIR, job.job_key+'.log') logger.info("ExecutorID {} | JobID {} - View execution logs at {}" .format(job.executor_id, job.job_id, log_file)) futures = [] for i in range(job.total_calls): call_id = "{:05d}".format(i) fut = ResponseFuture(call_id, job, job.metadata.copy(), self.storage_config) fut._set_state(ResponseFuture.State.Invoked) futures.append(fut) job.futures = futures return futures def stop(self): pass class BatchInvoker(Invoker): def __init__(self, config, executor_id, internal_storage, compute_handler, job_monitor): super().__init__(config, executor_id, internal_storage, compute_handler, job_monitor) self.compute_handler.init() def _invoke_job(self, job): payload = self._create_payload(job) payload['call_ids'] = ["{:05d}".format(i) for i in range(job.total_calls)] start = time.time() activation_id = self.compute_handler.invoke(payload) roundtrip = time.time() - start resp_time = format(round(roundtrip, 3), '.3f') logger.debug('ExecutorID {} | JobID {} - Job invoked ({}s) - Activation ID: {}' .format(job.executor_id, job.job_id, resp_time, activation_id or job.job_key)) def run_job(self, job): total_workers = job.total_calls // job.chunksize + (job.total_calls % job.chunksize > 0) if self.max_workers < total_workers: job.chunksize = job.total_calls // self.max_workers + 
(job.total_calls % self.max_workers > 0) futures = self._run_job(job) self.job_monitor.start(futures) return futures class FaaSInvoker(Invoker): ASYNC_INVOKERS = 2 def __init__(self, config, executor_id, internal_storage, compute_handler, job_monitor): super().__init__(config, executor_id, internal_storage, compute_handler, job_monitor) remote_invoker = self.config[self.backend].get('remote_invoker', False) self.remote_invoker = remote_invoker if not is_lithops_worker() else False self.invokers = [] self.ongoing_activations = 0 self.pending_calls_q = queue.Queue() self.should_run = False self.sync = is_lithops_worker() invoke_pool_threads = self.config[self.backend]['invoke_pool_threads'] self.executor = ThreadPoolExecutor(invoke_pool_threads) logger.debug('ExecutorID {} - Serverless invoker created'.format(self.executor_id)) def _start_async_invokers(self): def invoker_process(inv_id): logger.debug('ExecutorID {} - Async invoker {} started' .format(self.executor_id, inv_id)) with ThreadPoolExecutor(max_workers=250) as executor: while self.should_run: try: self.job_monitor.token_bucket_q.get() job, call_ids_range = self.pending_calls_q.get() except KeyboardInterrupt: break if self.should_run: executor.submit(self._invoke_task, job, call_ids_range) else: break logger.debug('ExecutorID {} - Async invoker {} finished' .format(self.executor_id, inv_id)) for inv_id in range(self.ASYNC_INVOKERS): p = threading.Thread(target=invoker_process, args=(inv_id,)) self.invokers.append(p) p.daemon = True p.start() def stop(self): if self.invokers: logger.debug('ExecutorID {} - Stopping async invokers' .format(self.executor_id)) self.should_run = False while not self.pending_calls_q.empty(): try: self.pending_calls_q.get(False) except Exception: pass for invoker in self.invokers: self.job_monitor.token_bucket_q.put('$') self.pending_calls_q.put((None, None)) self.invokers = [] def _invoke_task(self, job, call_ids_range): payload = self._create_payload(job) call_ids = ["{:05d}".format(i) for i in call_ids_range] payload['call_ids'] = call_ids if job.data_key: data_byte_ranges = [job.data_byte_ranges[int(call_id)] for call_id in call_ids] payload['data_byte_ranges'] = data_byte_ranges else: del payload['data_byte_ranges'] payload['data_byte_strs'] = [job.data_byte_strs[int(call_id)] for call_id in call_ids] start = time.time() activation_id = self.compute_handler.invoke(payload) roundtrip = time.time() - start resp_time = format(round(roundtrip, 3), '.3f') if not activation_id: time.sleep(random.randint(0, 5)) self.pending_calls_q.put((job, call_ids_range)) self.job_monitor.token_bucket_q.put('#') return logger.debug('ExecutorID {} | JobID {} - Calls {} invoked ({}s) - Activation' ' ID: {}'.format(job.executor_id, job.job_id, ', '.join(call_ids), resp_time, activation_id)) def _invoke_job_remote(self, job): start = time.time() payload = {} payload['config'] = self.config payload['log_level'] = self.log_level payload['runtime_name'] = job.runtime_name payload['runtime_memory'] = job.runtime_memory payload['remote_invoker'] = True payload['job'] = job.__dict__ activation_id = self.compute_handler.invoke(payload) roundtrip = time.time() - start resp_time = format(round(roundtrip, 3), '.3f') if activation_id: logger.debug('ExecutorID {} | JobID {} - Remote invoker call done ({}s) - Activation' ' ID: {}'.format(job.executor_id, job.job_id, resp_time, activation_id)) else: raise Exception('Unable to spawn remote invoker')
Apache License 2.0
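The scheduling logic above splits the call IDs into a batch invoked immediately (free workers × chunksize) and a remainder pushed to the pending queue. A dependency-free sketch of that split, with an illustrative chunking helper standing in for lithops.utils.iterchunks and made-up job numbers:

from itertools import islice

def iterchunks(iterable, size):
    # Stand-in for lithops.utils.iterchunks: yield fixed-size slices of call IDs.
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            break
        yield chunk

total_calls, chunksize = 25, 4            # illustrative job parameters
max_workers, running_workers = 5, 2       # illustrative executor state

free_workers = max_workers - running_workers
total_direct = free_workers * chunksize
direct = range(total_calls)[:total_direct]
queued = range(total_calls)[total_direct:]
consumed_workers = len(direct) // chunksize + (len(direct) % chunksize > 0)

print(f"invoke {len(direct)} calls now in {consumed_workers} workers, queue {len(queued)}")
for call_ids_range in iterchunks(direct, chunksize):
    print("invoke:", call_ids_range)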
yxuansu/hcl
SABERT/transformer.py
LearnedPositionalEmbedding.forward
python
def forward(self, input, offset=0):
    seq_len, bsz = input.size()
    positions = (offset + torch.arange(seq_len))
    if input.is_cuda:
        positions = positions.cuda(input.get_device())
    res = self.weights(positions).unsqueeze(1).expand(-1, bsz, -1)
    return res
Input is expected to be of size [seq_len x bsz].
https://github.com/yxuansu/hcl/blob/b7a2871e8006d148958bb8dc65670d0491f6a446/SABERT/transformer.py#L225-L232
import torch from torch import nn from torch.nn import Parameter import torch.nn.functional as F import math class TransformerLayer(nn.Module): def __init__(self, embed_dim, ff_embed_dim, num_heads, dropout, with_external=False, weights_dropout = True): super(TransformerLayer, self).__init__() self.self_attn = MultiheadAttention(embed_dim, num_heads, dropout, weights_dropout) self.fc1 = nn.Linear(embed_dim, ff_embed_dim) self.fc2 = nn.Linear(ff_embed_dim, embed_dim) self.attn_layer_norm = nn.LayerNorm(embed_dim) self.ff_layer_norm = nn.LayerNorm(embed_dim) self.with_external = with_external self.dropout = dropout if self.with_external: self.external_attn = MultiheadAttention(embed_dim, num_heads, dropout, weights_dropout) self.external_layer_norm = nn.LayerNorm(embed_dim) self.reset_parameters() def reset_parameters(self): nn.init.xavier_uniform_(self.fc1.weight) nn.init.xavier_uniform_(self.fc2.weight) nn.init.constant_(self.fc1.bias, 0.) nn.init.constant_(self.fc2.bias, 0.) def forward(self, x, kv = None, self_padding_mask = None, self_attn_mask = None, external_memories = None, external_padding_mask=None, need_weights = False): residual = x if kv is None: x, self_attn = self.self_attn(query=x, key=x, value=x, key_padding_mask=self_padding_mask, attn_mask=self_attn_mask, need_weights = need_weights) else: x, self_attn = self.self_attn(query=x, key=kv, value=kv, key_padding_mask=self_padding_mask, attn_mask=self_attn_mask, need_weights = need_weights) x = F.dropout(x, p=self.dropout, training=self.training) x = self.attn_layer_norm(residual + x) if self.with_external: residual = x x, external_attn = self.external_attn(query=x, key=external_memories, value=external_memories, key_padding_mask=external_padding_mask, need_weights = need_weights) x = F.dropout(x, p=self.dropout, training=self.training) x = self.external_layer_norm(residual + x) else: external_attn = None residual = x x = F.relu(self.fc1(x)) x = F.dropout(x, p=self.dropout, training=self.training) x = self.fc2(x) x = F.dropout(x, p=self.dropout, training=self.training) x = self.ff_layer_norm(residual + x) return x, self_attn, external_attn class MultiheadAttention(nn.Module): def __init__(self, embed_dim, num_heads, dropout=0., weights_dropout=True): super(MultiheadAttention, self).__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = dropout self.head_dim = embed_dim // num_heads assert self.head_dim * num_heads == self.embed_dim, "embed_dim must be divisible by num_heads" self.scaling = self.head_dim ** -0.5 self.in_proj_weight = Parameter(torch.Tensor(3 * embed_dim, embed_dim)) self.in_proj_bias = Parameter(torch.Tensor(3 * embed_dim)) self.out_proj = nn.Linear(embed_dim, embed_dim, bias=True) self.weights_dropout = weights_dropout self.reset_parameters() def reset_parameters(self): nn.init.xavier_uniform_(self.in_proj_weight) nn.init.xavier_uniform_(self.out_proj.weight) nn.init.constant_(self.in_proj_bias, 0.) nn.init.constant_(self.out_proj.bias, 0.) 
def forward(self, query, key, value, key_padding_mask=None, attn_mask=None, need_weights=False): qkv_same = query.data_ptr() == key.data_ptr() == value.data_ptr() kv_same = key.data_ptr() == value.data_ptr() tgt_len, bsz, embed_dim = query.size() assert key.size() == value.size() if qkv_same: q, k, v = self.in_proj_qkv(query) elif kv_same: q = self.in_proj_q(query) k, v = self.in_proj_kv(key) else: q = self.in_proj_q(query) k = self.in_proj_k(key) v = self.in_proj_v(value) q *= self.scaling q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1) k = k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1) v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1) src_len = k.size(1) attn_weights = torch.bmm(q, k.transpose(1, 2)) assert list(attn_weights.size()) == [bsz * self.num_heads, tgt_len, src_len] if attn_mask is not None: attn_weights = attn_weights.masked_fill( attn_mask.unsqueeze(0), float('-inf') ) if key_padding_mask is not None: attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights.masked_fill_( key_padding_mask.transpose(0, 1).unsqueeze(1).unsqueeze(2), float('-inf') ) attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len) attn_weights = F.softmax(attn_weights, dim=-1) if self.weights_dropout: attn_weights = F.dropout(attn_weights, p=self.dropout, training=self.training) attn = torch.bmm(attn_weights, v) if not self.weights_dropout: attn = F.dropout(attn, p=self.dropout, training=self.training) assert list(attn.size()) == [bsz * self.num_heads, tgt_len, self.head_dim] attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim) attn = self.out_proj(attn) if need_weights: attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) attn_weights, _ = attn_weights.max(dim=1) attn_weights = attn_weights.transpose(0, 1) else: attn_weights = None return attn, (attn_weights, v) def in_proj_qkv(self, query): return self._in_proj(query).chunk(3, dim=-1) def in_proj_kv(self, key): return self._in_proj(key, start=self.embed_dim).chunk(2, dim=-1) def in_proj_q(self, query): return self._in_proj(query, end=self.embed_dim) def in_proj_k(self, key): return self._in_proj(key, start=self.embed_dim, end=2 * self.embed_dim) def in_proj_v(self, value): return self._in_proj(value, start=2 * self.embed_dim) def _in_proj(self, input, start=0, end=None): weight = self.in_proj_weight bias = self.in_proj_bias weight = weight[start:end, :] if bias is not None: bias = bias[start:end] return F.linear(input, weight, bias) def Embedding(num_embeddings, embedding_dim, padding_idx): m = nn.Embedding(num_embeddings, embedding_dim, padding_idx=padding_idx) nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5) nn.init.constant_(m.weight[padding_idx], 0) return m def Seg_Embedding(num_embeddings, embedding_dim): m = nn.Embedding(num_embeddings, embedding_dim) nn.init.normal_(m.weight, mean=0, std=embedding_dim ** -0.5) return m class SelfAttentionMask(nn.Module): def __init__(self, init_size = 100, device = 0): super(SelfAttentionMask, self).__init__() self.weights = SelfAttentionMask.get_mask(init_size) self.device = device @staticmethod def get_mask(size): weights = torch.triu(torch.ones((size, size), dtype = torch.uint8), 1) return weights def forward(self, size): if self.weights is None or size > self.weights.size(0): self.weights = SelfAttentionMask.get_mask(size) res = self.weights[:size,:size].cuda(self.device).detach() return res class 
LearnedPositionalEmbedding(nn.Module): def __init__(self, embedding_dim, init_size=1024): super(LearnedPositionalEmbedding, self).__init__() self.weights = nn.Embedding(init_size, embedding_dim) self.reset_parameters() def reset_parameters(self): nn.init.normal_(self.weights.weight, std=0.02)
MIT License
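A short CPU-only sketch of the forward pass above: look up one embedding per position and broadcast it across the batch dimension. The sizes are illustrative and the CUDA branch from the record is dropped.

import torch
from torch import nn

weights = nn.Embedding(1024, 16)          # illustrative init_size and embedding_dim
seq_len, bsz, offset = 5, 2, 0

positions = offset + torch.arange(seq_len)
out = weights(positions).unsqueeze(1).expand(-1, bsz, -1)
print(out.shape)                          # torch.Size([5, 2, 16])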