Columns (name, dtype, observed value lengths or class count):

  repository_name      string, lengths 7 to 107
  function_path        string, lengths 4 to 190
  function_identifier  string, lengths 1 to 236
  language             string, 1 class
  function             string, lengths 9 to 647k
  docstring            string, lengths 5 to 488k
  function_url         string, lengths 71 to 285
  context              string, lengths 0 to 2.51M
  license              string, 5 classes
trevor/calendarserver
txdav/who/delegates.py
removeDelegate
python
def removeDelegate(txn, delegator, delegate, readWrite):
    if delegate.recordType == BaseRecordType.group:
        (
            groupID, _ignore_name, _ignore_membershipHash,
            _ignore_modified, _ignore_extant
        ) = yield txn.groupByUID(delegate.uid)
        yield txn.removeDelegateGroup(delegator.uid, groupID, readWrite)
    else:
        yield txn.removeDelegate(delegator.uid, delegate.uid, readWrite)
Removes "delegate" as a delegate of "delegator". The type of access is specified by the "readWrite" parameter. @param delegator: the delegator's directory record @type delegator: L{IDirectoryRecord} @param delegate: the delegate's directory record @type delegate: L{IDirectoryRecord} @param readWrite: if True, read and write access is revoked; read-only access otherwise
https://github.com/trevor/calendarserver/blob/c9970b06a70445ca75b62e3d170c26bc897a035e/txdav/who/delegates.py#L252-L274
from twisted.python.constants import Names, NamedConstant from twisted.internet.defer import inlineCallbacks, returnValue, succeed from twext.python.log import Logger from twext.who.idirectory import ( RecordType as BaseRecordType, FieldName, NotAllowedError ) from twext.who.directory import ( DirectoryService as BaseDirectoryService, DirectoryRecord as BaseDirectoryRecord ) from twext.who.expression import MatchExpression, MatchType log = Logger() class RecordType(Names): readDelegateGroup = NamedConstant() readDelegateGroup.description = u"read-delegate-group" writeDelegateGroup = NamedConstant() writeDelegateGroup.description = u"write-delegate-group" readDelegatorGroup = NamedConstant() readDelegatorGroup.description = u"read-delegator-group" writeDelegatorGroup = NamedConstant() writeDelegatorGroup.description = u"write-delegator-group" class DirectoryRecord(BaseDirectoryRecord): @inlineCallbacks def members(self, expanded=False): parentUID, _ignore_proxyType = self.uid.split(u"#") @inlineCallbacks def _members(txn): if self.recordType in ( RecordType.readDelegateGroup, RecordType.writeDelegateGroup ): readWrite = (self.recordType is RecordType.writeDelegateGroup) delegateUIDs = ( yield txn.delegates(parentUID, readWrite, expanded=expanded) ) else: readWrite = (self.recordType is RecordType.writeDelegatorGroup) delegateUIDs = ( yield txn.delegators(parentUID, readWrite) ) returnValue(delegateUIDs) delegateUIDs = yield self.service._store.inTransaction( "DirectoryRecord.members", _members ) records = [] for uid in delegateUIDs: if uid != parentUID: record = yield self.service._masterDirectory.recordWithUID(uid) if record is not None: records.append(record) returnValue(records) @inlineCallbacks def setMembers(self, memberRecords): if self.recordType not in ( RecordType.readDelegateGroup, RecordType.writeDelegateGroup ): raise NotAllowedError("Setting members not supported") parentUID, _ignore_proxyType = self.uid.split(u"#") readWrite = (self.recordType is RecordType.writeDelegateGroup) log.debug( "Setting delegate assignments for {u} ({rw}) to {m}", u=parentUID, rw=("write" if readWrite else "read"), m=[r.uid for r in memberRecords] ) @inlineCallbacks def _setMembers(txn): yield txn.removeDelegates(parentUID, readWrite) yield txn.removeDelegateGroups(parentUID, readWrite) delegator = ( yield self.service._masterDirectory.recordWithUID(parentUID) ) for delegate in memberRecords: yield addDelegate(txn, delegator, delegate, readWrite) yield self.service._store.inTransaction( "DirectoryRecord.setMembers", _setMembers ) def recordTypeToProxyType(recordType): return { RecordType.readDelegateGroup: "calendar-proxy-read", RecordType.writeDelegateGroup: "calendar-proxy-write", RecordType.readDelegatorGroup: "calendar-proxy-read-for", RecordType.writeDelegatorGroup: "calendar-proxy-write-for", }.get(recordType, None) def proxyTypeToRecordType(proxyType): return { "calendar-proxy-read": RecordType.readDelegateGroup, "calendar-proxy-write": RecordType.writeDelegateGroup, "calendar-proxy-read-for": RecordType.readDelegatorGroup, "calendar-proxy-write-for": RecordType.writeDelegatorGroup, }.get(proxyType, None) class DirectoryService(BaseDirectoryService): recordType = RecordType def __init__(self, realmName, store): BaseDirectoryService.__init__(self, realmName) self._store = store self._masterDirectory = None def setMasterDirectory(self, masterDirectory): self._masterDirectory = masterDirectory def recordWithShortName(self, recordType, shortName): uid = shortName + "#" + 
recordTypeToProxyType(recordType) record = DirectoryRecord(self, { FieldName.uid: uid, FieldName.recordType: recordType, FieldName.shortNames: (uid,), }) return succeed(record) def recordWithUID(self, uid): if "#" not in uid: return succeed(None) uid, proxyType = uid.split("#") recordType = proxyTypeToRecordType(proxyType) if recordType is None: return succeed(None) return self.recordWithShortName(recordType, uid) @inlineCallbacks def recordsFromExpression(self, expression, recordTypes=None, records=None): if isinstance(expression, MatchExpression): if( (expression.fieldName is FieldName.uid) and (expression.matchType is MatchType.equals) and ("#" in expression.fieldValue) ): record = yield self.recordWithUID(expression.fieldValue) if record is not None: returnValue((record,)) returnValue(()) @inlineCallbacks def addDelegate(txn, delegator, delegate, readWrite): if delegate.recordType == BaseRecordType.group: ( groupID, _ignore_name, _ignore_membershipHash, _ignore_modified, _ignore_extant ) = yield txn.groupByUID( delegate.uid ) yield txn.addDelegateGroup(delegator.uid, groupID, readWrite) else: yield txn.addDelegate(delegator.uid, delegate.uid, readWrite) @inlineCallbacks
Apache License 2.0
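A small standalone sketch of the "<uid>#<proxy-type>" naming convention that the delegates.py context above relies on (recordWithUID splits the UID on "#" and maps the proxy type back to a record type). The dictionary values are copied from proxyTypeToRecordType; the helper name and the sample UIDs are invented for illustration and are not part of CalendarServer.

PROXY_RECORD_TYPE_BY_NAME = {
    "calendar-proxy-read": "readDelegateGroup",
    "calendar-proxy-write": "writeDelegateGroup",
    "calendar-proxy-read-for": "readDelegatorGroup",
    "calendar-proxy-write-for": "writeDelegatorGroup",
}


def split_proxy_uid(uid):
    # Mirrors DirectoryService.recordWithUID: reject UIDs without a "#",
    # otherwise split into (delegator UID, delegate record type name).
    if "#" not in uid:
        return None
    parent_uid, proxy_type = uid.split("#")
    record_type = PROXY_RECORD_TYPE_BY_NAME.get(proxy_type)
    if record_type is None:
        return None
    return parent_uid, record_type


print(split_proxy_uid("ABC123#calendar-proxy-write"))  # ('ABC123', 'writeDelegateGroup')
print(split_proxy_uid("ABC123"))                       # None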
facebookresearch/nevergrad
nevergrad/parametrization/_datalayers.py
BoundLayer.get_normalized_value
python
def get_normalized_value(self) -> np.ndarray:
    value = self._layers[self._layer_index]._layered_get_value()
    return self._normalizer().forward(value)
Gets a value normalized between 0 and 1
https://github.com/facebookresearch/nevergrad/blob/1981997603e361b1fd5b5e2aeb8173c4eae6aef0/nevergrad/parametrization/_datalayers.py#L133-L136
import warnings import functools import numpy as np import nevergrad.common.typing as tp from nevergrad.common import errors from . import _layering from ._layering import Int as Int from . import data as _data from .data import Data from .core import Parameter from . import discretization from . import transforms as trans from . import utils D = tp.TypeVar("D", bound=Data) Op = tp.TypeVar("Op", bound="Operation") BL = tp.TypeVar("BL", bound="BoundLayer") class Operation(_layering.Layered, _layering.Filterable): _LAYER_LEVEL = _layering.Level.OPERATION _LEGACY = False def __init__(self, *args: tp.Any, **kwargs: tp.Any) -> None: super().__init__() if any(isinstance(x, Parameter) for x in args + tuple(kwargs.values())): raise errors.NevergradTypeError("Operation with Parameter instances are not supported") class BoundLayer(Operation): _LAYER_LEVEL = _layering.Level.OPERATION def __init__( self, lower: tp.BoundValue = None, upper: tp.BoundValue = None, uniform_sampling: tp.Optional[bool] = None, ) -> None: super().__init__(lower, upper, uniform_sampling) self.bounds: tp.Tuple[tp.Optional[np.ndarray], tp.Optional[np.ndarray]] = tuple( None if a is None else trans.bound_to_array(a) for a in (lower, upper) ) both_bounds = all(b is not None for b in self.bounds) self.uniform_sampling: bool = uniform_sampling if uniform_sampling is None: self.uniform_sampling = both_bounds if self.uniform_sampling and not both_bounds: raise errors.NevergradValueError("Cannot use full range sampling if both bounds are not set") if not (lower is None or upper is None): if (self.bounds[0] >= self.bounds[1]).any(): raise errors.NevergradValueError( f"Lower bounds {lower} should be strictly smaller than upper bounds {upper}" ) def _normalizer(self) -> trans.Transform: if any(b is None for b in self.bounds): raise RuntimeError("Cannot use normalized value for not-fully bounded Parameter") return trans.Affine(self.bounds[1] - self.bounds[0], self.bounds[0]).reverted() def __call__(self, data: D, inplace: bool = False) -> D: new = data if inplace else data.copy() value = new.value new.add_layer(self.copy()) try: new.value = value except ValueError as e: raise errors.NevergradValueError( "Current value is not within bounds, please update it first" ) from e if all(x is not None for x in self.bounds): tests = [data.copy() for _ in range(2)] with warnings.catch_warnings(): warnings.simplefilter("ignore", category=RuntimeWarning) for test, bound in zip(tests, self.bounds): val = bound * np.ones(value.shape) if isinstance(value, np.ndarray) else bound[0] test.value = val state = tests[0].get_standardized_data(reference=tests[1]) min_dist = np.min(np.abs(state)) if min_dist < 3.0: warnings.warn( f"Bounds are {min_dist} sigma away from each other at the closest, " "you should aim for at least 3 for better quality.", errors.NevergradRuntimeWarning, ) return new def _layered_sample(self) -> Data: if not self.uniform_sampling: return super()._layered_sample() root = self._layers[0] if not isinstance(root, Data): raise errors.NevergradTypeError(f"BoundLayer {self} on a non-Data root {root}") shape = super()._layered_get_value().shape child = root.spawn_child() new_val = self.random_state.uniform(size=shape) del child.value child._layers[self._layer_index].set_normalized_value(new_val) return child def set_normalized_value(self, value: np.ndarray) -> None: new_val = self._normalizer().backward(value) self._layers[self._layer_index]._layered_set_value(new_val)
MIT License
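As a rough illustration of what get_normalized_value computes: BoundLayer._normalizer builds trans.Affine(upper - lower, lower).reverted(), so for fully bounded data the normalization reduces to the affine map sketched below with NumPy. The bounds and the value are made-up numbers, not anything taken from nevergrad's defaults.

import numpy as np

# Hedged sketch: with both bounds set, get_normalized_value effectively maps
# each coordinate of the value into [0, 1] via (value - lower) / (upper - lower).
lower = np.array([-2.0, 0.0])
upper = np.array([2.0, 10.0])
value = np.array([0.0, 2.5])

normalized = (value - lower) / (upper - lower)
print(normalized)  # [0.5  0.25]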
lateral/hyperplane-hasher
key_value_store.py
KeyValueStore.get_set
python
def get_set(self, set_id): pass
Returns a copy of the set at id 'set_id'. Raises KeyError if 'set_id' is unknown.
https://github.com/lateral/hyperplane-hasher/blob/41c0969087f30cb0c5876f860d234028e44692d9/key_value_store.py#L81-L84
import numpy as np


class KeyValueStore(object):

    def get_vector_ids(self):
        pass

    def get_int_ids(self):
        pass

    def get_set_ids(self):
        pass

    def store_vector(self, vec_id, vector):
        pass

    def bulk_store_vector(self, vec_ids, vectors):
        if len(vec_ids) != len(vectors):
            raise ValueError
        for vec_id, vector in zip(vec_ids, vectors):
            self.store_vector(vec_id, vector)

    def get_vector(self, vec_id):
        pass

    def bulk_get_vector(self, vec_ids):
        pass

    def remove_vector(self, vec_id):
        pass

    def add_to_set(self, set_id, element_id):
        pass

    def bulk_add_to_set(self, set_ids, element_ids):
        if len(set_ids) != len(element_ids):
            raise ValueError
        for set_id, element_id in zip(set_ids, element_ids):
            self.add_to_set(set_id, element_id)

    def remove_from_set(self, set_id, element_id):
        pass

    def remove_set(self, set_id):
        pass
MIT License
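The class above is an abstract interface, so a minimal, hypothetical in-memory backend helps make the documented get_set contract concrete (return a copy, raise KeyError for an unknown id). Nothing below is part of the hyperplane-hasher package; it only mirrors a few of the method names.

class InMemoryKeyValueStore(object):
    """Illustrative dict-backed store implementing a slice of the interface."""

    def __init__(self):
        self._sets = {}

    def add_to_set(self, set_id, element_id):
        self._sets.setdefault(set_id, set()).add(element_id)

    def get_set(self, set_id):
        # Return a copy; a missing 'set_id' raises KeyError, as documented.
        return set(self._sets[set_id])

    def remove_set(self, set_id):
        del self._sets[set_id]


store = InMemoryKeyValueStore()
store.add_to_set("bucket_1", "vec_42")
print(store.get_set("bucket_1"))   # {'vec_42'}
try:
    store.get_set("missing")
except KeyError:
    print("unknown set_id raises KeyError")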
opennetworkingfoundation/tapi
RI/flask_server/tapi_server/models/tapi_topology_node.py
TapiTopologyNode.latency_characteristic
python
def latency_characteristic(self, latency_characteristic): self._latency_characteristic = latency_characteristic
Sets the latency_characteristic of this TapiTopologyNode. The effect on the latency of a queuing process. This only has significant effect for packet based systems and has a complex characteristic. # noqa: E501 :param latency_characteristic: The latency_characteristic of this TapiTopologyNode. :type latency_characteristic: List[TapiTopologyLatencyCharacteristic]
https://github.com/opennetworkingfoundation/tapi/blob/1f3fd9483d5674552c5a31206c97399c8c151897/RI/flask_server/tapi_server/models/tapi_topology_node.py#L482-L491
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from tapi_server.models.base_model_ import Model from tapi_server.models.tapi_common_admin_state_pac import TapiCommonAdminStatePac from tapi_server.models.tapi_common_administrative_state import TapiCommonAdministrativeState from tapi_server.models.tapi_common_capacity import TapiCommonCapacity from tapi_server.models.tapi_common_capacity_pac import TapiCommonCapacityPac from tapi_server.models.tapi_common_global_class import TapiCommonGlobalClass from tapi_server.models.tapi_common_layer_protocol_name import TapiCommonLayerProtocolName from tapi_server.models.tapi_common_lifecycle_state import TapiCommonLifecycleState from tapi_server.models.tapi_common_name_and_value import TapiCommonNameAndValue from tapi_server.models.tapi_common_operational_state import TapiCommonOperationalState from tapi_server.models.tapi_topology_cost_characteristic import TapiTopologyCostCharacteristic from tapi_server.models.tapi_topology_latency_characteristic import TapiTopologyLatencyCharacteristic from tapi_server.models.tapi_topology_node_edge_point import TapiTopologyNodeEdgePoint from tapi_server.models.tapi_topology_node_edge_point_ref import TapiTopologyNodeEdgePointRef from tapi_server.models.tapi_topology_node_rule_group import TapiTopologyNodeRuleGroup from tapi_server.models.tapi_topology_topology_ref import TapiTopologyTopologyRef from tapi_server.models.tapi_topology_transfer_cost_pac import TapiTopologyTransferCostPac from tapi_server.models.tapi_topology_transfer_integrity_pac import TapiTopologyTransferIntegrityPac from tapi_server.models.tapi_topology_transfer_timing_pac import TapiTopologyTransferTimingPac from tapi_server import util class TapiTopologyNode(Model): def __init__(self, operational_state=None, lifecycle_state=None, administrative_state=None, available_capacity=None, total_potential_capacity=None, name=None, uuid=None, cost_characteristic=None, error_characteristic=None, unavailable_time_characteristic=None, server_integrity_process_characteristic=None, delivery_order_characteristic=None, repeat_delivery_characteristic=None, loss_characteristic=None, latency_characteristic=None, layer_protocol_name=None, encap_topology=None, owned_node_edge_point=None, node_rule_group=None, aggregated_node_edge_point=None): self.openapi_types = { 'operational_state': TapiCommonOperationalState, 'lifecycle_state': TapiCommonLifecycleState, 'administrative_state': TapiCommonAdministrativeState, 'available_capacity': TapiCommonCapacity, 'total_potential_capacity': TapiCommonCapacity, 'name': List[TapiCommonNameAndValue], 'uuid': str, 'cost_characteristic': List[TapiTopologyCostCharacteristic], 'error_characteristic': str, 'unavailable_time_characteristic': str, 'server_integrity_process_characteristic': str, 'delivery_order_characteristic': str, 'repeat_delivery_characteristic': str, 'loss_characteristic': str, 'latency_characteristic': List[TapiTopologyLatencyCharacteristic], 'layer_protocol_name': List[TapiCommonLayerProtocolName], 'encap_topology': TapiTopologyTopologyRef, 'owned_node_edge_point': List[TapiTopologyNodeEdgePoint], 'node_rule_group': List[TapiTopologyNodeRuleGroup], 'aggregated_node_edge_point': List[TapiTopologyNodeEdgePointRef] } self.attribute_map = { 'operational_state': 'operational-state', 'lifecycle_state': 'lifecycle-state', 'administrative_state': 'administrative-state', 'available_capacity': 'available-capacity', 'total_potential_capacity': 'total-potential-capacity', 
'name': 'name', 'uuid': 'uuid', 'cost_characteristic': 'cost-characteristic', 'error_characteristic': 'error-characteristic', 'unavailable_time_characteristic': 'unavailable-time-characteristic', 'server_integrity_process_characteristic': 'server-integrity-process-characteristic', 'delivery_order_characteristic': 'delivery-order-characteristic', 'repeat_delivery_characteristic': 'repeat-delivery-characteristic', 'loss_characteristic': 'loss-characteristic', 'latency_characteristic': 'latency-characteristic', 'layer_protocol_name': 'layer-protocol-name', 'encap_topology': 'encap-topology', 'owned_node_edge_point': 'owned-node-edge-point', 'node_rule_group': 'node-rule-group', 'aggregated_node_edge_point': 'aggregated-node-edge-point' } self._operational_state = operational_state self._lifecycle_state = lifecycle_state self._administrative_state = administrative_state self._available_capacity = available_capacity self._total_potential_capacity = total_potential_capacity self._name = name self._uuid = uuid self._cost_characteristic = cost_characteristic self._error_characteristic = error_characteristic self._unavailable_time_characteristic = unavailable_time_characteristic self._server_integrity_process_characteristic = server_integrity_process_characteristic self._delivery_order_characteristic = delivery_order_characteristic self._repeat_delivery_characteristic = repeat_delivery_characteristic self._loss_characteristic = loss_characteristic self._latency_characteristic = latency_characteristic self._layer_protocol_name = layer_protocol_name self._encap_topology = encap_topology self._owned_node_edge_point = owned_node_edge_point self._node_rule_group = node_rule_group self._aggregated_node_edge_point = aggregated_node_edge_point @classmethod def from_dict(cls, dikt) -> 'TapiTopologyNode': return util.deserialize_model(dikt, cls) @property def operational_state(self): return self._operational_state @operational_state.setter def operational_state(self, operational_state): self._operational_state = operational_state @property def lifecycle_state(self): return self._lifecycle_state @lifecycle_state.setter def lifecycle_state(self, lifecycle_state): self._lifecycle_state = lifecycle_state @property def administrative_state(self): return self._administrative_state @administrative_state.setter def administrative_state(self, administrative_state): self._administrative_state = administrative_state @property def available_capacity(self): return self._available_capacity @available_capacity.setter def available_capacity(self, available_capacity): self._available_capacity = available_capacity @property def total_potential_capacity(self): return self._total_potential_capacity @total_potential_capacity.setter def total_potential_capacity(self, total_potential_capacity): self._total_potential_capacity = total_potential_capacity @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def uuid(self): return self._uuid @uuid.setter def uuid(self, uuid): self._uuid = uuid @property def cost_characteristic(self): return self._cost_characteristic @cost_characteristic.setter def cost_characteristic(self, cost_characteristic): self._cost_characteristic = cost_characteristic @property def error_characteristic(self): return self._error_characteristic @error_characteristic.setter def error_characteristic(self, error_characteristic): self._error_characteristic = error_characteristic @property def unavailable_time_characteristic(self): return 
self._unavailable_time_characteristic @unavailable_time_characteristic.setter def unavailable_time_characteristic(self, unavailable_time_characteristic): self._unavailable_time_characteristic = unavailable_time_characteristic @property def server_integrity_process_characteristic(self): return self._server_integrity_process_characteristic @server_integrity_process_characteristic.setter def server_integrity_process_characteristic(self, server_integrity_process_characteristic): self._server_integrity_process_characteristic = server_integrity_process_characteristic @property def delivery_order_characteristic(self): return self._delivery_order_characteristic @delivery_order_characteristic.setter def delivery_order_characteristic(self, delivery_order_characteristic): self._delivery_order_characteristic = delivery_order_characteristic @property def repeat_delivery_characteristic(self): return self._repeat_delivery_characteristic @repeat_delivery_characteristic.setter def repeat_delivery_characteristic(self, repeat_delivery_characteristic): self._repeat_delivery_characteristic = repeat_delivery_characteristic @property def loss_characteristic(self): return self._loss_characteristic @loss_characteristic.setter def loss_characteristic(self, loss_characteristic): self._loss_characteristic = loss_characteristic @property def latency_characteristic(self): return self._latency_characteristic @latency_characteristic.setter
Apache License 2.0
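The setter above comes from an OpenAPI-generated model, in which every field is stored in a private attribute and exposed through a property pair. A toy class sketching that pattern, with an invented field value, is shown below; it does not depend on the tapi_server package.

class ToyNode(object):
    """Illustrative stand-in for a generated model with one settable field."""

    def __init__(self, latency_characteristic=None):
        self._latency_characteristic = latency_characteristic

    @property
    def latency_characteristic(self):
        return self._latency_characteristic

    @latency_characteristic.setter
    def latency_characteristic(self, latency_characteristic):
        self._latency_characteristic = latency_characteristic


node = ToyNode()
node.latency_characteristic = [{"traffic-property-name": "queuing-latency"}]  # example value, not from TAPI
print(node.latency_characteristic)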
state-of-the-art/blendnet
BlendNet/TaskBase.py
TaskBase.executionMessagesGet
python
def executionMessagesGet(self):
    with self._execution_messages_lock:
        return self._execution_messages.copy()
Variety of details about the execution
https://github.com/state-of-the-art/blendnet/blob/a313333f109c8d7cae8615f747767a42576e3d21/BlendNet/TaskBase.py#L358-L361
import bpy import os, sys import json import time import threading import subprocess from enum import Enum from random import randrange from abc import ABC, abstractmethod from .Config import Config from . import utils class TaskConfig(Config): _defs = { 'project': { 'description': '''Set the project file will be used to render''', 'type': str, }, 'project_path': { 'description': '''Absolute path to the project dir, required to resolve `//../dir/file`''', 'type': str, 'validation': lambda cfg, val: utils.isPathAbsolute(val) and utils.isPathStraight(val), }, 'cwd_path': { 'description': '''Absolute path to the current working dir, required to resolve `dir/file`''', 'type': str, 'validation': lambda cfg, val: utils.isPathAbsolute(val) and utils.isPathStraight(val), }, 'samples': { 'description': '''How much samples to process for the task''', 'type': int, 'min': 1, 'default': 100, }, 'seed': { 'description': '''Seed to use during render (or random will be used)''', 'type': int, 'min': 0, 'max': 2147483647, 'value': lambda cfg: randrange(0, 2147483647), }, 'frame': { 'description': '''Set the frame to render (or current one will be used)''', 'type': int, 'min': 0, }, } class TaskState(Enum): CREATED = 0 STOPPED = 1 PENDING = 2 RUNNING = 3 COMPLETED = 4 ERROR = 5 class TaskBase(ABC): def __init__(self, parent, name, config, data = dict()): print('DEBUG: Creating new task %s' % name) if not isinstance(config, TaskConfig): raise Exception('Unable to set task with configuration %s' % type(config)) self._parent = parent self._cfg = config self._name = name self._create_time = data.get('create_time', int(time.time())) self._start_time = data.get('start_time') self._end_time = data.get('end_time') self._status_lock = threading.Lock() self._status = data.get('status', { 'samples_done': 0, 'remaining': None, 'result': { 'preview': None, 'render': None, }, }) self._state_lock = threading.Lock() state_name = data.get('state', TaskState.CREATED.name) self._state = TaskState[state_name] if state_name != TaskState.RUNNING.name else TaskState.STOPPED self._state_error_info = data.get('state_error_info', []) self._execution_lock = threading.Lock() self._execution_watcher = None self._execution_details_lock = threading.Lock() self._execution_details = data.get('execution_details', {}) self._execution_messages_lock = threading.Lock() self._execution_messages = data.get('execution_messages', {}) self._files_lock = threading.Lock() self._files = data.get('files', {}) if 'config' in data: self._cfg.configsSet(data['config']) def __del__(self): print('DEBUG: Deleting task %s' % self.name()) self.stop() def name(self): return self._name def snapshot(self): out = { 'config': self._cfg.configsGet(), 'status': self._status.copy(), 'name': self._name, 'create_time': self._create_time, 'start_time': self._start_time, 'end_time': self._end_time, 'state': self._state.name, 'state_error_info': self._state_error_info, 'execution_details': self._execution_details.copy(), 'execution_messages': self._execution_messages.copy(), 'files': self._files.copy(), } return out def info(self): out = { 'name': self.name(), 'create_time': self._create_time, 'start_time': self._start_time, 'end_time': self._end_time, 'state': self._state.name, 'frame': self._cfg.frame, 'done': self._status['samples_done'] / self._cfg.samples, } if self._state_error_info: out['state_error_info'] = self._state_error_info return out def status(self): with self._status_lock: out = self.info() out.update({ 'project': self._cfg.project, 'samples': 
self._cfg.samples, 'seed': self._cfg.seed, }) out.update(self._status) return out def statusRemainingSet(self, remaining): with self._status_lock: self._status['remaining'] = remaining def statusSamplesDoneSet(self, samples): with self._status_lock: self._status['samples_done'] = samples def statusPreviewSet(self, blob_id): with self._status_lock: self._status['result']['preview'] = blob_id def statusRenderSet(self, blob_id): with self._status_lock: self._status['result']['render'] = blob_id def canBeChanged(self): with self._state_lock: return self._state == TaskState.CREATED def isPending(self): with self._state_lock: return self._state == TaskState.PENDING def isRunning(self): with self._state_lock: return self._state == TaskState.RUNNING def isCompleted(self): with self._state_lock: return self._state == TaskState.COMPLETED def isError(self): with self._state_lock: return self._state == TaskState.ERROR def isStopped(self): with self._state_lock: return self._state == TaskState.STOPPED def isEnded(self): with self._state_lock: return self._state in (TaskState.COMPLETED, TaskState.STOPPED, TaskState.ERROR) def stateCreated(self): with self._state_lock: if self._state == TaskState.PENDING: self.stateSet(TaskState.CREATED) def statePending(self): with self._state_lock: if self._state in (TaskState.CREATED, TaskState.STOPPED): self.stateSet(TaskState.PENDING) def stateStop(self): with self._state_lock: if self._state == TaskState.RUNNING: self._end_time = int(time.time()) self.stateSet(TaskState.STOPPED) def stateComplete(self): with self._state_lock: if self._state == TaskState.RUNNING: self._end_time = int(time.time()) self.stateSet(TaskState.COMPLETED) def stateError(self, info): with self._state_lock: self._state_error_info.append(info) self._end_time = int(time.time()) self.stateSet(TaskState.ERROR) def stateSet(self, state): self._state = state def fileAdd(self, path, file_id): if '../' in path: return print('WARN: Unable to use path with absolute path or contains parent dir symlink') if not self.canBeChanged(): return print('WARN: Unable to change the task once started') with self._files_lock: self._files.update({path: file_id}) return True def fileGet(self, path): with self._files_lock: return self._files.get(path) def filesGet(self): with self._files_lock: return self._files.copy() def filesPathsFix(self, path = None): result = True tocheck = [path] if path else list(self._files.keys()) for p in tocheck: if not utils.isPathStraight(p): print('ERROR: Path is not straight:', p) return False if not self._cfg.project_path or not self._cfg.cwd_path: result = None continue if not utils.isPathAbsolute(p): new_p = p if p.startswith('//'): new_p = self._cfg.project_path + p[2:].lstrip('/') else: new_p = self._cfg.cwd_path + p.lstrip('/') with self._files_lock: self._files[new_p] = self._files.pop(p) print('DEBUG: Fixed path:', p, new_p) return result def run(self): with self._state_lock: if self._state not in (TaskState.CREATED, TaskState.STOPPED): print('WARN: Unable to run already started task') return True print('DEBUG: Running task', self.name()) if not self.check(): print('ERROR: Task check fail:', self.name()) return False return self._parent.taskAddToPending(self) def start(self): with self._execution_lock: with self._state_lock: self._state = TaskState.RUNNING if not self._execution_watcher: self._start_time = int(time.time()) self._execution_watcher = threading.Thread(target=self._executionWatcher) self._execution_watcher.start() print('INFO: Task %s started execution' % 
self.name()) @abstractmethod def _executionWatcher(self): def stop(self): if self.isPending(): self._parent.taskRemoveFromPending(self) if self.isRunning(): self._stop() @abstractmethod def _stop(self): def executionDetailsGet(self): with self._execution_details_lock: return self._execution_details.copy() def executionDetailsAdd(self, details, task = None): if not isinstance(details, list): details = [details] if not isinstance(task, str): task = self.name() self.executionDetailsSet(self._execution_details.get(task, []) + details, task) def executionDetailsSet(self, details, task = None): with self._execution_details_lock: if task: self._execution_details[task] = details else: self._execution_details = details
Apache License 2.0
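executionMessagesGet returns the message dict copied while holding a lock, so callers get a snapshot rather than a live reference. The sketch below reproduces that lock-then-shallow-copy pattern with an invented MessageBox class; note that, as in the original, the copy is shallow, so only new top-level keys are decoupled.

import threading


class MessageBox(object):
    """Toy holder of per-task messages, guarded by a lock as in TaskBase."""

    def __init__(self):
        self._messages_lock = threading.Lock()
        self._messages = {}

    def add(self, task, text):
        with self._messages_lock:
            self._messages.setdefault(task, []).append(text)

    def messagesGet(self):
        with self._messages_lock:
            return self._messages.copy()


box = MessageBox()
box.add("render", "frame 1 done")
snapshot = box.messagesGet()
box.add("compose", "started")
print(sorted(snapshot))           # ['render'] -- the copied dict does not gain new keys
print(sorted(box.messagesGet()))  # ['compose', 'render']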
google-research/albert
squad_utils.py
convert_examples_to_features
python
def convert_examples_to_features(examples, tokenizer, max_seq_length, doc_stride, max_query_length, is_training, output_fn, do_lower_case): cnt_pos, cnt_neg = 0, 0 unique_id = 1000000000 max_n, max_m = 1024, 1024 f = np.zeros((max_n, max_m), dtype=np.float32) for (example_index, example) in enumerate(examples): if example_index % 100 == 0: tf.logging.info("Converting {}/{} pos {} neg {}".format( example_index, len(examples), cnt_pos, cnt_neg)) query_tokens = tokenization.encode_ids( tokenizer.sp_model, tokenization.preprocess_text( example.question_text, lower=do_lower_case)) if len(query_tokens) > max_query_length: query_tokens = query_tokens[0:max_query_length] paragraph_text = example.paragraph_text para_tokens = tokenization.encode_pieces( tokenizer.sp_model, tokenization.preprocess_text( example.paragraph_text, lower=do_lower_case), return_unicode=False) chartok_to_tok_index = [] tok_start_to_chartok_index = [] tok_end_to_chartok_index = [] char_cnt = 0 para_tokens = [six.ensure_text(token, "utf-8") for token in para_tokens] for i, token in enumerate(para_tokens): new_token = six.ensure_text(token).replace( tokenization.SPIECE_UNDERLINE.decode("utf-8"), " ") chartok_to_tok_index.extend([i] * len(new_token)) tok_start_to_chartok_index.append(char_cnt) char_cnt += len(new_token) tok_end_to_chartok_index.append(char_cnt - 1) tok_cat_text = "".join(para_tokens).replace( tokenization.SPIECE_UNDERLINE.decode("utf-8"), " ") n, m = len(paragraph_text), len(tok_cat_text) if n > max_n or m > max_m: max_n = max(n, max_n) max_m = max(m, max_m) f = np.zeros((max_n, max_m), dtype=np.float32) g = {} def _lcs_match(max_dist, n=n, m=m): f.fill(0) g.clear() for i in range(n): for j in range(i - max_dist, i + max_dist): if j >= m or j < 0: continue if i > 0: g[(i, j)] = 0 f[i, j] = f[i - 1, j] if j > 0 and f[i, j - 1] > f[i, j]: g[(i, j)] = 1 f[i, j] = f[i, j - 1] f_prev = f[i - 1, j - 1] if i > 0 and j > 0 else 0 if (tokenization.preprocess_text( paragraph_text[i], lower=do_lower_case, remove_space=False) == tok_cat_text[j] and f_prev + 1 > f[i, j]): g[(i, j)] = 2 f[i, j] = f_prev + 1 max_dist = abs(n - m) + 5 for _ in range(2): _lcs_match(max_dist) if f[n - 1, m - 1] > 0.8 * n: break max_dist *= 2 orig_to_chartok_index = [None] * n chartok_to_orig_index = [None] * m i, j = n - 1, m - 1 while i >= 0 and j >= 0: if (i, j) not in g: break if g[(i, j)] == 2: orig_to_chartok_index[i] = j chartok_to_orig_index[j] = i i, j = i - 1, j - 1 elif g[(i, j)] == 1: j = j - 1 else: i = i - 1 if (all(v is None for v in orig_to_chartok_index) or f[n - 1, m - 1] < 0.8 * n): tf.logging.info("MISMATCH DETECTED!") continue tok_start_to_orig_index = [] tok_end_to_orig_index = [] for i in range(len(para_tokens)): start_chartok_pos = tok_start_to_chartok_index[i] end_chartok_pos = tok_end_to_chartok_index[i] start_orig_pos = _convert_index(chartok_to_orig_index, start_chartok_pos, n, is_start=True) end_orig_pos = _convert_index(chartok_to_orig_index, end_chartok_pos, n, is_start=False) tok_start_to_orig_index.append(start_orig_pos) tok_end_to_orig_index.append(end_orig_pos) if not is_training: tok_start_position = tok_end_position = None if is_training and example.is_impossible: tok_start_position = 0 tok_end_position = 0 if is_training and not example.is_impossible: start_position = example.start_position end_position = start_position + len(example.orig_answer_text) - 1 start_chartok_pos = _convert_index(orig_to_chartok_index, start_position, is_start=True) tok_start_position = chartok_to_tok_index[start_chartok_pos] 
end_chartok_pos = _convert_index(orig_to_chartok_index, end_position, is_start=False) tok_end_position = chartok_to_tok_index[end_chartok_pos] assert tok_start_position <= tok_end_position def _piece_to_id(x): if six.PY2 and isinstance(x, six.text_type): x = six.ensure_binary(x, "utf-8") return tokenizer.sp_model.PieceToId(x) all_doc_tokens = list(map(_piece_to_id, para_tokens)) max_tokens_for_doc = max_seq_length - len(query_tokens) - 3 _DocSpan = collections.namedtuple( "DocSpan", ["start", "length"]) doc_spans = [] start_offset = 0 while start_offset < len(all_doc_tokens): length = len(all_doc_tokens) - start_offset if length > max_tokens_for_doc: length = max_tokens_for_doc doc_spans.append(_DocSpan(start=start_offset, length=length)) if start_offset + length == len(all_doc_tokens): break start_offset += min(length, doc_stride) for (doc_span_index, doc_span) in enumerate(doc_spans): tokens = [] token_is_max_context = {} segment_ids = [] p_mask = [] cur_tok_start_to_orig_index = [] cur_tok_end_to_orig_index = [] tokens.append(tokenizer.sp_model.PieceToId("[CLS]")) segment_ids.append(0) p_mask.append(0) for token in query_tokens: tokens.append(token) segment_ids.append(0) p_mask.append(1) tokens.append(tokenizer.sp_model.PieceToId("[SEP]")) segment_ids.append(0) p_mask.append(1) for i in range(doc_span.length): split_token_index = doc_span.start + i cur_tok_start_to_orig_index.append( tok_start_to_orig_index[split_token_index]) cur_tok_end_to_orig_index.append( tok_end_to_orig_index[split_token_index]) is_max_context = _check_is_max_context(doc_spans, doc_span_index, split_token_index) token_is_max_context[len(tokens)] = is_max_context tokens.append(all_doc_tokens[split_token_index]) segment_ids.append(1) p_mask.append(0) tokens.append(tokenizer.sp_model.PieceToId("[SEP]")) segment_ids.append(1) p_mask.append(1) paragraph_len = len(tokens) input_ids = tokens input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_length: input_ids.append(0) input_mask.append(0) segment_ids.append(0) p_mask.append(1) assert len(input_ids) == max_seq_length assert len(input_mask) == max_seq_length assert len(segment_ids) == max_seq_length span_is_impossible = example.is_impossible start_position = None end_position = None if is_training and not span_is_impossible: doc_start = doc_span.start doc_end = doc_span.start + doc_span.length - 1 out_of_span = False if not (tok_start_position >= doc_start and tok_end_position <= doc_end): out_of_span = True if out_of_span: start_position = 0 end_position = 0 span_is_impossible = True else: doc_offset = len(query_tokens) + 2 start_position = tok_start_position - doc_start + doc_offset end_position = tok_end_position - doc_start + doc_offset if is_training and span_is_impossible: start_position = 0 end_position = 0 if example_index < 20: tf.logging.info("*** Example ***") tf.logging.info("unique_id: %s" % (unique_id)) tf.logging.info("example_index: %s" % (example_index)) tf.logging.info("doc_span_index: %s" % (doc_span_index)) tf.logging.info("tok_start_to_orig_index: %s" % " ".join( [str(x) for x in cur_tok_start_to_orig_index])) tf.logging.info("tok_end_to_orig_index: %s" % " ".join( [str(x) for x in cur_tok_end_to_orig_index])) tf.logging.info("token_is_max_context: %s" % " ".join([ "%d:%s" % (x, y) for (x, y) in six.iteritems(token_is_max_context) ])) tf.logging.info("input_pieces: %s" % " ".join( [tokenizer.sp_model.IdToPiece(x) for x in tokens])) tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) tf.logging.info( "input_mask: %s" 
% " ".join([str(x) for x in input_mask])) tf.logging.info( "segment_ids: %s" % " ".join([str(x) for x in segment_ids])) if is_training and span_is_impossible: tf.logging.info("impossible example span") if is_training and not span_is_impossible: pieces = [tokenizer.sp_model.IdToPiece(token) for token in tokens[start_position: (end_position + 1)]] answer_text = tokenizer.sp_model.DecodePieces(pieces) tf.logging.info("start_position: %d" % (start_position)) tf.logging.info("end_position: %d" % (end_position)) tf.logging.info( "answer: %s" % (tokenization.printable_text(answer_text))) if is_training: feat_example_index = None else: feat_example_index = example_index feature = InputFeatures( unique_id=unique_id, example_index=feat_example_index, doc_span_index=doc_span_index, tok_start_to_orig_index=cur_tok_start_to_orig_index, tok_end_to_orig_index=cur_tok_end_to_orig_index, token_is_max_context=token_is_max_context, tokens=[tokenizer.sp_model.IdToPiece(x) for x in tokens], input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, paragraph_len=paragraph_len, start_position=start_position, end_position=end_position, is_impossible=span_is_impossible, p_mask=p_mask) output_fn(feature) unique_id += 1 if span_is_impossible: cnt_neg += 1 else: cnt_pos += 1 tf.logging.info("Total number of instances: {} = pos {} neg {}".format( cnt_pos + cnt_neg, cnt_pos, cnt_neg))
Loads a data file into a list of `InputBatch`s.
https://github.com/google-research/albert/blob/9196d09715b96e8f27a4f1ba2b0d42f3a514e945/squad_utils.py#L216-L537
from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import json import math import re import string import sys from albert import fine_tuning_utils from albert import modeling from albert import optimization from albert import tokenization import numpy as np import six from six.moves import map from six.moves import range import tensorflow.compat.v1 as tf from tensorflow.contrib import data as contrib_data from tensorflow.contrib import layers as contrib_layers from tensorflow.contrib import tpu as contrib_tpu _PrelimPrediction = collections.namedtuple( "PrelimPrediction", ["feature_index", "start_index", "end_index", "start_log_prob", "end_log_prob"]) _NbestPrediction = collections.namedtuple( "NbestPrediction", ["text", "start_log_prob", "end_log_prob"]) RawResult = collections.namedtuple("RawResult", ["unique_id", "start_log_prob", "end_log_prob"]) RawResultV2 = collections.namedtuple( "RawResultV2", ["unique_id", "start_top_log_probs", "start_top_index", "end_top_log_probs", "end_top_index", "cls_logits"]) class SquadExample(object): def __init__(self, qas_id, question_text, paragraph_text, orig_answer_text=None, start_position=None, end_position=None, is_impossible=False): self.qas_id = qas_id self.question_text = question_text self.paragraph_text = paragraph_text self.orig_answer_text = orig_answer_text self.start_position = start_position self.end_position = end_position self.is_impossible = is_impossible def __str__(self): return self.__repr__() def __repr__(self): s = "" s += "qas_id: %s" % (tokenization.printable_text(self.qas_id)) s += ", question_text: %s" % ( tokenization.printable_text(self.question_text)) s += ", paragraph_text: [%s]" % (" ".join(self.paragraph_text)) if self.start_position: s += ", start_position: %d" % (self.start_position) if self.start_position: s += ", end_position: %d" % (self.end_position) if self.start_position: s += ", is_impossible: %r" % (self.is_impossible) return s class InputFeatures(object): def __init__(self, unique_id, example_index, doc_span_index, tok_start_to_orig_index, tok_end_to_orig_index, token_is_max_context, tokens, input_ids, input_mask, segment_ids, paragraph_len, p_mask=None, start_position=None, end_position=None, is_impossible=None): self.unique_id = unique_id self.example_index = example_index self.doc_span_index = doc_span_index self.tok_start_to_orig_index = tok_start_to_orig_index self.tok_end_to_orig_index = tok_end_to_orig_index self.token_is_max_context = token_is_max_context self.tokens = tokens self.input_ids = input_ids self.input_mask = input_mask self.segment_ids = segment_ids self.paragraph_len = paragraph_len self.start_position = start_position self.end_position = end_position self.is_impossible = is_impossible self.p_mask = p_mask def read_squad_examples(input_file, is_training): with tf.gfile.Open(input_file, "r") as reader: input_data = json.load(reader)["data"] examples = [] for entry in input_data: for paragraph in entry["paragraphs"]: paragraph_text = paragraph["context"] for qa in paragraph["qas"]: qas_id = qa["id"] question_text = qa["question"] start_position = None orig_answer_text = None is_impossible = False if is_training: is_impossible = qa.get("is_impossible", False) if (len(qa["answers"]) != 1) and (not is_impossible): raise ValueError( "For training, each question should have exactly 1 answer.") if not is_impossible: answer = qa["answers"][0] orig_answer_text = answer["text"] start_position = answer["answer_start"] else: 
start_position = -1 orig_answer_text = "" example = SquadExample( qas_id=qas_id, question_text=question_text, paragraph_text=paragraph_text, orig_answer_text=orig_answer_text, start_position=start_position, is_impossible=is_impossible) examples.append(example) return examples def _convert_index(index, pos, m=None, is_start=True): if index[pos] is not None: return index[pos] n = len(index) rear = pos while rear < n - 1 and index[rear] is None: rear += 1 front = pos while front > 0 and index[front] is None: front -= 1 assert index[front] is not None or index[rear] is not None if index[front] is None: if index[rear] >= 1: if is_start: return 0 else: return index[rear] - 1 return index[rear] if index[rear] is None: if m is not None and index[front] < m - 1: if is_start: return index[front] + 1 else: return m - 1 return index[front] if is_start: if index[rear] > index[front] + 1: return index[front] + 1 else: return index[rear] else: if index[rear] > index[front] + 1: return index[rear] - 1 else: return index[front]
Apache License 2.0
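One piece of convert_examples_to_features that is easy to isolate is the sliding-window ("doc span") loop that splits a long paragraph into overlapping chunks. The sketch below re-implements just that loop as a pure function under assumed token counts; DocSpan mirrors the namedtuple in the original, everything else is illustrative.

import collections

DocSpan = collections.namedtuple("DocSpan", ["start", "length"])


def make_doc_spans(num_doc_tokens, max_seq_length, num_query_tokens, doc_stride):
    # Three slots are reserved for [CLS] and the two [SEP] tokens.
    max_tokens_for_doc = max_seq_length - num_query_tokens - 3
    doc_spans = []
    start_offset = 0
    while start_offset < num_doc_tokens:
        length = min(num_doc_tokens - start_offset, max_tokens_for_doc)
        doc_spans.append(DocSpan(start=start_offset, length=length))
        if start_offset + length == num_doc_tokens:
            break
        start_offset += min(length, doc_stride)
    return doc_spans


# A 500-token paragraph, a 20-token question, 384-token sequences, stride 128:
print(make_doc_spans(500, 384, 20, 128))
# [DocSpan(start=0, length=361), DocSpan(start=128, length=361), DocSpan(start=256, length=244)]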
climdyn/qgs
qgs/params/params.py
ScaleParams.phi0
python
def phi0(self):
    return Parameter(self.phi0_npi * np.pi, units='[rad]',
                     description="The reference latitude of the center of the domain",
                     return_dimensional=True)
Parameter: The reference latitude :math:`\\phi_0` at the center of the domain, expressed in radians [:math:`rad`].
https://github.com/climdyn/qgs/blob/33d79b1fa360de22b7ae595c142dbe9b6a8fb53a/qgs/params/params.py#L281-L285
import numpy as np import pickle import warnings from abc import ABC from qgs.params.parameter import Parameter from qgs.basis.fourier import contiguous_channel_basis, contiguous_basin_basis from qgs.basis.fourier import ChannelFourierBasis, BasinFourierBasis class Params(ABC): _name = "" def __init__(self, dic=None): self.set_params(dic) def set_params(self, dic): if dic is not None: for key, val in zip(dic.keys(), dic.values()): if key in self.__dict__.keys(): if isinstance(self.__dict__[key], Parameter): if isinstance(val, Parameter): self.__dict__[key] = val else: d = self.__dict__[key].__dict__ self.__dict__[key] = Parameter(val, input_dimensional=d['_input_dimensional'], units=d['_units'], description=d['_description'], scale_object=d['_scale_object'], return_dimensional=d['_return_dimensional']) else: self.__dict__[key] = val def __str__(self): s = "" for key, val in zip(self.__dict__.keys(), self.__dict__.values()): if 'params' not in key and key[0] != '_': if val is None: pass elif isinstance(val, Parameter): if val.input_dimensional: units = val.units efval = val.dimensional_value else: efval = val.nondimensional_value if val.nondimensional_value == val.dimensional_value: units = "" else: units = "[nondim]" s += "'" + key + "': " + str(efval) + " " + units + " (" + val.description + "),\n" elif isinstance(val, (np.ndarray, list, tuple)) and isinstance(val[0], Parameter): for i, v in enumerate(val): if v.input_dimensional: units = v.units efval = v.dimensional_value else: efval = v.nondimensional_value if v.nondimensional_value == v.dimensional_value: units = "" else: units = "[nondim]" s += "'" + key + "["+str(i+1)+"]': " + str(efval) + " " + units + " (" + v.description + "),\n" else: s += "'"+key+"': "+str(val)+",\n" return s def _list_params(self): return self._name+" Parameters:\n"+self.__str__() def print_params(self): print(self._list_params()) @staticmethod def create_params_array(values, input_dimensional=None, units=None, scale_object=None, description=None, return_dimensional=None): if hasattr(values, "__iter__"): ls = len(values) if not isinstance(input_dimensional, list): if input_dimensional is None: input_dimensional = True idx = ls * [input_dimensional] else: idx = input_dimensional if not isinstance(units, list): if units is None: units = "" u = ls * [units] else: u = units if not isinstance(description, list): if description is None: description = "" d = ls * [description] else: d = description if not isinstance(scale_object, list): s = ls * [scale_object] else: s = scale_object if not isinstance(return_dimensional, list): if return_dimensional is None: return_dimensional = False rd = ls * [return_dimensional] else: rd = return_dimensional arr = list() for i, val in enumerate(values): arr.append(Parameter(val, input_dimensional=idx[i], units=u[i], scale_object=s[i], description=d[i], return_dimensional=rd[i])) else: arr = values * [Parameter(0.e0, input_dimensional=input_dimensional, units=units, scale_object=scale_object, description=description, return_dimensional=return_dimensional)] return np.array(arr, dtype=object) def __repr__(self): s = super(Params, self).__repr__()+"\n"+self._list_params() return s def load_from_file(self, filename, **kwargs): f = open(filename, 'rb') tmp_dict = pickle.load(f, **kwargs) f.close() self.__dict__.clear() self.__dict__.update(tmp_dict) def save_to_file(self, filename, **kwargs): f = open(filename, 'wb') pickle.dump(self.__dict__, f, **kwargs) f.close() class ScaleParams(Params): _name = "Scale" def __init__(self, dic=None): 
Params.__init__(self, dic) self.scale = Parameter(5.e6, units='[m]', description="characteristic space scale (L*pi)", return_dimensional=True) self.f0 = Parameter(1.032e-4, units='[s^-1]', description="Coriolis parameter at the middle of the domain", return_dimensional=True) self.n = Parameter(1.3e0, input_dimensional=False, description="aspect ratio (n = 2 L_y / L_x)") self.rra = Parameter(6370.e3, units='[m]', description="earth radius", return_dimensional=True) self.phi0_npi = Parameter(0.25e0, input_dimensional=False, description="latitude expressed in fraction of pi") self.deltap = Parameter(5.e4, units='[Pa]', description='pressure difference between the two atmospheric layers', return_dimensional=True) self.set_params(dic) @property def L(self): return Parameter(self.scale / np.pi, units=self.scale.units, description='Typical length scale L', return_dimensional=True) @property def L_y(self): return Parameter(self.scale, units=self.scale.units, description='The meridional extent of the model domain', return_dimensional=True) @property def L_x(self): return Parameter(2 * self.scale / self.n, units=self.scale.units, description='The zonal extent of the model domain', return_dimensional=True) @property
MIT License
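A quick numeric check of the phi0 property above, using the default phi0_npi = 0.25 set in ScaleParams.__init__: the reference latitude is just that fraction of pi converted to radians.

import numpy as np

phi0_npi = 0.25            # default "latitude expressed in fraction of pi"
phi0 = phi0_npi * np.pi
print(phi0, "rad")                  # ~0.7853981633974483 rad
print(np.degrees(phi0), "degrees")  # 45.0 degrees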
saketkc/pysradb
pysradb/geoweb.py
GEOweb.__init__
python
def __init__(self):
Initialize GEOweb without any database.
https://github.com/saketkc/pysradb/blob/bce1726813a104ff83eb1221679bf93074252af6/pysradb/geoweb.py#L22-L23
import gzip
import os
import re
import requests
import sys

from lxml import html

from .download import download_file
from .geodb import GEOdb
from .utils import _get_url
from .utils import copyfileobj
from .utils import get_gzip_uncompressed_size

PY3 = True
if sys.version_info[0] < 3:
    PY3 = False


class GEOweb(GEOdb):
BSD 3-Clause New or Revised License
zhaocq-nlp/njunmt-tf
njunmt/encoders/encoder.py
Encoder.__init__
python
def __init__(self, params, mode, name=None, verbose=True):
    super(Encoder, self).__init__(
        params=params, mode=mode, verbose=verbose,
        name=name or self.__class__.__name__)
    self._encoder_output_tuple_type = namedtuple(
        "EncoderOutput",
        "outputs final_states attention_values attention_length")
Initializes the parameters of the encoder. Args: params: A dictionary of parameters to construct the encoder architecture. mode: A mode. name: The name of this encoder. verbose: Print encoder parameters if set True.
https://github.com/zhaocq-nlp/njunmt-tf/blob/f1440726b3c007bcf19126fc4dee43a91dccc718/njunmt/encoders/encoder.py#L27-L46
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from abc import abstractmethod
from collections import namedtuple

from njunmt.utils.configurable import Configurable


class Encoder(Configurable):
Apache License 2.0
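Encoder.__init__ mainly forwards its arguments to Configurable and defines the EncoderOutput namedtuple that concrete encoders return. The snippet below builds that namedtuple on its own, with placeholder strings standing in for the tensors a real encoder would produce.

from collections import namedtuple

# Same field list as in Encoder.__init__; the values below are placeholders.
EncoderOutput = namedtuple(
    "EncoderOutput",
    "outputs final_states attention_values attention_length")

out = EncoderOutput(
    outputs="batch x time x units tensor",
    final_states="final RNN state(s)",
    attention_values="keys/values exposed to decoder attention",
    attention_length="true sequence lengths")
print(out.attention_length)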
osmr/imgclsmob
gluon/gluoncv2/models/resnext_cifar.py
resnext20_1x64d_svhn
python
def resnext20_1x64d_svhn(classes=10, **kwargs):
    return get_resnext_cifar(classes=classes, blocks=20, cardinality=1, bottleneck_width=64,
                             model_name="resnext20_1x64d_svhn", **kwargs)
ResNeXt-20 (1x64d) model for SVHN from 'Aggregated Residual Transformations for Deep Neural Networks,' http://arxiv.org/abs/1611.05431. Parameters: ---------- classes : int, default 10 Number of classification classes. pretrained : bool, default False Whether to load the pretrained weights for model. ctx : Context, default CPU The context in which to load the pretrained weights. root : str, default '~/.mxnet/models' Location for keeping the model parameters.
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/gluon/gluoncv2/models/resnext_cifar.py#L214-L231
__all__ = ['CIFARResNeXt', 'resnext20_1x64d_cifar10', 'resnext20_1x64d_cifar100', 'resnext20_1x64d_svhn', 'resnext20_2x32d_cifar10', 'resnext20_2x32d_cifar100', 'resnext20_2x32d_svhn', 'resnext20_2x64d_cifar10', 'resnext20_2x64d_cifar100', 'resnext20_2x64d_svhn', 'resnext20_4x16d_cifar10', 'resnext20_4x16d_cifar100', 'resnext20_4x16d_svhn', 'resnext20_4x32d_cifar10', 'resnext20_4x32d_cifar100', 'resnext20_4x32d_svhn', 'resnext20_8x8d_cifar10', 'resnext20_8x8d_cifar100', 'resnext20_8x8d_svhn', 'resnext20_8x16d_cifar10', 'resnext20_8x16d_cifar100', 'resnext20_8x16d_svhn', 'resnext20_16x4d_cifar10', 'resnext20_16x4d_cifar100', 'resnext20_16x4d_svhn', 'resnext20_16x8d_cifar10', 'resnext20_16x8d_cifar100', 'resnext20_16x8d_svhn', 'resnext20_32x2d_cifar10', 'resnext20_32x2d_cifar100', 'resnext20_32x2d_svhn', 'resnext20_32x4d_cifar10', 'resnext20_32x4d_cifar100', 'resnext20_32x4d_svhn', 'resnext20_64x1d_cifar10', 'resnext20_64x1d_cifar100', 'resnext20_64x1d_svhn', 'resnext20_64x2d_cifar10', 'resnext20_64x2d_cifar100', 'resnext20_64x2d_svhn', 'resnext29_32x4d_cifar10', 'resnext29_32x4d_cifar100', 'resnext29_32x4d_svhn', 'resnext29_16x64d_cifar10', 'resnext29_16x64d_cifar100', 'resnext29_16x64d_svhn', 'resnext56_1x64d_cifar10', 'resnext56_1x64d_cifar100', 'resnext56_1x64d_svhn', 'resnext56_2x32d_cifar10', 'resnext56_2x32d_cifar100', 'resnext56_2x32d_svhn', 'resnext56_4x16d_cifar10', 'resnext56_4x16d_cifar100', 'resnext56_4x16d_svhn', 'resnext56_8x8d_cifar10', 'resnext56_8x8d_cifar100', 'resnext56_8x8d_svhn', 'resnext56_16x4d_cifar10', 'resnext56_16x4d_cifar100', 'resnext56_16x4d_svhn', 'resnext56_32x2d_cifar10', 'resnext56_32x2d_cifar100', 'resnext56_32x2d_svhn', 'resnext56_64x1d_cifar10', 'resnext56_64x1d_cifar100', 'resnext56_64x1d_svhn', 'resnext272_1x64d_cifar10', 'resnext272_1x64d_cifar100', 'resnext272_1x64d_svhn', 'resnext272_2x32d_cifar10', 'resnext272_2x32d_cifar100', 'resnext272_2x32d_svhn'] import os from mxnet import cpu from mxnet.gluon import nn, HybridBlock from .common import conv3x3_block from .resnext import ResNeXtUnit class CIFARResNeXt(HybridBlock): def __init__(self, channels, init_block_channels, cardinality, bottleneck_width, bn_use_global_stats=False, in_channels=3, in_size=(32, 32), classes=10, **kwargs): super(CIFARResNeXt, self).__init__(**kwargs) self.in_size = in_size self.classes = classes with self.name_scope(): self.features = nn.HybridSequential(prefix="") self.features.add(conv3x3_block( in_channels=in_channels, out_channels=init_block_channels, bn_use_global_stats=bn_use_global_stats)) in_channels = init_block_channels for i, channels_per_stage in enumerate(channels): stage = nn.HybridSequential(prefix="stage{}_".format(i + 1)) with stage.name_scope(): for j, out_channels in enumerate(channels_per_stage): strides = 2 if (j == 0) and (i != 0) else 1 stage.add(ResNeXtUnit( in_channels=in_channels, out_channels=out_channels, strides=strides, cardinality=cardinality, bottleneck_width=bottleneck_width, bn_use_global_stats=bn_use_global_stats)) in_channels = out_channels self.features.add(stage) self.features.add(nn.AvgPool2D( pool_size=8, strides=1)) self.output = nn.HybridSequential(prefix="") self.output.add(nn.Flatten()) self.output.add(nn.Dense( units=classes, in_units=in_channels)) def hybrid_forward(self, F, x): x = self.features(x) x = self.output(x) return x def get_resnext_cifar(classes, blocks, cardinality, bottleneck_width, model_name=None, pretrained=False, ctx=cpu(), root=os.path.join("~", ".mxnet", "models"), **kwargs): assert (blocks - 2) % 9 == 0 
layers = [(blocks - 2) // 9] * 3 channels_per_layers = [256, 512, 1024] init_block_channels = 64 channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)] net = CIFARResNeXt( channels=channels, init_block_channels=init_block_channels, cardinality=cardinality, bottleneck_width=bottleneck_width, classes=classes, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file net.load_parameters( filename=get_model_file( model_name=model_name, local_model_store_dir_path=root), ctx=ctx) return net def resnext20_1x64d_cifar10(classes=10, **kwargs): return get_resnext_cifar(classes=classes, blocks=20, cardinality=1, bottleneck_width=64, model_name="resnext20_1x64d_cifar10", **kwargs) def resnext20_1x64d_cifar100(classes=100, **kwargs): return get_resnext_cifar(classes=classes, blocks=20, cardinality=1, bottleneck_width=64, model_name="resnext20_1x64d_cifar100", **kwargs)
MIT License
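A hedged usage sketch for the factory above, assuming MXNet and the gluoncv2 package from imgclsmob are installed; the import path is inferred from the repository layout and may differ in a given install. It builds the 10-class SVHN model and runs a dummy 32x32 batch through it.

import mxnet as mx
from gluoncv2.models.resnext_cifar import resnext20_1x64d_svhn  # path assumed from the repo layout

net = resnext20_1x64d_svhn(pretrained=False)  # 10 classes by default
net.initialize(ctx=mx.cpu())

x = mx.nd.zeros((1, 3, 32, 32))  # one dummy 32x32 RGB image
y = net(x)
print(y.shape)  # (1, 10)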
dmontagu/fastapi-utils
fastapi_utils/timing.py
_TimingStats.emit
python
def emit(self, note: Optional[str] = None) -> None:
    if not self.silent:
        self.take_split()
        cpu_ms = 1000 * self.cpu_time
        wall_ms = 1000 * self.time
        message = f"TIMING: Wall: {wall_ms:6.1f}ms | CPU: {cpu_ms:6.1f}ms | {self.name}"
        if note is not None:
            message += f" ({note})"
        self.record(message)
Emit timing information, optionally including a specified note
https://github.com/dmontagu/fastapi-utils/blob/af95ff4a8195caaa9edaa3dbd5b6eeb09691d9c7/fastapi_utils/timing.py#L121-L132
import resource import time from typing import Any, Callable, Optional from fastapi import FastAPI from starlette.middleware.base import RequestResponseEndpoint from starlette.requests import Request from starlette.responses import Response from starlette.routing import Match, Mount from starlette.types import Scope TIMER_ATTRIBUTE = "__fastapi_utils_timer__" def add_timing_middleware( app: FastAPI, record: Optional[Callable[[str], None]] = None, prefix: str = "", exclude: Optional[str] = None ) -> None: metric_namer = _MetricNamer(prefix=prefix, app=app) @app.middleware("http") async def timing_middleware(request: Request, call_next: RequestResponseEndpoint) -> Response: metric_name = metric_namer(request.scope) with _TimingStats(metric_name, record=record, exclude=exclude) as timer: setattr(request.state, TIMER_ATTRIBUTE, timer) response = await call_next(request) return response def record_timing(request: Request, note: Optional[str] = None) -> None: timer = getattr(request.state, TIMER_ATTRIBUTE, None) if timer is not None: assert isinstance(timer, _TimingStats) timer.emit(note) else: raise ValueError("No timer present on request") class _TimingStats: def __init__( self, name: Optional[str] = None, record: Callable[[str], None] = None, exclude: Optional[str] = None ) -> None: self.name = name self.record = record or print self.start_time: float = 0 self.start_cpu_time: float = 0 self.end_cpu_time: float = 0 self.end_time: float = 0 self.silent: bool = False if self.name is not None and exclude is not None and (exclude in self.name): self.silent = True def start(self) -> None: self.start_time = time.time() self.start_cpu_time = _get_cpu_time() def take_split(self) -> None: self.end_time = time.time() self.end_cpu_time = _get_cpu_time() @property def time(self) -> float: return self.end_time - self.start_time @property def cpu_time(self) -> float: return self.end_cpu_time - self.start_cpu_time def __enter__(self) -> "_TimingStats": self.start() return self def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: self.emit()
MIT License
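A short usage sketch for the timing helpers shown in the context above, assuming fastapi and fastapi-utils are installed: add_timing_middleware wires _TimingStats around every request, and record_timing emits an extra split (with a note) from inside a handler.

from fastapi import FastAPI, Request
from fastapi_utils.timing import add_timing_middleware, record_timing

app = FastAPI()
# record=print sends timing lines to stdout; routes whose metric name contains
# "untimed" are silenced via the exclude filter.
add_timing_middleware(app, record=print, prefix="app", exclude="untimed")


@app.get("/timed")
async def timed(request: Request):
    record_timing(request, note="halfway")  # extra split, appears as "(halfway)"
    return {"ok": True}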
rucio/rucio
lib/rucio/core/rse.py
delete_rse_transfer_limits
python
def delete_rse_transfer_limits(rse_id, activity=None, session=None):
    try:
        query = session.query(models.RSETransferLimit).filter_by(rse_id=rse_id)
        if activity:
            query = query.filter_by(activity=activity)
        rowcount = query.delete()
        return rowcount
    except IntegrityError as error:
        raise exception.RucioException(error.args)
Delete RSE transfer limits.

:param rse_id: The RSE id.
:param activity: The activity.
https://github.com/rucio/rucio/blob/6a6092798bb8220dec07328d0e3f7f42d1b931cd/lib/rucio/core/rse.py#L845-L859
import json from io import StringIO from re import match from typing import TYPE_CHECKING import sqlalchemy import sqlalchemy.orm from dogpile.cache import make_region from dogpile.cache.api import NO_VALUE from six import string_types from sqlalchemy.exc import DatabaseError, IntegrityError, OperationalError from sqlalchemy.orm import aliased from sqlalchemy.orm.exc import FlushError from sqlalchemy.sql.expression import or_, false import rucio.core.account_counter from rucio.common import exception, utils from rucio.common.config import get_lfn2pfn_algorithm_default, config_get from rucio.common.utils import CHECKSUM_KEY, is_checksum_valid, GLOBALLY_SUPPORTED_CHECKSUMS from rucio.core.rse_counter import add_counter, get_counter from rucio.db.sqla import models from rucio.db.sqla.constants import RSEType from rucio.db.sqla.session import read_session, transactional_session, stream_session if TYPE_CHECKING: from typing import Dict, Optional from sqlalchemy.orm import Session REGION = make_region().configure('dogpile.cache.memcached', expiration_time=3600, arguments={'url': config_get('cache', 'url', False, '127.0.0.1:11211', check_config_table=False), 'distributed_lock': True}) @transactional_session def add_rse(rse, vo='def', deterministic=True, volatile=False, city=None, region_code=None, country_name=None, continent=None, time_zone=None, ISP=None, staging_area=False, rse_type=RSEType.DISK, longitude=None, latitude=None, ASN=None, availability=7, session=None): if isinstance(rse_type, string_types): rse_type = RSEType(rse_type) new_rse = models.RSE(rse=rse, vo=vo, deterministic=deterministic, volatile=volatile, city=city, region_code=region_code, country_name=country_name, continent=continent, time_zone=time_zone, staging_area=staging_area, ISP=ISP, availability=availability, rse_type=rse_type, longitude=longitude, latitude=latitude, ASN=ASN) try: new_rse.save(session=session) except IntegrityError: raise exception.Duplicate('RSE \'%(rse)s\' already exists!' % locals()) except DatabaseError as error: raise exception.RucioException(error.args) add_rse_attribute(rse_id=new_rse.id, key=rse, value=True, session=session) add_counter(rse_id=new_rse.id, session=session) rucio.core.account_counter.create_counters_for_new_rse(rse_id=new_rse.id, session=session) return new_rse.id @read_session def rse_exists(rse, vo='def', include_deleted=False, session=None): return True if session.query(models.RSE).filter_by(rse=rse, vo=vo, deleted=include_deleted).first() else False @read_session def sort_rses(rses, session=None): if not rses: raise exception.InputValidationError('The list rses should not be empty!') if len(rses) == 1: return rses false_value = False query = session.query(models.RSE.rse, models.RSE.staging_area, models.RSEUsage.rse_id). filter(models.RSEUsage.source == 'storage'). filter(models.RSEUsage.rse_id == models.RSE.id). 
filter(models.RSE.deleted == false_value) condition = [] for rse in rses: condition.append(models.RSE.id == rse['id']) query = query.filter(or_(*condition)).order_by(models.RSEUsage.free.asc()) return [{'rse': rse, 'staging_area': staging_area, 'id': rse_id} for rse, staging_area, rse_id in query] @transactional_session def del_rse(rse_id, session=None): old_rse = None try: old_rse = session.query(models.RSE).filter_by(id=rse_id, deleted=False).one() if not rse_is_empty(rse_id=rse_id, session=session): raise exception.RSEOperationNotSupported('RSE \'%s\' is not empty' % get_rse_name(rse_id=rse_id, session=session)) except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound('RSE with id \'%s\' cannot be found' % rse_id) rse = old_rse.rse old_rse.delete(session=session) try: del_rse_attribute(rse_id=rse_id, key=rse, session=session) except exception.RSEAttributeNotFound: pass @transactional_session def restore_rse(rse_id, session=None): old_rse = None try: old_rse = session.query(models.RSE).filter_by(id=rse_id, deleted=True).one() except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound('RSE with id \'%s\' cannot be found' % rse_id) old_rse.deleted = False old_rse.deleted_at = None old_rse.save(session=session) rse = old_rse.rse add_rse_attribute(rse_id=rse_id, key=rse, value=True, session=session) @read_session def rse_is_empty(rse_id, session=None): is_empty = False try: is_empty = get_counter(rse_id, session=session)['bytes'] == 0 except exception.CounterNotFound: is_empty = True return is_empty @read_session def get_rse(rse_id, session=None): false_value = False try: tmp = session.query(models.RSE). filter(sqlalchemy.and_(models.RSE.deleted == false_value, models.RSE.id == rse_id)) .one() tmp['type'] = tmp.rse_type return tmp except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound('RSE with id \'%s\' cannot be found' % rse_id) @read_session def get_rse_id(rse, vo='def', session=None, include_deleted=True): if include_deleted: if vo != 'def': cache_key = 'rse-id_{}@{}'.format(rse, vo).replace(' ', '.') else: cache_key = 'rse-id_{}'.format(rse).replace(' ', '.') result = REGION.get(cache_key) if result != NO_VALUE: return result try: query = session.query(models.RSE.id).filter_by(rse=rse, vo=vo) if not include_deleted: query = query.filter_by(deleted=False) result = query.one()[0] except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound("RSE '%s' cannot be found in vo '%s'" % (rse, vo)) if include_deleted: REGION.set(cache_key, result) return result @read_session def get_rse_name(rse_id, session=None, include_deleted=True): if include_deleted: cache_key = 'rse-name_{}'.format(rse_id) result = REGION.get(cache_key) if result != NO_VALUE: return result try: query = session.query(models.RSE.rse).filter_by(id=rse_id) if not include_deleted: query = query.filter_by(deleted=False) result = query.one()[0] except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound('RSE with ID \'%s\' cannot be found' % rse_id) if include_deleted: REGION.set(cache_key, result) return result @read_session def get_rse_vo(rse_id, session=None, include_deleted=True): if include_deleted: cache_key = 'rse-vo_{}'.format(rse_id) result = REGION.get(cache_key) if result != NO_VALUE: return result try: query = session.query(models.RSE.vo).filter_by(id=rse_id) if not include_deleted: query = query.filter_by(deleted=False) result = query.one()[0] except sqlalchemy.orm.exc.NoResultFound: raise exception.RSENotFound('RSE with ID \'%s\' cannot be found' % rse_id) if 
include_deleted: REGION.set(cache_key, result) return result @read_session def list_rses(filters={}, session=None): rse_list = [] availability_mask1 = 0 availability_mask2 = 7 availability_mapping = {'availability_read': 4, 'availability_write': 2, 'availability_delete': 1} false_value = False if filters and filters.get('vo'): filters = filters.copy() vo = filters.pop('vo') else: vo = None if filters: if 'availability' in filters and ('availability_read' in filters or 'availability_write' in filters or 'availability_delete' in filters): raise exception.InvalidObject('Cannot use availability and read, write, delete filter at the same time.') query = session.query(models.RSE). join(models.RSEAttrAssociation, models.RSE.id == models.RSEAttrAssociation.rse_id). filter(models.RSE.deleted == false_value).group_by(models.RSE) for (k, v) in filters.items(): if hasattr(models.RSE, k): if k == 'rse_type': query = query.filter(getattr(models.RSE, k) == RSEType[v]) else: query = query.filter(getattr(models.RSE, k) == v) elif k in ['availability_read', 'availability_write', 'availability_delete']: if v: availability_mask1 = availability_mask1 | availability_mapping[k] else: availability_mask2 = availability_mask2 & ~availability_mapping[k] else: t = aliased(models.RSEAttrAssociation) query = query.join(t, t.rse_id == models.RSEAttrAssociation.rse_id) query = query.filter(t.key == k, t.value == v) condition1, condition2 = [], [] for i in range(0, 8): if i | availability_mask1 == i: condition1.append(models.RSE.availability == i) if i & availability_mask2 == i: condition2.append(models.RSE.availability == i) if 'availability' not in filters: query = query.filter(sqlalchemy.and_(sqlalchemy.or_(*condition1), sqlalchemy.or_(*condition2))) else: query = session.query(models.RSE).filter_by(deleted=False).order_by(models.RSE.rse) if vo: query = query.filter(getattr(models.RSE, 'vo') == vo) for row in query: dic = {} for column in row.__table__.columns: dic[column.name] = getattr(row, column.name) rse_list.append(dic) return rse_list @transactional_session def add_rse_attribute(rse_id, key, value, session=None): try: new_rse_attr = models.RSEAttrAssociation(rse_id=rse_id, key=key, value=value) new_rse_attr = session.merge(new_rse_attr) new_rse_attr.save(session=session) except IntegrityError: rse = get_rse_name(rse_id=rse_id, session=session) raise exception.Duplicate("RSE attribute '%(key)s-%(value)s\' for RSE '%(rse)s' already exists!" % locals()) return True @transactional_session def del_rse_attribute(rse_id, key, session=None): rse_attr = None try: query = session.query(models.RSEAttrAssociation).filter_by(rse_id=rse_id).filter(models.RSEAttrAssociation.key == key) rse_attr = query.one() except sqlalchemy.orm.exc.NoResultFound: raise exception.RSEAttributeNotFound('RSE attribute \'%s\' cannot be found' % key) rse_attr.delete(session=session) return True @read_session def list_rse_attributes(rse_id, session=None): rse_attrs = {} query = session.query(models.RSEAttrAssociation).filter_by(rse_id=rse_id) for attr in query: rse_attrs[attr.key] = attr.value return rse_attrs @read_session def has_rse_attribute(rse_id, key, session=None): if session.query(models.RSEAttrAssociation.value).filter_by(rse_id=rse_id, key=key).first(): return True return False @read_session def get_rses_with_attribute(key, session=None): rse_list = [] query = session.query(models.RSE). join(models.RSEAttrAssociation, models.RSE.id == models.RSEAttrAssociation.rse_id). 
filter(models.RSE.deleted == False, models.RSEAttrAssociation.key == key).group_by(models.RSE) for row in query: d = {} for column in row.__table__.columns: d[column.name] = getattr(row, column.name) rse_list.append(d) return rse_list @read_session def get_rses_with_attribute_value(key, value, lookup_key, vo='def', session=None): if vo != 'def': cache_key = 'av-%s-%s-%s@%s' % (key, value, lookup_key, vo) else: cache_key = 'av-%s-%s-%s' % (key, value, lookup_key) result = REGION.get(cache_key) if result is NO_VALUE: rse_list = [] subquery = session.query(models.RSEAttrAssociation.rse_id) .filter(models.RSEAttrAssociation.key == key, models.RSEAttrAssociation.value == value) .subquery() query = session.query(models.RSEAttrAssociation.rse_id, models.RSEAttrAssociation.key, models.RSEAttrAssociation.value) .join(models.RSE, models.RSE.id == models.RSEAttrAssociation.rse_id) .join(subquery, models.RSEAttrAssociation.rse_id == subquery.c.rse_id) .filter(models.RSE.deleted == false(), models.RSEAttrAssociation.key == lookup_key, models.RSE.vo == vo) for row in query: rse_list.append({'rse_id': row[0], 'key': row[1], 'value': row[2]}) REGION.set(cache_key, rse_list) return rse_list return result @read_session def get_rse_attribute(key, rse_id=None, value=None, use_cache=True, session=None): result = NO_VALUE if use_cache: result = REGION.get('%s-%s-%s' % (key, rse_id, value)) if result is NO_VALUE: rse_attrs = [] if rse_id: query = session.query(models.RSEAttrAssociation.value).filter_by(rse_id=rse_id, key=key).distinct() if value: query = session.query(models.RSEAttrAssociation.value).filter_by(rse_id=rse_id, key=key, value=value).distinct() else: query = session.query(models.RSEAttrAssociation.value).filter_by(key=key).distinct() if value: query = session.query(models.RSEAttrAssociation.value).filter_by(key=key, value=value).distinct() for attr_value in query: rse_attrs.append(attr_value[0]) REGION.set('%s-%s-%s' % (key, rse_id, value), rse_attrs) return rse_attrs return result @read_session def get_rse_supported_checksums(rse_id, session=None): return parse_checksum_support_attribute(get_rse_attribute(key=CHECKSUM_KEY, rse_id=rse_id, session=session)) def get_rse_supported_checksums_from_attributes(rse_attributes): return parse_checksum_support_attribute(rse_attributes.get(CHECKSUM_KEY)) def parse_checksum_support_attribute(checksum_attribute): if not checksum_attribute: return GLOBALLY_SUPPORTED_CHECKSUMS else: supported_checksum_list = checksum_attribute[0].split(',') if 'none' in supported_checksum_list: return [] return supported_checksum_list @read_session def get_rse_is_checksum_supported(checksum_name, rse_id=None, session=None): if is_checksum_valid(checksum_name): return checksum_name in get_rse_supported_checksums(rse_id=rse_id, session=session) else: return False @transactional_session def set_rse_usage(rse_id, source, used, free, files=None, session=None): rse_usage = models.RSEUsage(rse_id=rse_id, source=source, used=used, free=free, files=files) rse_usage = session.merge(rse_usage) rse_usage.save(session=session) return True @read_session def get_rse_usage(rse_id, source=None, session=None, per_account=False): query_rse_usage = session.query(models.RSEUsage).filter_by(rse_id=rse_id) usage = list() if source: query_rse_usage = query_rse_usage.filter_by(source=source) rse = get_rse_name(rse_id=rse_id, session=session) for row in query_rse_usage: total = (row.free or 0) + (row.used or 0) rse_usage = {'rse_id': rse_id, 'rse': rse, 'source': row.source, 'used': row.used, 'free': 
row.free, 'total': total, 'files': row.files, 'updated_at': row.updated_at} if per_account and row.source == 'rucio': query_account_usage = session.query(models.AccountUsage).filter_by(rse_id=rse_id) account_usages = [] for row in query_account_usage: if row.bytes != 0: percentage = round(float(row.bytes) / float(total) * 100, 2) if total else 0 account_usages.append({'used': row.bytes, 'account': row.account, 'percentage': percentage}) account_usages.sort(key=lambda x: x['used'], reverse=True) rse_usage['account_usages'] = account_usages usage.append(rse_usage) return usage @transactional_session def set_rse_limits(rse_id: str, name: str, value: int, session: 'Session' = None) -> bool: rse_limit = models.RSELimit(rse_id=rse_id, name=name, value=value) rse_limit = session.merge(rse_limit) rse_limit.save(session=session) return True @read_session def get_rse_limits(rse_id: str, name: 'Optional[str]' = None, session: 'Session' = None) -> 'Dict[str, int]': query = session.query(models.RSELimit).filter_by(rse_id=rse_id) if name: query = query.filter_by(name=name) return {limit.name: limit.value for limit in query} @transactional_session def delete_rse_limits(rse_id: str, name: 'Optional[str]' = None, session: 'Session' = None) -> None: try: session.query(models.RSELimit).filter_by(rse_id=rse_id, name=name).delete() except IntegrityError as error: raise exception.RucioException(error.args) @transactional_session def set_rse_transfer_limits(rse_id, activity, rse_expression=None, max_transfers=0, transfers=0, waitings=0, volume=0, deadline=1, strategy='fifo', session=None): try: rse_tr_limit = models.RSETransferLimit(rse_id=rse_id, activity=activity, rse_expression=rse_expression, max_transfers=max_transfers, transfers=transfers, waitings=waitings, volume=volume, strategy=strategy, deadline=deadline) rse_tr_limit = session.merge(rse_tr_limit) rowcount = rse_tr_limit.save(session=session) return rowcount except IntegrityError as error: raise exception.RucioException(error.args) @read_session def get_rse_transfer_limits(rse_id=None, activity=None, session=None): try: query = session.query(models.RSETransferLimit) if rse_id: query = query.filter_by(rse_id=rse_id) if activity: query = query.filter_by(activity=activity) limits = {} for limit in query: if limit.activity not in limits: limits[limit.activity] = {} limits[limit.activity][limit.rse_id] = {'max_transfers': limit.max_transfers, 'transfers': limit.transfers, 'waitings': limit.waitings, 'volume': limit.volume, 'strategy': limit.strategy, 'deadline': limit.deadline} return limits except IntegrityError as error: raise exception.RucioException(error.args) @transactional_session
Apache License 2.0
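A hedged example of calling the rucio helper above; the RSE name and activity are made-up values, get_rse_id comes from the same module, and the session arguments are injected by the decorators.

# Hypothetical call sites for delete_rse_transfer_limits().
rse_id = get_rse_id(rse='MOCK-RSE')                                           # resolve an RSE name to its id
deleted = delete_rse_transfer_limits(rse_id, activity='User Subscriptions')   # drop one activity's limit
deleted_all = delete_rse_transfer_limits(rse_id)                              # or drop every limit for the RSE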
anassinator/markov-sentence-correction
deserializer.py
get_all_ngrams
python
def get_all_ngrams():
    ngrams = list(get_ngram(i) for i in range(len(_ngram_paths)))
    return ngrams
Returns a list of all available n-grams.
https://github.com/anassinator/markov-sentence-correction/blob/ebdc07c37e48a110e7cd86d806e20879ef4c12a0/deserializer.py#L198-L201
import os import pickle class Vocabulary(object): def __init__(self, fname): with open(fname) as f: lines = f.readlines() self._set = set() self._list = ['' for i in range(len(lines))] for l in lines: if l: try: index, word = l.split(' ') index = int(index) word = word.strip() except ValueError: continue self._list[index - 1] = word self._set.add(word) def __iter__(self): return iter(self._list) def __contains__(self, word): return word in self._set def get(self, i): return self._list[i - 1] class MarkovChain(object): def __init__(self, order, vocabulary): self._chain = {} self._order = order self._vocab = vocabulary def __contains__(self, present_state): return present_state in self._chain @property def order(self): return self._order def _set(self, present_state, future_state, prob): present_words = tuple(map(self._vocab.get, present_state)) if present_words not in self._chain: self._chain[present_words] = {} future_word = self._vocab.get(future_state) self._chain[present_words][future_word] = prob def yield_future_states(self, present_state): if present_state not in self._chain: return possible_outcomes = self._chain[present_state] for future_state in possible_outcomes: yield (future_state, possible_outcomes[future_state]) @classmethod def from_file(cls, fname, order, vocabulary): with open(fname) as f: lines = f.readlines() chain = MarkovChain(order, vocabulary) for l in lines: if l: try: *present_state, future_state, prob = l.split(' ') present_state = tuple(map(int, present_state)) future_state = int(future_state) prob = 10 ** float(prob) except ValueError: continue chain._set(present_state, future_state, prob) return chain _vocabulary_path = ("data/vocab.p", "data/vocab.txt") _ngram_paths = [ ("data/unigrams.p", "data/unigram_counts.txt"), ("data/bigrams.p", "data/bigram_counts.txt"), ("data/trigrams.p", "data/trigram_counts.txt") ] def get_vocabulary(): serialized_file, raw_file = _vocabulary_path if not os.path.isfile(serialized_file): vocabulary = Vocabulary(raw_file) pickle.dump(vocabulary, open(serialized_file, "wb")) else: vocabulary = pickle.load(open(serialized_file, "rb")) return vocabulary def get_ngram(order): serialized_file, raw_file = _ngram_paths[order] if not os.path.isfile(serialized_file): vocabulary = get_vocabulary() chain = MarkovChain.from_file(raw_file, order, vocabulary) pickle.dump(chain, open(serialized_file, "wb")) else: chain = pickle.load(open(serialized_file, "rb")) return chain
MIT License
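A small sketch of the deserializer API in the record above; the probe word is illustrative and may not be present in the shipped vocabulary.

# Load every Markov chain and inspect the transitions out of one bigram state.
unigrams, bigrams, trigrams = get_all_ngrams()
print(bigrams.order)                                        # 1 for the bigram chain
for word, prob in bigrams.yield_future_states(('the',)):    # hypothetical present state
    print(word, prob)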
flyteorg/flytekit
flytekit/engines/flyte/engine.py
FlyteEngineFactory.get_workflow_execution
python
def get_workflow_execution(self, wf_exec):
    return FlyteWorkflowExecution(wf_exec)
:param flytekit.common.workflow_execution.SdkWorkflowExecution wf_exec:
:rtype: FlyteWorkflowExecution
https://github.com/flyteorg/flytekit/blob/6c032035563ae645b0b93558b3fe3362080057ea/flytekit/engines/flyte/engine.py#L95-L100
import logging as _logging import os as _os import traceback as _traceback from datetime import datetime as _datetime import six as _six from deprecated import deprecated as _deprecated from flyteidl.core import literals_pb2 as _literals_pb2 import flytekit from flytekit.clients.friendly import SynchronousFlyteClient as _SynchronousFlyteClient from flytekit.clients.helpers import iterate_node_executions as _iterate_node_executions from flytekit.clients.helpers import iterate_task_executions as _iterate_task_executions from flytekit.common import constants as _constants from flytekit.common import utils as _common_utils from flytekit.common.exceptions import scopes as _exception_scopes from flytekit.common.exceptions import user as _user_exceptions from flytekit.configuration import auth as _auth_config from flytekit.configuration import internal as _internal_config from flytekit.configuration import platform as _platform_config from flytekit.configuration import sdk as _sdk_config from flytekit.engines import common as _common_engine from flytekit.interfaces.data import data_proxy as _data_proxy from flytekit.interfaces.stats.taggable import get_stats as _get_stats from flytekit.models import common as _common_models from flytekit.models import execution as _execution_models from flytekit.models import literals as _literals from flytekit.models import task as _task_models from flytekit.models.admin import common as _common from flytekit.models.admin import workflow as _workflow_model from flytekit.models.core import errors as _error_models from flytekit.models.core import identifier as _identifier class _FlyteClientManager(object): _CLIENT = None def __init__(self, *args, **kwargs): if type(self)._CLIENT is None: c = _SynchronousFlyteClient(*args, **kwargs) type(self)._CLIENT = c @property def client(self): return type(self)._CLIENT def get_client() -> _SynchronousFlyteClient: return _FlyteClientManager(_platform_config.URL.get(), insecure=_platform_config.INSECURE.get()).client class FlyteEngineFactory(_common_engine.BaseExecutionEngineFactory): def get_workflow(self, sdk_workflow): return FlyteWorkflow(sdk_workflow) def get_task(self, sdk_task): return FlyteTask(sdk_task) def get_launch_plan(self, sdk_launch_plan): return FlyteLaunchPlan(sdk_launch_plan) def get_task_execution(self, task_exec): return FlyteTaskExecution(task_exec) def get_node_execution(self, node_exec): return FlyteNodeExecution(node_exec)
Apache License 2.0
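A hedged sketch of the factory method above; constructing FlyteEngineFactory with no arguments and the origin of sdk_wf_exec are assumptions, not shown in the record.

# Hypothetical wrapping of an existing SdkWorkflowExecution.
factory = FlyteEngineFactory()
engine_exec = factory.get_workflow_execution(sdk_wf_exec)   # -> FlyteWorkflowExecution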
robotools/fontparts
Lib/fontParts/base/kerning.py
BaseKerning.values
python
def values(self):
    return super(BaseKerning, self).values()
Returns a ``list`` of each pair's values; the values will be
:ref:`type-int-float`\s. The list will be unordered.

    >>> font.kerning.values()
    [-20, -15, 5, 3.5]
https://github.com/robotools/fontparts/blob/f15d379c46eb9eaf60ce36fb220e2680c51e81c7/Lib/fontParts/base/kerning.py#L430-L440
from fontParts.base.base import ( BaseDict, dynamicProperty, interpolate, reference ) from fontParts.base import normalizers from fontParts.base.deprecated import DeprecatedKerning, RemovedKerning class BaseKerning(BaseDict, DeprecatedKerning, RemovedKerning): keyNormalizer = normalizers.normalizeKerningKey valueNormalizer = normalizers.normalizeKerningValue def _reprContents(self): contents = [] if self.font is not None: contents.append("for font") contents += self.font._reprContents() return contents _font = None font = dynamicProperty("font", "The Kerning's parent :class:`BaseFont`.") def _get_font(self): if self._font is None: return None return self._font() def _set_font(self, font): if self._font is not None and self._font() != font: raise AssertionError("font for kerning already set and is not same as font") if font is not None: font = reference(font) self._font = font def scaleBy(self, factor): factor = normalizers.normalizeTransformationScale(factor) self._scale(factor) def _scale(self, factor): factor = factor[0] for k, v in self.items(): v *= factor self[k] = v def round(self, multiple=1): if not isinstance(multiple, int): raise TypeError("The round multiple must be an int not %s." % multiple.__class__.__name__) self._round(multiple) def _round(self, multiple=1): for pair, value in self.items(): value = int(normalizers.normalizeVisualRounding( value / float(multiple))) * multiple self[pair] = value def interpolate(self, factor, minKerning, maxKerning, round=True, suppressError=True): factor = normalizers.normalizeInterpolationFactor(factor) if not isinstance(minKerning, BaseKerning): raise TypeError(("Interpolation to an instance of %r can not be " "performed from an instance of %r.") % ( self.__class__.__name__, minKerning.__class__.__name__)) if not isinstance(maxKerning, BaseKerning): raise TypeError(("Interpolation to an instance of %r can not be " "performed from an instance of %r.") % ( self.__class__.__name__, maxKerning.__class__.__name__)) round = normalizers.normalizeBoolean(round) suppressError = normalizers.normalizeBoolean(suppressError) self._interpolate(factor, minKerning, maxKerning, round=round, suppressError=suppressError) def _interpolate(self, factor, minKerning, maxKerning, round=True, suppressError=True): import fontMath from fontMath.mathFunctions import setRoundIntegerFunction setRoundIntegerFunction(normalizers.normalizeVisualRounding) kerningGroupCompatibility = self._testKerningGroupCompatibility( minKerning, maxKerning, suppressError=suppressError ) if not kerningGroupCompatibility: self.clear() else: minKerning = fontMath.MathKerning( kerning=minKerning, groups=minKerning.font.groups) maxKerning = fontMath.MathKerning( kerning=maxKerning, groups=maxKerning.font.groups) result = interpolate(minKerning, maxKerning, factor) if round: result.round() self.clear() result.extractKerning(self.font) @staticmethod def _testKerningGroupCompatibility(minKerning, maxKerning, suppressError=False): minGroups = minKerning.font.groups maxGroups = maxKerning.font.groups match = True while match: for _, sideAttr in ( ("side 1", "side1KerningGroups"), ("side 2", "side2KerningGroups") ): minSideGroups = getattr(minGroups, sideAttr) maxSideGroups = getattr(maxGroups, sideAttr) if minSideGroups.keys() != maxSideGroups.keys(): match = False else: for name in minSideGroups.keys(): minGroup = minSideGroups[name] maxGroup = maxSideGroups[name] if set(minGroup) != set(maxGroup): match = False break if not match and not suppressError: raise ValueError("The kerning groups must be 
exactly the same.") return match def remove(self, pair): del self[pair] def asDict(self, returnIntegers=True): d = {} for k, v in self.items(): d[k] = v if not returnIntegers else normalizers.normalizeVisualRounding(v) return d def __contains__(self, pair): return super(BaseKerning, self).__contains__(pair) def __delitem__(self, pair): super(BaseKerning, self).__delitem__(pair) def __getitem__(self, pair): return super(BaseKerning, self).__getitem__(pair) def __iter__(self): return super(BaseKerning, self).__iter__() def __len__(self): return super(BaseKerning, self).__len__() def __setitem__(self, pair, value): super(BaseKerning, self).__setitem__(pair, value) def clear(self): super(BaseKerning, self).clear() def get(self, pair, default=None): return super(BaseKerning, self).get(pair, default) def find(self, pair, default=None): pair = normalizers.normalizeKerningKey(pair) value = self._find(pair, default) if value != default: value = normalizers.normalizeKerningValue(value) return value def _find(self, pair, default=None): from fontTools.ufoLib.kerning import lookupKerningValue font = self.font groups = font.groups return lookupKerningValue(pair, self, groups, fallback=default) def items(self): return super(BaseKerning, self).items() def keys(self): return super(BaseKerning, self).keys() def pop(self, pair, default=None): return super(BaseKerning, self).pop(pair, default) def update(self, otherKerning): super(BaseKerning, self).update(otherKerning)
MIT License
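Illustrative use of BaseKerning.values() on a hypothetical font object from a fontParts environment.

# Inspect the spread of kerning values in a font (the font object is assumed to exist).
values = font.kerning.values()          # unordered list of int/float kerning values
if values:
    print(min(values), max(values))     # e.g. the tightest and loosest pairs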
drkane/find-that-charity
ftc/models/related_organisation.py
RelatedOrganisation.schema_dot_org
python
def schema_dot_org(self, request=None):
    obj = {
        "@context": "https://schema.org",
        "@type": "Organization",
        "name": self.name,
        "identifier": self.org_id,
    }
    if self.first("url"):
        obj["url"] = self.first("url").get("value")
    if self.first("description"):
        obj["description"] = self.first("description").get("value")
    if self.alternateName:
        obj["alternateName"] = self.alternateName
    if self.dateRegistered:
        obj["foundingDate"] = self.dateRegistered.isoformat()
    if not self.active and self.first("dateRemoved"):
        obj["dissolutionDate"] = self.first("dateRemoved").get("value").isoformat()
    if len(self.orgIDs) > 1:
        if request:
            obj["sameAs"] = [request.build_absolute_uri(id) for id in self.sameAs]
        else:
            obj["sameAs"] = self.sameAs
    return obj
Return a schema.org Organisation object representing this organisation
https://github.com/drkane/find-that-charity/blob/8c2fae8298939a139a8719eb8d8bfbbafa91392a/ftc/models/related_organisation.py#L116-L141
from django.urls import reverse from .organisation import EXTERNAL_LINKS, Organisation from .organisation_type import OrganisationType class RelatedOrganisation: def __init__(self, orgs): self.records = self.prioritise_orgs(orgs) @classmethod def from_orgid(cls, org_id): orgs = Organisation.objects.filter(linked_orgs__contains=[org_id]) return cls(orgs) @property def orgIDs(self): return list(set(self.get_all("orgIDs"))) @property def names(self): names = {} for r in self.records: for n in r.all_names: if n not in names or not n.isupper() or not n.islower(): names[n.lower().strip()] = n return names @property def alternateName(self): names = self.get_all("all_names") return list( set( [ self.names.get(n.lower().strip(), n) for n in names if n.lower().strip() != self.name.lower().strip() ] ) ) @property def name(self): return self.names.get(self.records[0].name.lower(), self.records[0].name) @property def sources(self): sources = list(self.get_all("source")) sources.extend([o.source for o in self.org_links]) return list(set(sources)) @property def org_links(self): org_links = [] for o in self.records: org_links.extend(o.org_links) return list(set(org_links)) def __getattr__(self, key, *args): return getattr(self.records[0], key, *args) def first(self, field, justvalue=False): for r in self.records: if getattr(r, field, None): if justvalue: return getattr(r, field) return { "value": getattr(r, field), "orgid": r.org_id, "source": r.source, } if justvalue: return None return {} def get_all(self, field): seen = set() for r in self.records: values = getattr(r, field, None) if not isinstance(values, list): values = [values] for v in values: if v not in seen: yield v seen.add(v) def prioritise_orgs(self, orgs): return sorted(orgs, key=lambda o: o.get_priority()) def get_links(self): links_seen = set() for r in self.records: for link in r.get_links(): if link[1] not in links_seen: yield link links_seen.add(link[1]) @property def sameAs(self): return [ reverse("orgid_html", kwargs=dict(org_id=o)) for o in self.orgIDs if o != self.org_id ] @property def activeRecords(self): return [r for r in self.records if r.active] @property def inactiveRecords(self): return [r for r in self.records if not r.active]
MIT License
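A hedged usage sketch for schema_dot_org(); the org-id is a placeholder, while from_orgid and schema_dot_org come from the source context.

# Build schema.org JSON-LD for a merged organisation record.
related = RelatedOrganisation.from_orgid("GB-CHC-123456")   # hypothetical org-id
data = related.schema_dot_org()                             # plain dict with "@context"/"@type" keys
# Inside a Django view, pass the request so sameAs entries become absolute URLs:
# data = related.schema_dot_org(request)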
angr/cle
cle/loader.py
Loader.describe_addr
python
def describe_addr(self, addr):
    o = self.find_object_containing(addr)

    if o is None:
        return 'not part of a loaded object'

    options = []

    rva = AT.from_va(addr, o).to_rva()

    idx = o.symbols.bisect_key_right(rva) - 1
    while idx >= 0:
        sym = o.symbols[idx]
        if not sym.name or sym.is_import:
            idx -= 1
            continue
        options.append((sym.relative_addr, '%s+' % sym.name))
        break

    if isinstance(o, ELF):
        try:
            plt_addr, plt_name = max((a, n) for n, a in o._plt.items() if a <= rva)
        except ValueError:
            pass
        else:
            options.append((plt_addr, 'PLT.%s+' % plt_name))

    options.append((0, 'offset '))

    if o.provides:
        objname = o.provides
    elif o.binary:
        objname = os.path.basename(o.binary)
    elif self.main_object is o:
        objname = 'main binary'
    else:
        objname = 'object loaded from stream'

    best_offset, best_prefix = max(options, key=lambda v: v[0])

    return '%s%#x in %s (%#x)' % (best_prefix, rva - best_offset, objname, AT.from_va(addr, o).to_lva())
Returns a textual description of what's in memory at the provided address
https://github.com/angr/cle/blob/7996cb1789eccc461cb31ab3c6234a74015489fd/cle/loader.py#L268-L310
import os import sys import platform import logging from collections import OrderedDict from typing import Optional, List import archinfo from archinfo.arch_soot import ArchSoot from .address_translator import AT from .utils import ALIGN_UP, key_bisect_floor_key, key_bisect_insort_right __all__ = ('Loader',) l = logging.getLogger(name=__name__) class Loader: memory: Optional['Clemory'] main_object: Optional['Backend'] tls: Optional['ThreadManager'] def __init__(self, main_binary, auto_load_libs=True, concrete_target = None, force_load_libs=(), skip_libs=(), main_opts=None, lib_opts=None, ld_path=(), use_system_libs=True, ignore_import_version_numbers=True, case_insensitive=False, rebase_granularity=0x100000, except_missing_libs=False, aslr=False, perform_relocations=True, load_debug_info=False, page_size=0x1, preload_libs=(), arch=None): if hasattr(main_binary, 'seek') and hasattr(main_binary, 'read'): self._main_binary_path = None self._main_binary_stream = main_binary else: self._main_binary_path = os.path.realpath(str(main_binary)) self._main_binary_stream = None self._juggling = False if concrete_target: auto_load_libs = False self._auto_load_libs = auto_load_libs self._load_debug_info = load_debug_info self._satisfied_deps = dict((x, False) for x in skip_libs) self._main_opts = {} if main_opts is None else main_opts self._lib_opts = {} if lib_opts is None else lib_opts self._custom_ld_path = [ld_path] if type(ld_path) is str else ld_path force_load_libs = [force_load_libs] if type(force_load_libs) is str else force_load_libs preload_libs = [preload_libs] if type(preload_libs) is str else preload_libs self._use_system_libs = use_system_libs self._ignore_import_version_numbers = ignore_import_version_numbers self._case_insensitive = case_insensitive self._rebase_granularity = rebase_granularity self._except_missing_libs = except_missing_libs self._relocated_objects = set() self._perform_relocations = perform_relocations if sys.platform == 'win32': if self._main_binary_path: self._main_binary_path = self._main_binary_path.lower() force_load_libs = [x.lower() if type(x) is str else x for x in force_load_libs] for x in list(self._satisfied_deps): self._satisfied_deps[x.lower()] = self._satisfied_deps[x] for x in list(self._lib_opts): self._lib_opts[x.lower()] = self._lib_opts[x] self._custom_ld_path = [x.lower() for x in self._custom_ld_path] self.aslr = aslr self.page_size = page_size self.memory = None self.main_object = None self.tls = None self._kernel_object = None self._extern_object = None self.shared_objects = OrderedDict() self.all_objects = [] self.requested_names = set() if arch is not None: self._main_opts.update({'arch': arch}) self.preload_libs = [] self.initial_load_objects = self._internal_load(main_binary, *preload_libs, *force_load_libs, preloading=(main_binary, *preload_libs)) self._last_object = None if self._extern_object and self._extern_object._warned_data_import: l.warning('For more information about "Symbol was allocated without a known size", see https://docs.angr.io/extending-angr/environment#simdata') def close(self): l.warning("You don't need to close the loader anymore :)") def __repr__(self): if self._main_binary_stream is None: return '<Loaded %s, maps [%#x:%#x]>' % (os.path.basename(self._main_binary_path), self.min_addr, self.max_addr) else: return '<Loaded from stream, maps [%#x:%#x]>' % (self.min_addr, self.max_addr) @property def max_addr(self): return self.all_objects[-1].max_addr @property def min_addr(self): return self.all_objects[0].min_addr 
@property def initializers(self): return sum((x.initializers for x in self.all_objects), []) @property def finalizers(self): return sum((x.finalizers for x in self.all_objects), []) @property def linux_loader_object(self): for obj in self.all_objects: if obj.provides is None: continue if self._is_linux_loader_name(obj.provides) is True: return obj return None @property def elfcore_object(self): for obj in self.all_objects: if isinstance(obj, ELFCore): return obj return None @property def extern_object(self): if self._extern_object is None: if self.main_object.arch.bits < 32: extern_size = 0x200 elif self.main_object.arch.bits == 32: extern_size = 0x8000 else: extern_size = 0x80000 self._extern_object = ExternObject(self, map_size=extern_size) self._internal_load(self._extern_object) return self._extern_object @property def kernel_object(self) -> 'KernelObject': if self._kernel_object is None: self._kernel_object = KernelObject(self) self._map_object(self._kernel_object) return self._kernel_object @property def all_elf_objects(self): return [o for o in self.all_objects if isinstance(o, MetaELF)] @property def all_pe_objects(self): return [o for o in self.all_objects if isinstance(o, PE)] @property def missing_dependencies(self): return self.requested_names - set(k for k,v in self._satisfied_deps.items() if v is not False) @property def auto_load_libs(self): return self._auto_load_libs
BSD 2-Clause Simplified License
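A minimal sketch of Loader.describe_addr(); the binary path is illustrative and the exact output depends on which symbols the binary provides.

# Describe what lives at the entry point of a loaded binary.
import cle

ld = cle.Loader("/bin/true")                     # hypothetical target binary
print(ld.describe_addr(ld.main_object.entry))    # e.g. "_start+0x0 in true (0x...)"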
wjohnson/pyapacheatlas
pyapacheatlas/core/client.py
AtlasClient.get_relationship
python
def get_relationship(self, guid):
    results = None
    atlas_endpoint = self.endpoint_url + f"/relationship/guid/{guid}"

    getResponse = requests.get(
        atlas_endpoint,
        headers=self.authentication.get_authentication_headers()
    )

    results = self._handle_response(getResponse)

    return results
Retrieve the relationship attribute for the given guid.

:param str guid: The unique guid for the relationship.
:return:
    A dict representing AtlasRelationshipWithExtInfo with the
    relationship (what you probably care about) and referredEntities
    attributes.
:rtype: dict(str, dict)
https://github.com/wjohnson/pyapacheatlas/blob/31925f305ee10ebc39ca41bbfa54a0d17bd3a0ec/pyapacheatlas/core/client.py#L458-L478
from .util import AtlasException, AtlasBaseClient, batch_dependent_entities, PurviewLimitation, PurviewOnly from .glossary import _CrossPlatformTerm, GlossaryClient, PurviewGlossaryClient from .typedef import BaseTypeDef from .msgraph import MsGraphClient from .entity import AtlasClassification, AtlasEntity from ..auth.base import AtlasAuthBase import json from json.decoder import JSONDecodeError import logging import re import requests import warnings import sys _AZ_IDENTITY_INSTALLED = False try: import azure.identity _AZ_IDENTITY_INSTALLED = True from ..auth.azcredential import AzCredentialWrapper except ImportError: pass class AtlasClient(AtlasBaseClient): def __init__(self, endpoint_url, authentication=None): super().__init__() self.authentication = authentication self.endpoint_url = endpoint_url self.glossary = GlossaryClient(endpoint_url, authentication) self.is_purview = False self._purview_url_pattern = r"https:\/\/[a-z0-9-]*?\.(catalog\.purview.azure.com)" if re.match(self._purview_url_pattern, self.endpoint_url): self.is_purview = True def _handle_response(self, resp): try: results = json.loads(resp.text) resp.raise_for_status() except JSONDecodeError: raise ValueError("Error in parsing: {}".format(resp.text)) except requests.RequestException as e: if "errorCode" in results: raise AtlasException(resp.text) else: raise requests.RequestException(resp.text) return results def delete_entity(self, guid): results = None if isinstance(guid, list): guid_str = '&guid='.join(guid) else: guid_str = guid atlas_endpoint = self.endpoint_url + "/entity/bulk?guid={}".format(guid_str) deleteEntity = requests.delete( atlas_endpoint, headers=self.authentication.get_authentication_headers()) results = self._handle_response(deleteEntity) return results def delete_relationship(self, guid): results = None atlas_endpoint = self.endpoint_url + f"/relationship/guid/{guid}" deleteType = requests.delete( atlas_endpoint, headers=self.authentication.get_authentication_headers()) try: deleteType.raise_for_status() except requests.RequestException: raise Exception(deleteType.text) results = { "message": f"Successfully deleted relationship with guid {guid}"} return results def delete_type(self, name): results = None atlas_endpoint = self.endpoint_url + f"/types/typedef/name/{name}" deleteType = requests.delete( atlas_endpoint, headers=self.authentication.get_authentication_headers()) try: deleteType.raise_for_status() except requests.RequestException: raise Exception(deleteType.text) results = {"message": f"successfully delete {name}"} return results def delete_typedefs(self, **kwargs): results = None payload = {} allowed_defs = [ "businessMetadataDefs", "classificationDefs", "entityDefs", "enumDefs", "relationshipDefs", "structDefs"] if len(set(kwargs.keys()).intersection(allowed_defs)) == 0: raise TypeError( f"You must include one of these keyword arguments: {allowed_defs}") for defType in allowed_defs: if defType in kwargs: json_list = [t.to_json() if isinstance( t, BaseTypeDef) else t for t in kwargs[defType]] payload[defType] = json_list atlas_endpoint = self.endpoint_url + "/types/typedefs" deleteType = requests.delete( atlas_endpoint, json=payload, headers=self.authentication.get_authentication_headers()) try: deleteType.raise_for_status() except requests.RequestException: raise Exception(deleteType.text) results = {"message": f"Successfully deleted type(s)"} return results def get_entity(self, guid=None, qualifiedName=None, typeName=None, ignoreRelationships=False, minExtInfo=False): results = None 
parameters = {} if isinstance(guid, list): guid_str = '&guid='.join(guid) else: guid_str = guid qualifiedName_params = dict() if isinstance(qualifiedName, list): qualifiedName_params = { f"attr_{idx}:qualifiedName": qname for idx, qname in enumerate(qualifiedName) } else: qualifiedName_params = {"attr_0:qualifiedName": qualifiedName} if qualifiedName and typeName: atlas_endpoint = self.endpoint_url + f"/entity/bulk/uniqueAttribute/type/{typeName}" parameters.update(qualifiedName_params) else: atlas_endpoint = self.endpoint_url + "/entity/bulk?guid={}".format(guid_str) parameters.update( {"ignoreRelationships": ignoreRelationships, "minExtInfo": minExtInfo}) getEntity = requests.get( atlas_endpoint, params=parameters, headers=self.authentication.get_authentication_headers() ) results = self._handle_response(getEntity) return results def get_single_entity(self, guid=None, ignoreRelationships=False, minExtInfo=False): results = None parameters = {} atlas_endpoint = self.endpoint_url + "/entity/guid/{}".format(guid) parameters.update( {"ignoreRelationships": ignoreRelationships, "minExtInfo": minExtInfo}) getEntity = requests.get( atlas_endpoint, params=parameters, headers=self.authentication.get_authentication_headers() ) results = self._handle_response(getEntity) return results def partial_update_entity(self, guid=None, typeName=None, qualifiedName=None, attributes={}): if guid and len(attributes) == 1: atlas_endpoint = self.endpoint_url + f"/entity/guid/{guid}" attribute_name = list(attributes.keys())[0] attribute_value = attributes[attribute_name] putEntity = requests.put( atlas_endpoint, json=attribute_value, params={"name": attribute_name}, headers=self.authentication.get_authentication_headers() ) elif guid: raise ValueError( "When using guid, attributes can only contain one key and value.") elif typeName and qualifiedName: atlas_endpoint = self.endpoint_url + f"/entity/uniqueAttribute/type/{typeName}" get_response = self.get_entity( qualifiedName=qualifiedName, typeName=typeName) try: entity = get_response["entities"][0] except KeyError: raise ValueError( f"The entity with qualifiedName {qualifiedName} and type {typeName} does not exist and cannot be updated.") entity["attributes"].update(attributes) entityInfo = {"entity": entity, "referredEntities": get_response["referredEntities"]} putEntity = requests.put( atlas_endpoint, json=entityInfo, params={"attr:qualifiedName": qualifiedName}, headers=self.authentication.get_authentication_headers() ) else: raise ValueError( "The provided combination of arguments is not supported. 
Either provide a guid or type name and qualified name") results = self._handle_response(putEntity) return results def get_entity_classification(self, guid, classificationName): atlas_endpoint = self.endpoint_url + f"/entity/guid/{guid}/classification/{classificationName}" getClassification = requests.get( atlas_endpoint, headers=self.authentication.get_authentication_headers() ) results = self._handle_response(getClassification) return results def get_entity_classifications(self, guid): atlas_endpoint = self.endpoint_url + f"/entity/guid/{guid}/classifications" getClassification = requests.get( atlas_endpoint, headers=self.authentication.get_authentication_headers() ) results = self._handle_response(getClassification) return results def get_entity_header(self, guid=None): results = None parameters = {} atlas_endpoint = self.endpoint_url + "/entity/guid/{}/header".format(guid) getEntity = requests.get( atlas_endpoint, params=parameters, headers=self.authentication.get_authentication_headers() ) results = self._handle_response(getEntity) return results
MIT License
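A hypothetical call to get_relationship(); the endpoint, auth object and guid are placeholders, and only AtlasClient and the method itself come from this record.

# Fetch a relationship by guid from an Atlas/Purview endpoint.
client = AtlasClient(
    endpoint_url="http://localhost:21000/api/atlas/v2",
    authentication=auth,   # any pyapacheatlas auth object exposing get_authentication_headers()
)
resp = client.get_relationship("00000000-0000-0000-0000-000000000000")
print(resp["relationship"]["typeName"])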
aws/aws-xray-sdk-python
aws_xray_sdk/ext/httplib/patch.py
unpatch
python
def unpatch():
    _PATCHED_MODULES.discard('httplib')
    setattr(httplib, PATCH_FLAG, False)
    unwrap(httplib.HTTPConnection, '_send_request')
    unwrap(httplib.HTTPConnection, 'getresponse')
    unwrap(httplib.HTTPResponse, 'read')
Unpatch any previously patched modules. This operation is idempotent.
https://github.com/aws/aws-xray-sdk-python/blob/0e1f935bd2040ee7dbf0625db7f7ad780c66fb37/aws_xray_sdk/ext/httplib/patch.py#L218-L228
from collections import namedtuple import sys import wrapt import fnmatch import urllib3.connection from aws_xray_sdk.core import xray_recorder from aws_xray_sdk.core.models import http from aws_xray_sdk.core.exceptions.exceptions import SegmentNotFoundException from aws_xray_sdk.core.patcher import _PATCHED_MODULES from aws_xray_sdk.ext.util import inject_trace_header, strip_url, unwrap, get_hostname if sys.version_info >= (3, 0, 0): PY2 = False httplib_client_module = 'http.client' import http.client as httplib else: PY2 = True httplib_client_module = 'httplib' import httplib _XRAY_PROP = '_xray_prop' _XRay_Data = namedtuple('xray_data', ['method', 'host', 'url']) _XRay_Ignore = namedtuple('xray_ignore', ['subclass', 'hostname', 'urls']) PATCH_FLAG = '__xray_patched' _XRAY_IGNORE = set() def add_ignored(subclass=None, hostname=None, urls=None): global _XRAY_IGNORE if subclass is not None or hostname is not None or urls is not None: urls = urls if urls is None else tuple(urls) _XRAY_IGNORE.add(_XRay_Ignore(subclass=subclass, hostname=hostname, urls=urls)) def reset_ignored(): global _XRAY_IGNORE _XRAY_IGNORE.clear() _ignored_add_default() def _ignored_add_default(): add_ignored(subclass='botocore.awsrequest.AWSHTTPConnection', urls=['/GetSamplingRules', '/SamplingTargets']) _ignored_add_default() def http_response_processor(wrapped, instance, args, kwargs, return_value, exception, subsegment, stack): xray_data = getattr(instance, _XRAY_PROP, None) if not xray_data: return subsegment.put_http_meta(http.METHOD, xray_data.method) subsegment.put_http_meta(http.URL, strip_url(xray_data.url)) if return_value: subsegment.put_http_meta(http.STATUS, return_value.status) xray_data = _XRay_Data('READ', xray_data.host, xray_data.url) setattr(return_value, _XRAY_PROP, xray_data) if exception: subsegment.add_exception(exception, stack) def _xray_traced_http_getresponse(wrapped, instance, args, kwargs): if not PY2 and kwargs.get('buffering', False): return wrapped(*args, **kwargs) xray_data = getattr(instance, _XRAY_PROP, None) if not xray_data: return wrapped(*args, **kwargs) return xray_recorder.record_subsegment( wrapped, instance, args, kwargs, name=get_hostname(xray_data.url), namespace='remote', meta_processor=http_response_processor, ) def http_send_request_processor(wrapped, instance, args, kwargs, return_value, exception, subsegment, stack): xray_data = getattr(instance, _XRAY_PROP, None) if not xray_data: return subsegment.put_http_meta(http.METHOD, xray_data.method) subsegment.put_http_meta(http.URL, strip_url(xray_data.url)) if exception: subsegment.add_exception(exception, stack) def _ignore_request(instance, hostname, url): global _XRAY_IGNORE module = instance.__class__.__module__ if module is None or module == str.__class__.__module__: subclass = instance.__class__.__name__ else: subclass = module + '.' 
+ instance.__class__.__name__ for rule in _XRAY_IGNORE: subclass_match = subclass == rule.subclass if rule.subclass is not None else True host_match = fnmatch.fnmatch(hostname, rule.hostname) if rule.hostname is not None else True url_match = url in rule.urls if rule.urls is not None else True if url_match and host_match and subclass_match: return True return False def _send_request(wrapped, instance, args, kwargs): def decompose_args(method, url, body, headers, encode_chunked=False): if _ignore_request(instance, instance.host, url): return wrapped(*args, **kwargs) subsegment = None try: subsegment = xray_recorder.current_subsegment() except SegmentNotFoundException: pass if subsegment: inject_trace_header(headers, subsegment) if issubclass(instance.__class__, urllib3.connection.HTTPSConnection): ssl_cxt = getattr(instance, 'ssl_context', None) elif issubclass(instance.__class__, httplib.HTTPSConnection): ssl_cxt = getattr(instance, '_context', None) else: ssl_cxt = getattr(instance, 'ssl_context', None) scheme = 'https' if ssl_cxt and type(ssl_cxt).__name__ == 'SSLContext' else 'http' xray_url = '{}://{}{}'.format(scheme, instance.host, url) xray_data = _XRay_Data(method, instance.host, xray_url) setattr(instance, _XRAY_PROP, xray_data) return xray_recorder.record_subsegment( wrapped, instance, args, kwargs, name=get_hostname(xray_data.url), namespace='remote', meta_processor=http_send_request_processor ) return decompose_args(*args, **kwargs) def http_read_processor(wrapped, instance, args, kwargs, return_value, exception, subsegment, stack): xray_data = getattr(instance, _XRAY_PROP, None) if not xray_data: return subsegment.put_http_meta(http.METHOD, xray_data.method) subsegment.put_http_meta(http.URL, strip_url(xray_data.url)) subsegment.put_http_meta(http.STATUS, instance.status) if exception: subsegment.add_exception(exception, stack) def _xray_traced_http_client_read(wrapped, instance, args, kwargs): xray_data = getattr(instance, _XRAY_PROP, None) if not xray_data: return wrapped(*args, **kwargs) return xray_recorder.record_subsegment( wrapped, instance, args, kwargs, name=get_hostname(xray_data.url), namespace='remote', meta_processor=http_read_processor ) def patch(): if getattr(httplib, PATCH_FLAG, False): return setattr(httplib, PATCH_FLAG, True) wrapt.wrap_function_wrapper( httplib_client_module, 'HTTPConnection._send_request', _send_request ) wrapt.wrap_function_wrapper( httplib_client_module, 'HTTPConnection.getresponse', _xray_traced_http_getresponse ) wrapt.wrap_function_wrapper( httplib_client_module, 'HTTPResponse.read', _xray_traced_http_client_read )
Apache License 2.0
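Illustrative use of the module-level patch()/unpatch() pair shown above; the import path follows the file location, and the ignore rule is an example of the add_ignored() helper from the same module.

# Trace outgoing http.client requests with X-Ray, then undo the patch.
from aws_xray_sdk.ext.httplib.patch import patch, unpatch, add_ignored

patch()                                                  # wraps _send_request/getresponse/read
add_ignored(hostname="*.internal", urls=["/health"])     # skip noisy endpoints (hypothetical rule)
# ... application traffic ...
unpatch()                                                # idempotent: safe to call more than once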
pygae/clifford
clifford/_multivector.py
MultiVector.lc
python
def lc(self, other) -> 'MultiVector':
    other, mv = self._checkOther(other, coerce=True)

    newValue = self.layout.lcmt_func(self.value, other.value)

    return self._newMV(newValue)
r"""The left-contraction of two multivectors, :math:`M\rfloor N`
https://github.com/pygae/clifford/blob/e63f8564d64d6a5dad5fbb415772eb8aecbc2d8f/clifford/_multivector.py#L635-L642
import numbers import math from typing import List, Set, Tuple, Union import warnings import numpy as np import clifford as cf import clifford.taylor_expansions as taylor_expansions from . import _settings from ._layout_helpers import layout_short_name class MultiVector(object): __array_priority__ = 100 def __init__(self, layout, value=None, string=None, *, dtype: np.dtype = np.float64) -> None: self.layout = layout if value is None: if string is None: self.value = np.zeros((self.layout.gaDims,), dtype=dtype) else: self.value = layout.parse_multivector(string).value else: self.value = np.array(value) if self.value.shape != (self.layout.gaDims,): raise ValueError( "value must be a sequence of length %s" % self.layout.gaDims) def __array__(self) -> 'cf.MVArray': return cf.array(self) def _checkOther(self, other, coerce=True) -> Tuple['MultiVector', bool]: if isinstance(other, MultiVector): if other.layout != self.layout: raise ValueError( "cannot operate on MultiVectors with different Layouts") else: return other, True elif isinstance(other, numbers.Number): if coerce: newOther = self._newMV(dtype=np.result_type(other)) newOther[()] = other return newOther, True else: return other, False else: return other, False def _newMV(self, newValue=None, *, dtype: np.dtype = None) -> 'MultiVector': if newValue is None and dtype is None: raise TypeError("Must specify either a type or value") return self.__class__(self.layout, newValue, dtype=dtype) def exp(self) -> 'MultiVector': return taylor_expansions.exp(self) def cos(self) -> 'MultiVector': return taylor_expansions.cos(self) def sin(self) -> 'MultiVector': return taylor_expansions.sin(self) def tan(self) -> 'MultiVector': return taylor_expansions.tan(self) def sinh(self) -> 'MultiVector': return taylor_expansions.sinh(self) def cosh(self) -> 'MultiVector': return taylor_expansions.cosh(self) def tanh(self) -> 'MultiVector': return taylor_expansions.tanh(self) def vee(self, other) -> 'MultiVector': return self.layout.MultiVector(value=self.layout.vee_func(self.value, other.value)) def __and__(self, other) -> 'MultiVector': return self.vee(other) def __mul__(self, other) -> 'MultiVector': other, mv = self._checkOther(other, coerce=False) if mv: newValue = self.layout.gmt_func(self.value, other.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return obj*other newValue = other * self.value return self._newMV(newValue) def __rmul__(self, other) -> 'MultiVector': other, mv = self._checkOther(other, coerce=False) if mv: newValue = self.layout.gmt_func(other.value, self.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return other*obj newValue = other*self.value return self._newMV(newValue) def __xor__(self, other) -> 'MultiVector': other, mv = self._checkOther(other, coerce=False) if mv: newValue = self.layout.omt_func(self.value, other.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return obj^other newValue = other*self.value return self._newMV(newValue) def __rxor__(self, other) -> 'MultiVector': other, mv = self._checkOther(other, coerce=False) if mv: newValue = self.layout.omt_func(other.value, self.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return other^obj newValue = other * self.value return self._newMV(newValue) def __or__(self, other) -> 'MultiVector': other, mv = self._checkOther(other) if mv: newValue = self.layout.imt_func(self.value, other.value) else: if isinstance(other, np.ndarray): obj = self.__array__() return obj|other return 
self._newMV(dtype=np.result_type(self.value.dtype, other)) return self._newMV(newValue) __ror__ = __or__ def __add__(self, other) -> 'MultiVector': other, mv = self._checkOther(other) if not mv: if isinstance(other, np.ndarray): obj = self.__array__() return obj + other newValue = self.value + other.value return self._newMV(newValue) __radd__ = __add__ def __sub__(self, other) -> 'MultiVector': other, mv = self._checkOther(other) if not mv: if isinstance(other, np.ndarray): obj = self.__array__() return obj - other newValue = self.value - other.value return self._newMV(newValue) def __rsub__(self, other) -> 'MultiVector': other, mv = self._checkOther(other) if not mv: if isinstance(other, np.ndarray): obj = self.__array__() return other - obj newValue = other.value - self.value return self._newMV(newValue) def right_complement(self) -> 'MultiVector': return self.layout.MultiVector(value=self.layout.right_complement_func(self.value)) def left_complement(self) -> 'MultiVector': return self.layout.MultiVector(value=self.layout.left_complement_func(self.value)) def __truediv__(self, other) -> 'MultiVector': other, mv = self._checkOther(other, coerce=False) if mv: return self * other.inv() else: if isinstance(other, np.ndarray): obj = self.__array__() return obj/other newValue = self.value / other return self._newMV(newValue) def __rtruediv__(self, other) -> 'MultiVector': other, mv = self._checkOther(other) if isinstance(other, np.ndarray): obj = self.__array__() return other / obj return other * self.inv() def __pow__(self, other) -> 'MultiVector': if not isinstance(other, (int, float)): raise ValueError("exponent must be a Python int or float") if abs(round(other) - other) > _settings._eps: raise ValueError("exponent must have no fractional part") other = int(round(other)) if other == 0: return self._newMV(dtype=self.value.dtype) + 1 newMV = self._newMV(np.array(self.value)) for i in range(1, other): newMV = newMV * self return newMV def __rpow__(self, other) -> 'MultiVector': newMV = taylor_expansions.exp(math.log(other) * self) return newMV def __lshift__(self, other) -> 'MultiVector': return self.lc(other) def __neg__(self) -> 'MultiVector': newValue = -self.value return self._newMV(newValue) def as_array(self) -> np.ndarray: return self.value def __pos__(self) -> 'MultiVector': newValue = self.value + 0 return self._newMV(newValue) def mag2(self) -> numbers.Number: mv_val = self.layout.gmt_func(self.layout.adjoint_func(self.value), self.value) return mv_val[0] def __abs__(self) -> numbers.Number: return np.sqrt(abs(self.mag2())) def adjoint(self) -> 'MultiVector': return self._newMV(self.layout.adjoint_func(self.value)) __invert__ = adjoint def __int__(self) -> int: return int(self.__float__()) def __float__(self) -> float: if self.isScalar(): return float(self[()]) else: raise ValueError("non-scalar coefficients are non-zero") def __len__(self) -> int: warnings.warn( "Treating MultiVector objects like a sequence is deprecated. " "To access the coefficients as a sequence, use the `.value` attribute.", DeprecationWarning, stacklevel=2) return self.layout.gaDims def __getitem__(self, key: Union['MultiVector', tuple, int]) -> numbers.Number: if isinstance(key, MultiVector): inds, = np.nonzero(key.value) if len(inds) > 1: raise ValueError("Must be a single basis element") return self.value[inds[0]] elif isinstance(key, tuple): sign, idx = self.layout._sign_and_index_from_tuple(key) return sign*self.value[idx] else: warnings.warn( "Treating MultiVector objects like a sequence is deprecated. 
" "To access the coefficients as a sequence, use the `.value` attribute.", DeprecationWarning, stacklevel=2) return self.value[key] def __setitem__(self, key: Union[tuple, int], value: numbers.Number) -> None: if isinstance(key, tuple): sign, idx = self.layout._sign_and_index_from_tuple(key) self.value[idx] = sign*value else: warnings.warn( "Treating MultiVector objects like a sequence is deprecated. " "To access the coefficients as a sequence, use the `.value` attribute.", DeprecationWarning, stacklevel=2) self.value[key] = value def __call__(self, other, *others) -> 'MultiVector': if isinstance(other, MultiVector): return other.project(self) else: grade = other if len(others) != 0: return sum([self.__call__(k) for k in (other,)+others]) if not np.issubdtype(type(grade), np.integer): raise ValueError("grade must be an integer") mask = self.layout.grade_mask(grade) newValue = np.multiply(mask, self.value) return self._newMV(newValue) def __str__(self) -> str: s = '' p = _settings._print_precision for grade, name, coeff in zip(self.layout._basis_blade_order.grades, self.layout.names, self.value): if s: seps = (' + ', ' - ') else: seps = ('', '-') if abs(coeff) < _settings._eps: continue else: if coeff < 0: sep = seps[1] sign = -1 else: sep = seps[0] sign = 1 if np.issubdtype(self.value.dtype, np.inexact): abs_coeff = sign*np.round(coeff, p) else: abs_coeff = sign*coeff if grade == 0: s = '%s%s%s' % (s, sep, abs_coeff) else: s = '%s%s(%s^%s)' % (s, sep, abs_coeff, name) if s: return s else: return '0' def __repr__(self) -> str: if _settings._pretty: return self.__str__() if self.value.dtype != np.float64: dtype_str = ", dtype={}".format(self.value.dtype) else: dtype_str = None l_name = layout_short_name(self.layout) args = dict(v=list(self.value), d=dtype_str) if l_name is not None: return "{l}.MultiVector({v!r}{d})".format(l=l_name, **args) else: return "{l!r}.MultiVector({v!r}{d})".format(l=self.layout, **args) def _repr_pretty_(self, p, cycle): if cycle: raise RuntimeError("Should not be cyclic") if _settings._pretty: p.text(str(self)) return l_name = layout_short_name(self.layout) if l_name is not None: prefix = "{}.MultiVector(".format(l_name) include_layout = False else: include_layout = True prefix = "MultiVector(" with p.group(len(prefix), prefix, ")"): if include_layout: p.pretty(self.layout) p.text(",") p.breakable() p.text(repr(list(self.value))) if self.value.dtype != np.float64: p.text(",") p.breakable() p.text("dtype={}".format(self.value.dtype)) def __bool__(self) -> bool: zeroes = np.absolute(self.value) < _settings._eps return not zeroes.all() def __eq__(self, other) -> bool: other, mv = self._checkOther(other) if not mv: return NotImplemented if (np.absolute(self.value - other.value) < _settings._eps).all(): return True else: return False def clean(self, eps=None) -> 'MultiVector': if eps is None: eps = _settings._eps mask = np.absolute(self.value) > eps self.value = mask * self.value return self def round(self, eps=None) -> 'MultiVector': if eps is None: eps = _settings._eps self.value = np.around(self.value, eps) return self
BSD 3-Clause New or Revised License
wavefronthq/python-client
wavefront_api_client/models/access_control_list_read_dto.py
AccessControlListReadDTO.to_dict
python
def to_dict(self):
    result = {}

    for attr, _ in six.iteritems(self.swagger_types):
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = list(map(
                lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                value
            ))
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(map(
                lambda item: (item[0], item[1].to_dict())
                if hasattr(item[1], "to_dict") else item,
                value.items()
            ))
        else:
            result[attr] = value
    if issubclass(AccessControlListReadDTO, dict):
        for key, value in self.items():
            result[key] = value

    return result
Returns the model properties as a dict
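A minimal usage sketch, not part of the original record; the entity id and the empty ACL lists are illustrative placeholders (real responses carry AccessControlElement objects in those lists):

from wavefront_api_client.models.access_control_list_read_dto import AccessControlListReadDTO

# hypothetical ACL payload
acl = AccessControlListReadDTO(entity_id="dashboard-123", modify_acl=[], view_acl=[])
print(acl.to_dict())  # {'entity_id': 'dashboard-123', 'modify_acl': [], 'view_acl': []}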
https://github.com/wavefronthq/python-client/blob/e410ce0dd8a2334e995456f4f3d44e0f04664a3a/wavefront_api_client/models/access_control_list_read_dto.py#L134-L159
import pprint import re import six from wavefront_api_client.configuration import Configuration class AccessControlListReadDTO(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'entity_id': 'str', 'modify_acl': 'list[AccessControlElement]', 'view_acl': 'list[AccessControlElement]' } attribute_map = { 'entity_id': 'entityId', 'modify_acl': 'modifyAcl', 'view_acl': 'viewAcl' } def __init__(self, entity_id=None, modify_acl=None, view_acl=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._entity_id = None self._modify_acl = None self._view_acl = None self.discriminator = None if entity_id is not None: self.entity_id = entity_id if modify_acl is not None: self.modify_acl = modify_acl if view_acl is not None: self.view_acl = view_acl @property def entity_id(self): return self._entity_id @entity_id.setter def entity_id(self, entity_id): self._entity_id = entity_id @property def modify_acl(self): return self._modify_acl @modify_acl.setter def modify_acl(self, modify_acl): self._modify_acl = modify_acl @property def view_acl(self): return self._view_acl @view_acl.setter def view_acl(self, view_acl): self._view_acl = view_acl
Apache License 2.0
phuks-co/throat
app/views/auth.py
register
python
def register():
    if current_user.is_authenticated:
        return redirect(url_for("home.index"))
    form = RegistrationForm()
    if email_validation_is_required():
        del form.email_optional
    else:
        del form.email_required
    captcha = misc.create_captcha()
    if not config.site.enable_registration:
        return engine.get_template("user/registration_disabled.html").render({})

    if not form.validate():
        return engine.get_template("user/register.html").render(
            {"error": misc.get_errors(form, True), "regform": form, "captcha": captcha}
        )
    if not misc.validate_captcha(form.ctok.data, form.captcha.data):
        return engine.get_template("user/register.html").render(
            {"error": _("Invalid captcha."), "regform": form, "captcha": captcha}
        )
    if not misc.allowedNames.match(form.username.data):
        return engine.get_template("user/register.html").render(
            {"error": _("Username has invalid characters."), "regform": form, "captcha": captcha}
        )

    existing_user = None
    try:
        existing_user = User.get(fn.Lower(User.name) == form.username.data.lower())
        if (
            existing_user.status != UserStatus.PROBATION
            or (datetime.utcnow() - existing_user.joindate).days < 2
        ):
            return engine.get_template("user/register.html").render(
                {"error": _("Username is not available."), "regform": form, "captcha": captcha}
            )
    except User.DoesNotExist:
        pass

    if email_validation_is_required():
        email = form.email_required.data
    else:
        email = form.email_optional.data
    if email:
        email = normalize_email(email)

    if email:
        if is_domain_banned(email, domain_type="email"):
            return engine.get_template("user/register.html").render(
                {"error": _("We do not accept emails from your email provider."), "regform": form, "captcha": captcha}
            )
        user_by_email = auth_provider.get_user_by_email(email)
        if user_by_email is not None and user_by_email != existing_user:
            return engine.get_template("user/register.html").render(
                {"error": _("E-mail address is already in use."), "regform": form, "captcha": captcha}
            )

    if config.site.enable_security_question:
        if form.securityanswer.data.lower() != session["sa"].lower():
            return engine.get_template("user/register.html").render(
                {"error": _("Incorrect answer for security question."), "regform": form, "captcha": captcha}
            )

    if config.site.require_invite_code:
        if not form.invitecode.data:
            return engine.get_template("user/register.html").render(
                {"error": _("Invalid invite code."), "regform": form, "captcha": captcha}
            )
        try:
            InviteCode.get_valid(form.invitecode.data)
        except InviteCode.DoesNotExist:
            return engine.get_template("user/register.html").render(
                {"error": _("Invalid invite code."), "regform": form, "captcha": captcha}
            )
        InviteCode.update(uses=InviteCode.uses + 1).where(
            InviteCode.code == form.invitecode.data
        ).execute()

    user = create_user(
        form.username.data, form.password.data, email, form.invitecode.data, existing_user
    )

    if email_validation_is_required():
        send_login_link_email(user)
        session["reg"] = {"uid": user.uid, "email": user.email, "now": datetime.utcnow()}
        return redirect(url_for("auth.confirm_registration"))
    else:
        theuser = misc.load_user(user.uid)
        login_user(theuser)
        session["remember_me"] = False
        return redirect(url_for("wiki.welcome"))
Endpoint for the registration form
https://github.com/phuks-co/throat/blob/27c7c18faa371def668bdbe6f7e95c6bf32a1829/app/views/auth.py#L98-L232
from urllib.parse import urlparse from datetime import datetime, timedelta import uuid import re import requests from peewee import fn from flask import ( Blueprint, request, redirect, abort, url_for, session, current_app, flash, jsonify, ) from flask_login import current_user, login_user, login_required from flask_babel import _ from itsdangerous import URLSafeTimedSerializer from itsdangerous.exc import SignatureExpired, BadSignature from .. import misc from ..config import config from ..auth import auth_provider, email_validation_is_required from ..auth import normalize_email, create_user from ..forms import LoginForm, RegistrationForm, ResendConfirmationForm from ..misc import engine, send_email, is_domain_banned, gevent_required from ..misc import ratelimit, AUTH_LIMIT, SIGNUP_LIMIT from ..models import User, UserStatus, InviteCode, rconn bp = Blueprint("auth", __name__) def sanitize_serv(serv): serv = serv.replace("%253A", "%3A") return serv.replace("%252F", "%2F") def generate_cas_token(uid): token = str(uuid.uuid4()) rconn.setex(name="cas-" + token, value=uid, time=30) return token def handle_cas_ok(uid): return redirect( sanitize_serv(request.args.get("service")) + "&ticket=" + generate_cas_token(uid) ) @bp.route("/proxyValidate", methods=["GET"]) @ratelimit(AUTH_LIMIT) def sso_proxy_validate(): if not request.args.get("ticket") or not request.args.get("service"): abort(400) pipe = rconn.pipeline() pipe.get("cas-" + request.args.get("ticket")) pipe.delete("cas-" + request.args.get("ticket")) red_c = pipe.execute() if red_c: try: user = User.get((User.uid == red_c[0].decode()) & (User.status << (0, 100))) except User.DoesNotExist: return ( "<cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>" '<cas:authenticationFailure code="INVALID_TICKET">' + _("User not found or invalid ticket") + "</cas:authenticationFailure></cas:serviceResponse>", 401, ) return ( "<cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>" f"<cas:authenticationSuccess><cas:user>{user.name.lower()}</cas:user>" "</cas:authenticationSuccess></cas:serviceResponse>", 200, ) else: return ( "<cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>" '<cas:authenticationFailure code="INVALID_TICKET">' + _("User not found or invalid ticket") + "</cas:authenticationFailure></cas:serviceResponse>", 401, ) @bp.route("/register", methods=["GET", "POST"]) @gevent_required @ratelimit(SIGNUP_LIMIT, methods=["POST"])
MIT License
magenta/ddsp
ddsp/training/nn.py
straight_through_softmax
python
def straight_through_softmax(logits):
    probs = tf.nn.softmax(logits)
    one_hot = tfp.distributions.OneHotCategorical(probs=probs)
    sample = tf.cast(one_hot.sample(), tf.float32)
    p_sample = probs * sample
    sample = tf.stop_gradient(sample - p_sample) + p_sample
    return sample, probs
Straight-through estimator of a one-hot categorical distribution.
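A short usage sketch, assuming the function above is importable from ddsp.training.nn as the record's path suggests; the logits below are random placeholders:

import tensorflow as tf
from ddsp.training import nn

logits = tf.random.normal([4, 10])  # a batch of 4 categorical logit vectors
sample, probs = nn.straight_through_softmax(logits)
# `sample` is exactly one-hot in the forward pass, while gradients flow through the
# `probs * sample` term, which is what makes the sampling step differentiable.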
https://github.com/magenta/ddsp/blob/56266e9c255019df050a3c20255caa2beaa912ac/ddsp/training/nn.py#L331-L338
import inspect from ddsp import core from ddsp import losses from ddsp import spectral_ops import gin import tensorflow as tf import tensorflow_addons as tfa import tensorflow_probability as tfp tfk = tf.keras tfkl = tfk.layers class DictLayer(tfkl.Layer): def __init__(self, input_keys=None, output_keys=None, **kwargs): super().__init__(**kwargs) if not input_keys: input_keys = self.get_argument_names('call') self.default_input_keys = list(self.get_default_argument_names('call')) self.default_input_values = list(self.get_default_argument_values('call')) else: self.default_input_keys = [] self.default_input_values = [] output_keys = output_keys or self.get_return_annotations('call') self.input_keys = list(input_keys) self.output_keys = list(output_keys) @property def all_input_keys(self): return self.input_keys + self.default_input_keys @property def n_inputs(self): return len(self.all_input_keys) def __call__(self, *inputs, **kwargs): input_dict = {} for v in inputs: if isinstance(v, dict): input_dict.update(v) inputs = [v for v in inputs if not isinstance(v, dict)] for key in self.all_input_keys: if key in kwargs: input_dict[key] = kwargs[key] kwargs = {k: v for k, v in kwargs.items() if k not in self.all_input_keys} for key in self.input_keys: try: inputs.append(core.nested_lookup(key, input_dict)) except KeyError: pass for key, value in zip(self.default_input_keys, self.default_input_values): try: inputs.append(core.nested_lookup(key, input_dict)) except KeyError: if len(inputs) < self.n_inputs: inputs.append(value) if len(inputs) != self.n_inputs: raise TypeError(f'{len(inputs)} input tensors extracted from inputs' '(including default args) but the layer expects ' f'{self.n_inputs} tensors.\n' f'Input keys: {self.input_keys}\n' f'Default keys: {self.default_input_keys}\n' f'Default values: {self.default_input_values}\n' f'Input dictionaries: {input_dict}\n' f'Input Tensors (Args, Dicts, and Defaults): {inputs}\n') outputs = super().__call__(*inputs, **kwargs) if isinstance(outputs, dict): return outputs else: outputs = core.make_iterable(outputs) if len(self.output_keys) != len(outputs): raise ValueError(f'Output keys ({self.output_keys}) must have the same' f'length as outputs ({outputs})') return dict(zip(self.output_keys, outputs)) def get_argument_names(self, method): spec = inspect.getfullargspec(getattr(self, method)) if spec.defaults: n_defaults = len(spec.defaults) return spec.args[1:-n_defaults] else: return spec.args[1:] def get_default_argument_names(self, method): spec = inspect.getfullargspec(getattr(self, method)) if spec.defaults: n_defaults = len(spec.defaults) return spec.args[-n_defaults:] else: return [] def get_default_argument_values(self, method): spec = inspect.getfullargspec(getattr(self, method)) if spec.defaults: return spec.defaults else: return [] def get_return_annotations(self, method): spec = inspect.getfullargspec(getattr(self, method)) return core.make_iterable(spec.annotations['return']) class OutputSplitsLayer(DictLayer): def __init__(self, input_keys=None, output_splits=(('amps', 1), ('harmonic_distribution', 40)), **kwargs): self.output_splits = output_splits self.n_out = sum([v[1] for v in output_splits]) self.dense_out = tfkl.Dense(self.n_out) input_keys = input_keys or self.get_argument_names('compute_output') output_keys = [v[0] for v in output_splits] super().__init__(input_keys=input_keys, output_keys=output_keys, **kwargs) def call(self, *inputs, **unused_kwargs): output = self.compute_output(*inputs) return 
split_to_dict(self.dense_out(output), self.output_splits) def compute_output(self, *inputs): raise NotImplementedError def ensure_4d(x): if len(x.shape) == 2: return x[:, tf.newaxis, tf.newaxis, :] elif len(x.shape) == 3: return x[:, :, tf.newaxis, :] else: return x def inv_ensure_4d(x, n_dims): if n_dims == 2: return x[:, 0, 0, :] if n_dims == 3: return x[:, :, 0, :] else: return x @gin.register def split_to_dict(tensor, tensor_splits): labels = [v[0] for v in tensor_splits] sizes = [v[1] for v in tensor_splits] tensors = tf.split(tensor, sizes, axis=-1) return dict(zip(labels, tensors)) def get_nonlinearity(nonlinearity): try: return tf.keras.activations.get(nonlinearity) except ValueError: pass return getattr(tf.nn, nonlinearity)
Apache License 2.0
google-research/pyreach
pyreach/impl/actions_impl.py
ActionsImpl.action_names
python
def action_names(self) -> Tuple[str, ...]:
    return tuple([action.get_name() for action in self._actions])
Return the list of available action template names.
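A tiny usage sketch; an empty action list is used purely for illustration, whereas real code would build it from parsed Action objects:

from pyreach.impl.actions_impl import ActionsImpl

actions = ActionsImpl([])        # normally populated from Action.from_json(...) entries
print(actions.action_names())    # () -- the available action template names, as a tuple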
https://github.com/google-research/pyreach/blob/83cac8e235ba1392dcdc6b8d19202c3eff3ad9a6/pyreach/impl/actions_impl.py#L903-L905
import enum import json import logging import threading from typing import Any, Dict, List, Optional, Set, Tuple, Union, Type from pyreach import actionsets from pyreach.common.python import types_gen from pyreach.impl import device_base class PreconditionType(enum.Enum): DIGITAL_IO = 0 class Precondition: _precondition_type: PreconditionType _digital_io_value: bool _digital_io_number: int _max_digital_io_number: int def __init__(self, precondition_type: PreconditionType, digital_io_value: bool, digital_io_number: int, max_digital_io_number: int) -> None: self._precondition_type = precondition_type self._digital_io_value = digital_io_value self._digital_io_number = digital_io_number self._max_digital_io_number = max_digital_io_number def get_precondition_type(self) -> PreconditionType: return self._precondition_type def get_digital_io_value(self) -> bool: return self._digital_io_value def get_digital_io_number(self) -> int: return self._digital_io_number def get_max_digital_io_number(self) -> int: return self._max_digital_io_number @classmethod def from_json(cls, json_data: Dict[str, Any]) -> Optional["Precondition"]: if (not isinstance(json_data.get("_preconditionType", ""), int) or json_data["_preconditionType"] < 0 or json_data["_preconditionType"] >= len(list(PreconditionType))): logging.warning("Invalid _preconditionType in %s", json_data) return None if not isinstance(json_data.get("_digitalIONumber"), int): logging.warning("Invalid _digitalIONumber in %s", json_data) return None if not isinstance(json_data.get("_digitalIOValue"), bool): logging.warning("Invalid _digitalIOValue in %s", json_data) return None if not isinstance(json_data.get("_maxDigitalIONumber"), int): logging.warning("Invalid _maxDigitalIONumber in %s", json_data) return None return Precondition( list(PreconditionType)[json_data["_preconditionType"]], json_data["_digitalIOValue"], json_data["_digitalIONumber"], json_data["_maxDigitalIONumber"]) TOOL_INTERACTION_POINT_OBJECT_TYPES = ("Torus", "Capsule", "Cube", "Cylinder", "Sphere", "Blister", "BlisterBatchInput", "Deodorant", "DeodorantKitPose", "Empty") class ToolInteractionPoint: _name: str _tip_object_type: str _pick_data: types_gen.ExperimentalCommandData _dimensions: types_gen.Vec3d _padding: Tuple[float, float, float, float, float, float] _tip_pos: types_gen.Vec3d _tip_rot: types_gen.Quaternion3d _local_go_pos: types_gen.Vec3d _local_go_rot: types_gen.Quaternion3d def __init__(self, name: str, tip_object_type: str, pick_data: types_gen.ExperimentalCommandData, dimensions: types_gen.Vec3d, padding: Tuple[float, float, float, float, float, float], tip_pos: types_gen.Vec3d, tip_rot: types_gen.Quaternion3d, local_go_pos: types_gen.Vec3d, local_go_rot: types_gen.Quaternion3d) -> None: self._name = name self._tip_object_type = tip_object_type self._pick_data = pick_data self._dimensions = dimensions self._padding = padding self._tip_pos = tip_pos self._tip_rot = tip_rot self._local_go_pos = local_go_pos self._local_go_rot = local_go_rot def get_name(self) -> str: return self._name def get_tip_object_type(self) -> str: return self._tip_object_type def get_pick_data(self) -> types_gen.ExperimentalCommandData: return self._pick_data def get_dimensions(self) -> types_gen.Vec3d: return self._dimensions def get_padding(self) -> Tuple[float, float, float, float, float, float]: return self._padding def get_tip_pos(self) -> types_gen.Vec3d: return self._tip_pos def get_tip_rot(self) -> types_gen.Quaternion3d: return self._tip_rot def get_local_go_pos(self) -> 
types_gen.Vec3d: return self._local_go_pos def get_local_go_rot(self) -> types_gen.Quaternion3d: return self._local_go_rot @classmethod def from_json(cls, json_data: Dict[str, Any]) -> Optional["ToolInteractionPoint"]: if not isinstance(json_data.get("Name"), str): logging.warning("Action ToolInteractionPoint Name invalid: %s", json_data) return None if (json_data.get("TIPObjectType") not in TOOL_INTERACTION_POINT_OBJECT_TYPES): logging.warning("Action ToolInteractionPoint TIPObjectType invalid: %s", json_data) return None if not isinstance(json_data.get("PickData"), dict): logging.warning("Action ToolInteractionPoint PickData invalid: %s", json_data) return None try: pick_data = types_gen.ExperimentalCommandData.from_json( json_data["PickData"]) except ValueError as ex: logging.warning("Action ToolInteractionPoint PickData invalid: %s %s", json_data, ex) return None dimensions = _from_json_vector3(json_data.get("Dimensions")) if dimensions is None: logging.warning("Action ToolInteractionPoint Dimensions invalid: %s", json_data) return None if not isinstance(json_data.get("Padding"), list): logging.warning("Action ToolInteractionPoint Padding invalid: %s", json_data) return None for padding_element in json_data["Padding"]: if not (isinstance(padding_element, float) or isinstance(padding_element, int)): logging.warning("Action ToolInteractionPoint Padding invalid: %s", json_data) return None if len(json_data["Padding"]) != 6: logging.warning("Action ToolInteractionPoint Padding invalid: %s", json_data) return None padding = (float(json_data["Padding"][0]), float(json_data["Padding"][1]), float(json_data["Padding"][2]), float(json_data["Padding"][3]), float(json_data["Padding"][4]), float(json_data["Padding"][5])) tip_pos = _from_json_vector3(json_data.get("TIPPos")) if tip_pos is None: logging.warning("Action ToolInteractionPoint TIPPos invalid: %s", json_data) return None tip_rot = _from_json_quaternion(json_data.get("TIPRot")) if tip_rot is None: logging.warning("Action ToolInteractionPoint TIPRot invalid: %s", json_data) return None local_go_pos = _from_json_vector3(json_data.get("LocalGOPos")) if local_go_pos is None: logging.warning("Action ToolInteractionPoint LocalGOPos invalid: %s", json_data) return None local_go_rot = _from_json_quaternion(json_data.get("LocalGORot")) if local_go_rot is None: logging.warning("Action ToolInteractionPoint LocalGORot invalid: %s", json_data) return None return ToolInteractionPoint(json_data["Name"], json_data["TIPObjectType"], pick_data, dimensions, padding, tip_pos, tip_rot, local_go_pos, local_go_rot) class ActionStepParentType(enum.Enum): ABSOLUTE = 0 TIP_INPUT = 1 OTHER_STEP = 2 class ActionStep: _tip_input_idx: int _parent_type: ActionStepParentType _pos: types_gen.Vec3d _rot: types_gen.Quaternion3d _delay: float _radius: float _velocity: float _acceleration: float _wait: float _parent_step_idx: int _use_process_mode: bool _individual_velocity_acceleration: float _use_force_mode: bool _use_servo_j_mode: bool _use_skip_move: bool _set_digital_io: bool _set_tool_digital_io: bool _set_digital_io_number: int _set_digital_io_value: bool _set_tool_digital_io_number: int _set_tool_digital_io_value: bool _set_capability: bool _set_capability_name: str _set_capability_type: str _set_capability_value: bool _set_capability_io_type: str _randomized_offset: bool _randomized_offset_radius_cm: float _acquire_image_tag: str _acquire_image_mode: int def __init__(self, tip_input_idx: int, parent_type: ActionStepParentType, pos: types_gen.Vec3d, rot: 
types_gen.Quaternion3d, delay: float, radius: float, velocity: float, acceleration: float, wait: float, parent_step_idx: int, use_process_mode: bool, individual_velocity_acceleration: float, use_force_mode: bool, use_servo_j_mode: bool, use_skip_move: bool, set_digital_io: bool, set_tool_digital_io: bool, set_digital_io_number: int, set_digital_io_value: bool, set_tool_digital_io_number: int, set_tool_digital_io_value: bool, set_capability: bool, set_capability_name: str, set_capability_type: str, set_capability_value: bool, set_capability_io_type: str, randomized_offset: bool, randomized_offset_radius_cm: float, acquire_image_tag: str, acquire_image_mode: int) -> None: self._tip_input_idx = tip_input_idx self._parent_type = parent_type self._pos = pos self._rot = rot self._delay = delay self._radius = radius self._velocity = velocity self._acceleration = acceleration self._wait = wait self._parent_step_idx = parent_step_idx self._use_process_mode = use_process_mode self._individual_velocity_acceleration = individual_velocity_acceleration self._use_force_mode = use_force_mode self._use_servo_j_mode = use_servo_j_mode self._use_skip_move = use_skip_move self._set_digital_io = set_digital_io self._set_tool_digital_io = set_tool_digital_io self._set_digital_io_number = set_digital_io_number self._set_digital_io_value = set_digital_io_value self._set_tool_digital_io_number = set_tool_digital_io_number self._set_tool_digital_io_value = set_tool_digital_io_value self._set_capability = set_capability self._set_capability_name = set_capability_name self._set_capability_type = set_capability_type self._set_capability_value = set_capability_value self._set_capability_io_type = set_capability_io_type self._randomized_offset = randomized_offset self._randomized_offset_radius_cm = randomized_offset_radius_cm self._acquire_image_tag = acquire_image_tag self._acquire_image_mode = acquire_image_mode def get_tip_input_idx(self) -> int: return self._tip_input_idx def get_parent_type(self) -> ActionStepParentType: return self._parent_type def get_pos(self) -> types_gen.Vec3d: return self._pos def get_rot(self) -> types_gen.Quaternion3d: return self._rot def get_delay(self) -> float: return self._delay def get_radius(self) -> float: return self._radius def get_velocity(self) -> float: return self._velocity def get_acceleration(self) -> float: return self._acceleration def get_wait(self) -> float: return self._wait def get_parent_step_idx(self) -> int: return self._parent_step_idx def get_use_process_mode(self) -> bool: return self._use_process_mode def get_individual_velocity_acceleration(self) -> float: return self._individual_velocity_acceleration def get_use_force_mode(self) -> bool: return self._use_force_mode def get_use_servo_j_mode(self) -> bool: return self._use_servo_j_mode def get_use_skip_move(self) -> bool: return self._use_skip_move def get_set_digital_io(self) -> bool: return self._set_digital_io def get_set_tool_digital_io(self) -> bool: return self._set_tool_digital_io def get_set_digital_io_number(self) -> int: return self._set_digital_io_number def get_set_digital_io_value(self) -> bool: return self._set_digital_io_value def get_set_tool_digital_io_number(self) -> int: return self._set_tool_digital_io_number def get_set_tool_digital_io_value(self) -> bool: return self._set_tool_digital_io_value def get_set_capability(self) -> bool: return self._set_capability def get_set_capability_name(self) -> str: return self._set_capability_name def get_set_capability_type(self) -> str: return 
self._set_capability_type def get_set_capability_value(self) -> bool: return self._set_capability_value def get_set_capability_io_type(self) -> str: return self._set_capability_io_type def get_randomized_offset(self) -> bool: return self._randomized_offset def get_randomized_offset_radius_cm(self) -> float: return self._randomized_offset_radius_cm def get_acquire_image_tag(self) -> str: return self._acquire_image_tag def get_acquire_image_mode(self) -> int: return self._acquire_image_mode @classmethod def from_json(cls, json_data: Dict[str, Any]) -> Optional["ActionStep"]: remap = { "TIPInputIdx": "_tipInputIdx", "ParentType": "_parentType", "Delay": "_delay", "Radius": "_radius", "Velocity": "_velocity", "Acceleration": "_acceleration", "wait": "_wait", "Wait": "_wait", "ParentStepIdx": "_parentStepIdx", "UseProcessMode": "_useProcessMode", "setDigitalIO": "_setDigitalIO", "setDigitalIOValue": "_setDigitalIOValue", } for from_key, to_key in remap.items(): if from_key in json_data: json_data[to_key] = json_data[from_key] del json_data[from_key] if not isinstance(json_data.get("_tipInputIdx"), int): logging.warning("Action Step _tipInputIdx invalid: %s", json_data) return None if (not isinstance(json_data.get("_parentType"), int) or not 0 <= json_data["_parentType"] < len(list(ActionStepParentType))): logging.warning("Action Step _parentType invalid: %s", json_data) return None pos = _from_json_vector3(json_data.get("pos")) if pos is None: logging.warning("Action Step pos invalid: %s", json_data) return None rot = _from_json_quaternion(json_data.get("rot")) if rot is None: logging.warning("Action Step rot invalid: %s", json_data) return None if not (isinstance(json_data.get("_delay"), float) or isinstance(json_data.get("_delay"), int)): logging.warning("Action Step _delay invalid: %s", json_data) return None if not (isinstance(json_data.get("_radius"), float) or isinstance(json_data.get("_radius"), int)): logging.warning("Action Step _radius invalid: %s", json_data) return None if not (isinstance(json_data.get("_velocity"), float) or isinstance(json_data.get("_velocity"), int)): logging.warning("Action Step _velocity invalid: %s", json_data) return None if not (isinstance(json_data.get("_acceleration"), float) or isinstance(json_data.get("_acceleration"), int)): logging.warning("Action Step _acceleration invalid: %s", json_data) return None if not (isinstance(json_data.get("_wait"), float) or isinstance(json_data.get("_wait"), int)): logging.warning("Action Step _wait invalid: %s", json_data) return None if not (isinstance(json_data.get("_parentStepIdx"), float) or isinstance(json_data.get("_parentStepIdx"), int)): logging.warning("Action Step _parentStepIdx invalid: %s", json_data) return None if not isinstance(json_data.get("_useProcessMode"), bool): logging.warning("Action Step _useProcessMode invalid: %s", json_data) return None if not isinstance(json_data.get("_individualVelocityAcceleration"), bool): logging.warning("Action Step _individualVelocityAcceleration invalid: %s", json_data) return None if not isinstance(json_data.get("_useForceMode"), bool): logging.warning("Action Step _useForceMode invalid: %s", json_data) return None if not isinstance(json_data.get("_useServoJMode"), bool): logging.warning("Action Step _useServoJMode invalid: %s", json_data) return None if not isinstance(json_data.get("_useSkipMove"), bool): logging.warning("Action Step _useSkipMove invalid: %s", json_data) return None if not isinstance(json_data.get("_setDigitalIO"), bool): logging.warning("Action Step 
_setDigitalIO invalid: %s", json_data) return None if not isinstance(json_data.get("_setToolDigitalIO"), bool): logging.warning("Action Step _setToolDigitalIO invalid: %s", json_data) return None if not isinstance(json_data.get("_setDigitalIONumber"), int): logging.warning("Action Step _setDigitalIONumber invalid: %s", json_data) return None if json_data.get("_setToolDigitalIO", False) and not isinstance( json_data.get("_setToolDigitalIONumber"), int): logging.warning("Action Step _setToolDigitalIONumber invalid: %s", json_data) return None if json_data.get("_setToolDigitalIO", False) and not isinstance( json_data.get("_setToolDigitalIOValue"), bool): logging.warning("Action Step _setToolDigitalIOValue invalid: %s", json_data) return None if not isinstance(json_data.get("_setCapability"), bool): logging.warning("Action Step _setCapability invalid: %s", json_data) return None if not isinstance(json_data.get("_setCapabilityName"), str): logging.warning("Action Step _setCapabilityName invalid: %s", json_data) return None if not isinstance(json_data.get("_setCapabilityType"), str): logging.warning("Action Step _setCapabilityType invalid: %s", json_data) return None if not isinstance(json_data.get("_setCapabilityValue"), bool): logging.warning("Action Step _setCapabilityValue invalid: %s", json_data) return None if not isinstance(json_data.get("_setCapabilityIOType"), str): logging.warning("Action Step _setCapabilityIOType invalid: %s", json_data) return None if not isinstance(json_data.get("_randomizedOffset"), bool): logging.warning("Action Step _randomizedOffset invalid: %s", json_data) return None if not (isinstance(json_data.get("_randomizedOffsetRadiusCM"), float) or isinstance(json_data.get("_randomizedOffsetRadiusCM"), int)): logging.warning("Action Step _randomizedOffsetRadiusCM invalid: %s", json_data) return None if not isinstance(json_data.get("_acquireImageTag", ""), str): logging.warning("Action Step _acquireImageTag invalid: %s", json_data) return None if not isinstance(json_data.get("_acquireImageMode", 0), int): logging.warning("Action Step _acquireImageMode invalid: %s", json_data) return None return ActionStep( json_data["_tipInputIdx"], list(ActionStepParentType)[json_data["_parentType"]], pos, rot, float(json_data["_delay"]), float(json_data["_radius"]), float(json_data["_velocity"]), float(json_data["_acceleration"]), float(json_data["_wait"]), json_data["_parentStepIdx"], json_data["_useProcessMode"], float(json_data["_individualVelocityAcceleration"]), json_data["_useForceMode"], json_data["_useServoJMode"], json_data["_useSkipMove"], json_data["_setDigitalIO"], json_data["_setToolDigitalIO"], json_data["_setDigitalIONumber"], json_data["_setDigitalIOValue"], json_data.get("_setToolDigitalIONumber", 0), json_data.get("_setToolDigitalIOValue", False), json_data["_setCapability"], json_data["_setCapabilityName"], json_data["_setCapabilityType"], json_data["_setCapabilityValue"], json_data["_setCapabilityIOType"], json_data["_randomizedOffset"], float(json_data["_randomizedOffsetRadiusCM"]), json_data.get("_acquireImageTag", ""), json_data.get("_acquireImageMode", 0)) class Action: _steps: List[ActionStep] _preconditions: List[Precondition] _tip_inputs: List[ToolInteractionPoint] _name: str _softstart: bool _softstart_accel: float _softstart_velocity: float _max_accel: float _max_velocity: float _cyclic: bool _task_intent: str _intent: str _success_type: str _capture_depth_behavior: str _loop: bool def __init__(self, _steps: List[ActionStep], _preconditions: 
List[Precondition], _tip_inputs: List[ToolInteractionPoint], _name: str, _softstart: bool, _softstart_accel: float, _softstart_velocity: float, _max_accel: float, _max_velocity: float, _cyclic: bool, _task_intent: str, _intent: str, _success_type: str, _capture_depth_behavior: str, _loop: bool) -> None: self._steps = _steps self._preconditions = _preconditions self._tip_inputs = _tip_inputs self._name = _name self._softstart = _softstart self._softstart_accel = _softstart_accel self._softstart_velocity = _softstart_velocity self._max_accel = _max_accel self._max_velocity = _max_velocity self._cyclic = _cyclic self._task_intent = _task_intent self._intent = _intent self._success_type = _success_type self._capture_depth_behavior = _capture_depth_behavior self._loop = _loop def get_steps(self) -> List[ActionStep]: return self._steps def get_preconditions(self) -> List[Precondition]: return self._preconditions def get_tip_inputs(self) -> List[ToolInteractionPoint]: return self._tip_inputs def get_name(self) -> str: return self._name def get_softstart(self) -> bool: return self._softstart def get_softstart_accel(self) -> float: return self._softstart_accel def get_softstart_velocity(self) -> float: return self._softstart_velocity def get_max_accel(self) -> float: return self._max_accel def get_max_velocity(self) -> float: return self._max_velocity def get_cyclic(self) -> bool: return self._cyclic def get_task_intent(self) -> str: return self._task_intent def get_intent(self) -> str: return self._intent def get_success_type(self) -> str: return self._success_type def get_capture_depth_behavior(self) -> str: return self._capture_depth_behavior def get_loop(self) -> bool: return self._loop @classmethod def from_json(cls, json_data: Dict[str, Any]) -> Optional["Action"]: remap = { "Preconditions": "_preconditions", "TIPInputs": "_tipInputs", "Softstart": "_softstart", "SoftstartAccel": "_softstartAccel", "SoftstartVelocity": "_softstartVelocity", "Name": "_name", "Steps": "_steps", "MaxAccel": "_maxAccel", "MaxVelocity": "_maxVelocity", "Cyclic": "_cylic", "Loop": "_loop", } for from_key, to_key in remap.items(): if from_key in json_data: json_data[to_key] = json_data[from_key] del json_data[from_key] expect: Dict[str, Union[Type[Any], Tuple[Type[Any], ...]]] = { "_steps": list, "_preconditions": list, "_tipInputs": list, "_name": str, "_softstart": bool, "_softstartAccel": float, "_softstartVelocity": float, "_maxAccel": float, "_maxVelocity": float, "_cyclic": bool, "_taskIntent": (str, type(None)), "_intent": str, "_successType": str, "_captureDepthBehavior": str, "_loop": bool } for name, t in expect.items(): if not isinstance(json_data.get(name, None), t): logging.warning("Invalid type for %s in %s.", name, json_data) return None for name in json_data: if name not in expect: logging.warning("extra field %s in %s", name, json_data) steps = [] for step in json_data["_steps"]: step_object = ActionStep.from_json(step) if step_object is None: return None steps.append(step_object) preconditions = [] for precondition in json_data["_preconditions"]: precondition_object = Precondition.from_json(precondition) if precondition_object is None: return None preconditions.append(precondition_object) tips = [] for tip in json_data["_tipInputs"]: tip_object = ToolInteractionPoint.from_json(tip) if tip_object is None: return None tips.append(tip_object) return Action(steps, preconditions, tips, json_data["_name"], json_data["_softstart"], float(json_data["_softstartAccel"]), 
float(json_data["_softstartVelocity"]), float(json_data["_maxAccel"]), float(json_data["_maxVelocity"]), json_data["_cyclic"], json_data.get("_taskIntent", ""), json_data["_intent"], json_data["_successType"], json_data["_captureDepthBehavior"], json_data["_loop"]) def _from_json_vector3(json_data: Any) -> Optional[types_gen.Vec3d]: if not (isinstance(json_data.get("x"), float) or isinstance(json_data.get("x"), int)): return None if not (isinstance(json_data.get("y"), float) or isinstance(json_data.get("y"), int)): return None if not (isinstance(json_data.get("z"), float) or isinstance(json_data.get("z"), int)): return None if len(json_data) != 3: return None return types_gen.Vec3d( float(json_data["x"]), float(json_data["y"]), float(json_data["z"])) def _from_json_quaternion(json_data: Any) -> Optional[types_gen.Quaternion3d]: if not isinstance(json_data, dict): return None if not (isinstance(json_data.get("x"), float) or isinstance(json_data.get("x"), int)): return None if not (isinstance(json_data.get("y"), float) or isinstance(json_data.get("y"), int)): return None if not (isinstance(json_data.get("z"), float) or isinstance(json_data.get("z"), int)): return None if not (isinstance(json_data.get("w"), float) or isinstance(json_data.get("w"), int)): return None if len(json_data) != 4: return None return types_gen.Quaternion3d( float(json_data["w"]), float(json_data["x"]), float(json_data["y"]), float(json_data["z"])) class ActionsImpl(actionsets.Actions): _actions: List[Action] def __init__(self, actions: List[Action]) -> None: self._actions = actions
Apache License 2.0
quixey/python-aliyun
aliyun/ecs/connection.py
EcsConnection.report_expiring_instance
python
def report_expiring_instance(self, days=7):
    expiring_instances = []
    all_instances = self.get_all_instance_ids()
    for ins in all_instances:
        res = self.get_instance(ins)
        if res.instance_charge_type == 'PrePaid':
            tz = res.expired_time.tzinfo
            now = datetime.datetime.now(tz)
            if (res.expired_time - now).days <= days:
                expiring_instances.append(ins)
    return expiring_instances
Report PrePaid instances that are about to expire in <days>.

Args:
    days (int): Check instances that will expire in <days>.
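A usage sketch; the region id and credentials are placeholders and a live Aliyun account is assumed:

from aliyun.ecs.connection import EcsConnection

conn = EcsConnection('cn-hangzhou', access_key_id='...', secret_access_key='...')
# ids of PrePaid instances that expire within the next two weeks
print(conn.report_expiring_instance(days=14))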
https://github.com/quixey/python-aliyun/blob/c07286da8bb6a1c80e8a5f77b19a7c9a132ab5ed/aliyun/ecs/connection.py#L295-L315
import json import time import datetime import logging import dateutil.parser from aliyun.connection import Connection from aliyun.ecs.model import ( AutoSnapshotPolicy, AutoSnapshotExecutionStatus, AutoSnapshotPolicyStatus, Disk, DiskMapping, Image, Instance, InstanceStatus, InstanceType, Region, SecurityGroup, SecurityGroupInfo, SecurityGroupPermission, Snapshot, Zone ) BLOCK_TILL_RUNNING_SECS = 600 logger = logging.getLogger(__name__) class Error(Exception): class EcsConnection(Connection): def __init__(self, region_id, access_key_id=None, secret_access_key=None): super(EcsConnection, self).__init__( region_id, 'ecs', access_key_id=access_key_id, secret_access_key=secret_access_key) def get_all_regions(self): resp = self.get({'Action': 'DescribeRegions'}) regions = [] for region in resp['Regions']['Region']: regions.append(Region(region['RegionId'], region['LocalName'])) return regions def get_all_region_ids(self): return [x.region_id for x in self.get_all_regions()] def get_all_zones(self): resp = self.get({'Action': 'DescribeZones'}) zones = [] for zone in resp['Zones']['Zone']: zid = zone['ZoneId'] zname = zone['LocalName'] resources = zone['AvailableResourceCreation']['ResourceTypes'] disks = zone['AvailableDiskCategories']['DiskCategories'] zones.append(Zone(zid, zname, resources, disks)) return zones def get_all_zone_ids(self): return [z.zone_id for z in self.get_all_zones()] def get_all_clusters(self): params = {'Action': 'DescribeClusters'} clusters = [] for cluster in self.get(params)['Clusters']['Cluster']: clusters.append(cluster['ClusterId']) return clusters def get_all_instance_status(self, zone_id=None): instance_status = [] params = { 'Action': 'DescribeInstanceStatus' } if zone_id is not None: params.update({'ZoneId': zone_id}) for resp in self.get(params, paginated=True): for item in resp['InstanceStatuses']['InstanceStatus']: instance_status.append( InstanceStatus(item['InstanceId'], item['Status'])) return instance_status def get_all_instance_ids(self, zone_id=None): return [x.instance_id for x in self.get_all_instance_status(zone_id)] def get_instance(self, instance_id): resp = self.get({ 'Action': 'DescribeInstanceAttribute', 'InstanceId': instance_id}) return Instance( resp['InstanceId'], resp['InstanceName'], resp['ImageId'], resp['RegionId'], resp['InstanceType'], resp['HostName'], resp['Status'], [x for x in resp['SecurityGroupIds']['SecurityGroupId']], [x for x in resp['PublicIpAddress']['IpAddress']], [x for x in resp['InnerIpAddress']['IpAddress']], resp['InternetChargeType'], int(resp['InternetMaxBandwidthIn']), int(resp['InternetMaxBandwidthOut']), dateutil.parser.parse(resp['CreationTime']), dateutil.parser.parse(resp['ExpiredTime']), resp['InstanceChargeType'], resp['Description'], resp['ClusterId'], [x for x in resp['OperationLocks']['LockReason']], resp['ZoneId']) def start_instance(self, instance_id): self.get({'Action': 'StartInstance', 'InstanceId': instance_id}) def stop_instance(self, instance_id, force=False): self.get({'Action': 'StopInstance', 'InstanceId': instance_id, 'ForceStop': 'true' if force else 'false'}) def reboot_instance(self, instance_id, force=False): self.get({'Action': 'RebootInstance', 'InstanceId': instance_id, 'ForceStop': 'true' if force else 'false'}) def delete_instance(self, instance_id): self.get({'Action': 'DeleteInstance', 'InstanceId': instance_id}) def modify_instance(self, instance_id, new_instance_name=None, new_password=None, new_hostname=None, new_security_group_id=None, new_description=None): params = {'Action': 
'ModifyInstanceAttribute', 'InstanceId': instance_id} if new_instance_name: params['InstanceName'] = new_instance_name if new_password: params['Password'] = new_password if new_hostname: params['HostName'] = new_hostname if new_security_group_id: params['SecurityGroupId'] = new_security_group_id if new_description: params['Description'] = new_description self.get(params) def modify_instance_spec(self, instance_id, instance_type=None, internet_max_bandwidth_out=None, internet_max_bandwidth_in=None): params = {'Action': 'ModifyInstanceSpec', 'InstanceId': instance_id} if instance_type: params['InstanceType'] = instance_type if internet_max_bandwidth_out: params['InternetMaxBandwidthOut'] = internet_max_bandwidth_out if internet_max_bandwidth_in: params['InternetMaxBandwidthIn'] = internet_max_bandwidth_in self.get(params)
Apache License 2.0
ocha-dap/hdx-python-api
src/hdx/data/resource.py
Resource.update_from_yaml
python
def update_from_yaml(
    self, path: str = join("config", "hdx_resource_static.yml")
) -> None:
    super().update_from_yaml(path)
Update resource metadata with static metadata from YAML file

Args:
    path (Optional[str]): Path to YAML dataset metadata. Defaults to config/hdx_resource_static.yml.

Returns:
    None
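A brief usage sketch; the resource name and YAML paths are placeholders, and the static metadata files are assumed to exist at those locations:

from hdx.data.resource import Resource

resource = Resource({"name": "my-resource"})           # hypothetical initial metadata
resource.update_from_yaml()                            # reads config/hdx_resource_static.yml
resource.update_from_yaml("my_resource_static.yml")    # or an explicit path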
https://github.com/ocha-dap/hdx-python-api/blob/3b0b454a5bb2e477b5999cc3a96289919a3a2576/src/hdx/data/resource.py#L65-L76
import datetime import logging from os import remove from os.path import join from pathlib import Path from typing import Any, Dict, List, Optional, Tuple, Union from hdx.utilities import is_valid_uuid from hdx.utilities.downloader import Download from hdx.utilities.loader import load_json, load_yaml from hdx.utilities.path import script_dir_plus_file import hdx.data.dataset import hdx.data.filestore_helper from hdx.data.date_helper import DateHelper from hdx.data.hdxobject import HDXError, HDXObject from hdx.data.resource_view import ResourceView from hdx.hdx_configuration import Configuration logger = logging.getLogger(__name__) class Resource(HDXObject): _formats_dict = None def __init__( self, initial_data: Optional[Dict] = None, configuration: Optional[Configuration] = None, ) -> None: if not initial_data: initial_data = dict() super().__init__(initial_data, configuration=configuration) self.file_to_upload = None @staticmethod def actions() -> Dict[str, str]: return { "show": "resource_show", "update": "resource_update", "create": "resource_create", "patch": "resource_patch", "delete": "resource_delete", "search": "resource_search", "datastore_delete": "datastore_delete", "datastore_create": "datastore_create", "datastore_insert": "datastore_insert", "datastore_upsert": "datastore_upsert", "datastore_search": "datastore_search", }
MIT License
fcurella/django-recommends
recommends/algorithms/naive.py
NaiveAlgorithm.calculate_recommendations
python
def calculate_recommendations(self, vote_list, itemMatch, itemIgnored):
    recommendations = []
    users = set(map(lambda x: x[0], vote_list))
    for user in users:
        rankings = self.get_recommended_items(vote_list, itemMatch, itemIgnored, user)
        recommendations.append((user, rankings))
    return recommendations
``itemMatch`` is supposed to be the result of ``calculate_similarities()``

Returns a list of recommendations::

    [
        (<user1>, [
            ("<object_identifier1>", <score>),
            ("<object_identifier2>", <score>),
        ]),
        (<user2>, [
            ("<object_identifier1>", <score>),
            ("<object_identifier2>", <score>),
        ]),
    ]
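A usage sketch under stated assumptions: the votes are taken to be (user, object_identifier, score) triples, and a namedtuple stands in for the Django user objects, which only need a .pk attribute here:

from collections import namedtuple
from recommends.algorithms.naive import NaiveAlgorithm

User = namedtuple("User", "pk")          # hypothetical stand-in for real user objects
alice, bob = User(1), User(2)
votes = [
    (alice, "book:1", 5.0), (alice, "book:2", 3.0),
    (bob, "book:1", 4.0), (bob, "book:3", 2.0),
]
algo = NaiveAlgorithm()
sims = algo.calculate_similarities(votes)
for user, rankings in algo.calculate_recommendations(votes, sims, itemIgnored={}):
    print(user.pk, list(rankings))       # rankings are produced lazily as a generator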
https://github.com/fcurella/django-recommends/blob/e69820f70c9f7935850c63e01b141ac06cf58e0d/recommends/algorithms/naive.py#L58-L83
from collections import defaultdict import math from recommends.similarities import sim_distance from recommends.converters import convert_vote_list_to_userprefs, convert_vote_list_to_itemprefs from .base import BaseAlgorithm class NaiveAlgorithm(BaseAlgorithm): similarity = sim_distance def top_matches(self, prefs, p1): return [(p2, self.similarity(prefs[p1], prefs[p2])) for p2 in prefs if p2 != p1] def calculate_similarities(self, vote_list, verbose=0): itemPrefs = convert_vote_list_to_itemprefs(vote_list) itemMatch = {} for item in itemPrefs: itemMatch[item] = self.top_matches(itemPrefs, item) iteritems = itemMatch.items() return iteritems def get_recommended_items(self, vote_list, itemMatch, itemIgnored, user): prefs = convert_vote_list_to_userprefs(vote_list) itemMatch = dict(itemMatch) if user in prefs: userRatings = prefs[user] scores = defaultdict(int) totalSim = defaultdict(int) for (item, rating) in userRatings.items(): for (item2, similarity) in itemMatch[item]: if user.pk in itemIgnored and item2 in itemIgnored[user.pk]: continue if not math.isnan(similarity) and item2 not in userRatings: scores[item2] += similarity * rating totalSim[item2] += similarity rankings = ((item, (score / totalSim[item])) for item, score in scores.items() if totalSim[item] != 0) return rankings return []
MIT License
pythainlp/pythainlp
pythainlp/word_vector/core.py
WordVector.load_wordvector
python
def load_wordvector(self, model_name: str):
    self.model_name = model_name
    self.model = KeyedVectors.load_word2vec_format(
        get_corpus_path(self.model_name),
        binary=True,
        unicode_errors="ignore",
    )
    self.WV_DIM = self.model.vector_size
    if self.model_name == "thai2fit_wv":
        self.tokenize = THAI2FIT_TOKENIZER.word_tokenize
    else:
        self.tokenize = word_tokenize
Load word vector model.

:param str model_name: model name
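A usage sketch; the first call fetches the pretrained corpus via get_corpus_path, so network access is assumed, and the import path is taken from the package layout above:

from pythainlp.word_vector import WordVector

wv = WordVector("thai2fit_wv")   # __init__ calls load_wordvector() for us
print(wv.WV_DIM)                 # dimensionality of the loaded vectors (300 for thai2fit_wv)
print(wv.tokenize("ภาษาไทยง่ายนิดเดียว"))  # tokenizer chosen to match the model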
https://github.com/pythainlp/pythainlp/blob/2ae3f7ad77d96136690823f33bb61f0b8610900b/pythainlp/word_vector/core.py#L321-L338
from typing import List, Tuple import warnings from gensim.models import KeyedVectors from gensim.models.keyedvectors import Word2VecKeyedVectors from numpy import ndarray, zeros from pythainlp.corpus import get_corpus_path from pythainlp.tokenize import THAI2FIT_TOKENIZER, word_tokenize WV_DIM = 300 _MODEL_NAME = "thai2fit_wv" _TK_SP = "xxspace" _TK_EOL = "xxeol" def get_model() -> Word2VecKeyedVectors: warnings.warn( "get_model is deprecated, use WordVector class instead", DeprecationWarning, ) path = get_corpus_path(_MODEL_NAME) return KeyedVectors.load_word2vec_format(path, binary=True) _MODEL = get_model() def doesnt_match(words: List[str]) -> str: warnings.warn( "doesnt_match is deprecated, use WordVector class instead", DeprecationWarning, ) return _MODEL.doesnt_match(words) def most_similar_cosmul( positive: List[str], negative: List[str] ) -> List[Tuple[str, float]]: warnings.warn( "most_similar_cosmul is deprecated, use WordVector class instead", DeprecationWarning, ) return _MODEL.most_similar_cosmul(positive=positive, negative=negative) def similarity(word1: str, word2: str) -> float: warnings.warn( "similarity is deprecated, use WordVector class instead", DeprecationWarning, ) return _MODEL.similarity(word1, word2) def sentence_vectorizer(text: str, use_mean: bool = True) -> ndarray: warnings.warn( "sentence_vectorizer is deprecated, use WordVector class instead", DeprecationWarning, ) vec = zeros((1, WV_DIM)) words = THAI2FIT_TOKENIZER.word_tokenize(text) len_words = len(words) if not len_words: return vec for word in words: if word == " ": word = _TK_SP elif word == "\n": word = _TK_EOL if word in _MODEL.index_to_key: vec += _MODEL.get_vector(word) if use_mean: vec /= len_words return vec class WordVector: def __init__(self, model_name: str = "thai2fit_wv") -> None: self.load_wordvector(model_name)
Apache License 2.0
domwillcode/yale-smart-alarm-client
yalesmartalarmclient/client.py
YaleSmartAlarmClient.get_armed_status
python
def get_armed_status(self) -> str:
    try:
        alarm_state = self.auth.get_authenticated(self._ENDPOINT_GET_MODE)
    except AuthenticationError as e:
        raise e
    except RequestException as e:
        raise e
    return cast(str, alarm_state.get("data")[0].get("mode"))
Get armed status.
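A minimal usage sketch with placeholder credentials; a reachable Yale account is assumed:

from yalesmartalarmclient.client import YaleSmartAlarmClient, YALE_STATE_ARM_FULL

client = YaleSmartAlarmClient("user@example.com", "secret")
if client.get_armed_status() == YALE_STATE_ARM_FULL:
    print("alarm is fully armed")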
https://github.com/domwillcode/yale-smart-alarm-client/blob/bbabc71d212adb7df4a7f5637f61f12ef8043a4d/yalesmartalarmclient/client.py#L223-L231
import logging from typing import Any, Dict, cast from requests import RequestException from .auth import YaleAuth from .exceptions import AuthenticationError from .lock import YaleDoorManAPI _LOGGER = logging.getLogger(__name__) YALE_STATE_ARM_FULL = "arm" YALE_STATE_ARM_PARTIAL = "home" YALE_STATE_DISARM = "disarm" YALE_LOCK_STATE_LOCKED = "locked" YALE_LOCK_STATE_UNLOCKED = "unlocked" YALE_LOCK_STATE_DOOR_OPEN = "dooropen" YALE_LOCK_STATE_UNKNOWN = "unknown" YALE_DOOR_CONTACT_STATE_CLOSED = "closed" YALE_DOOR_CONTACT_STATE_OPEN = "open" YALE_DOOR_CONTACT_STATE_UNKNOWN = "unknown" class YaleSmartAlarmClient: YALE_CODE_RESULT_SUCCESS = "000" _ENDPOINT_GET_MODE = "/api/panel/mode/" _ENDPOINT_SET_MODE = "/api/panel/mode/" _ENDPOINT_DEVICES_STATUS = "/api/panel/device_status/" _ENDPOINT_PANIC_BUTTON = "/api/panel/panic" _ENDPOINT_STATUS = "/yapi/api/panel/status/" _ENDPOINT_CYCLE = "/yapi/api/panel/cycle/" _ENDPOINT_ONLINE = "/yapi/api/panel/online/" _ENDPOINT_HISTORY = "/yapi/api/event/report/?page_num=1&set_utc=1" _ENDPOINT_CHECK = "/yapi/api/auth/check/" _ENDPOINT_PANEL_INFO = "/yapi/api/panel/info/" _REQUEST_PARAM_AREA = "area" _REQUEST_PARAM_MODE = "mode" _DEFAULT_REQUEST_TIMEOUT = 5 def __init__(self, username: str, password: str, area_id: int = 1) -> None: self.auth: YaleAuth = YaleAuth(username=username, password=password) self.area_id = area_id self.lock_api: YaleDoorManAPI = YaleDoorManAPI(auth=self.auth) def get_all(self) -> str: try: devices = self.auth.get_authenticated(self._ENDPOINT_DEVICES_STATUS) mode = self.auth.get_authenticated(self._ENDPOINT_GET_MODE) status = self.auth.get_authenticated(self._ENDPOINT_STATUS) cycle = self.auth.get_authenticated(self._ENDPOINT_CYCLE) online = self.auth.get_authenticated(self._ENDPOINT_ONLINE) history = self.auth.get_authenticated(self._ENDPOINT_HISTORY) panel_info = self.auth.get_authenticated(self._ENDPOINT_PANEL_INFO) auth_check = self.auth.get_authenticated(self._ENDPOINT_CHECK) except AuthenticationError as e: raise e except RequestException as e: raise e return ( " DEVICES \n" + str(devices["data"]) + "\n MODE \n" + str(mode["data"]) + "\n STATUS \n" + str(status["data"]) + "\n CYCLE \n" + str(cycle["data"]) + "\n ONLINE \n" + str(online["data"]) + "\n HISTORY \n" + str(history["data"]) + "\n PANEL INFO \n" + str(panel_info["data"]) + "\n AUTH CHECK \n" + str(auth_check["data"]) ) def get_all_devices(self) -> Dict[str, Any]: try: devices = self.auth.get_authenticated(self._ENDPOINT_DEVICES_STATUS) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], devices["data"]) def get_cycle(self) -> Dict[str, Any]: try: cycle = self.auth.get_authenticated(self._ENDPOINT_CYCLE) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], cycle["data"]) def get_status(self) -> str: try: status = self.auth.get_authenticated(self._ENDPOINT_STATUS) except AuthenticationError as e: raise e except RequestException as e: raise e acfail = status["data"]["acfail"] battery = status["data"]["battery"] tamper = status["data"]["tamper"] jam = status["data"]["jam"] if acfail == battery == tamper == jam == "main.normal": return "ok" return "error" def get_online(self) -> Dict[str, Any]: try: online = self.auth.get_authenticated(self._ENDPOINT_ONLINE) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], online["data"]) def get_panel_info(self) -> Dict[str, Any]: try: panel_info = 
self.auth.get_authenticated(self._ENDPOINT_PANEL_INFO) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], panel_info["data"]) def get_history(self) -> Dict[str, Any]: try: history = self.auth.get_authenticated(self._ENDPOINT_HISTORY) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], history["data"]) def get_auth_check(self) -> Dict[str, Any]: try: check = self.auth.get_authenticated(self._ENDPOINT_CHECK) except AuthenticationError as e: raise e except RequestException as e: raise e return cast(Dict[str, Any], check["data"]) def get_locks_status(self) -> Dict[str, str]: try: devices = self.auth.get_authenticated(self._ENDPOINT_DEVICES_STATUS) except AuthenticationError as e: raise e except RequestException as e: raise e locks: Dict[str, str] = {} for device in devices["data"]: if device["type"] == "device_type.door_lock": state = device["status1"] name = device["name"] lock_status_str = device["minigw_lock_status"] if lock_status_str != "": lock_status = int(lock_status_str, 16) closed = (lock_status & 16) == 16 locked = (lock_status & 1) == 1 if closed is True and locked is True: state = YALE_LOCK_STATE_LOCKED elif closed is True and locked is False: state = YALE_LOCK_STATE_UNLOCKED elif not closed: state = YALE_LOCK_STATE_DOOR_OPEN elif "device_status.lock" in state: state = YALE_LOCK_STATE_LOCKED elif "device_status.unlock" in state: state = YALE_LOCK_STATE_UNLOCKED else: state = YALE_LOCK_STATE_UNKNOWN locks[name] = state return locks def get_doors_status(self) -> Dict[str, str]: try: devices = self.auth.get_authenticated(self._ENDPOINT_DEVICES_STATUS) except AuthenticationError as e: raise e except RequestException as e: raise e doors: Dict[str, str] = {} for device in devices["data"]: if device["type"] == "device_type.door_contact": state = device["status1"] name = device["name"] if "device_status.dc_close" in state: state = YALE_DOOR_CONTACT_STATE_CLOSED elif "device_status.dc_open" in state: state = YALE_DOOR_CONTACT_STATE_OPEN else: state = YALE_DOOR_CONTACT_STATE_UNKNOWN doors[name] = state return doors
Apache License 2.0
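The get_locks_status method in the client code above decodes minigw_lock_status as a hex bitfield: flag 0x10 (16) means the door contact reports closed and flag 0x01 (1) means the bolt reports locked. A minimal standalone sketch of that decoding, using the state strings from the module; the function name and sample values are illustrative only:

def decode_lock_status(minigw_lock_status):
    # An empty status string means the panel gave us nothing to decode.
    if minigw_lock_status == "":
        return "unknown"
    status = int(minigw_lock_status, 16)
    closed = bool(status & 0x10)   # door contact closed
    locked = bool(status & 0x01)   # bolt thrown
    if not closed:
        return "dooropen"
    return "locked" if locked else "unlocked"

print(decode_lock_status("11"))  # closed and locked -> 'locked'
print(decode_lock_status("10"))  # closed but unlocked -> 'unlocked'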
pielco11/dot
async/2getpy.py
nostdout
python
def nostdout(): savestdout = sys.stdout sys.stdout = RecycleObject() yield sys.stdout = savestdout
Context manager that temporarily silences stdout by redirecting it to a throwaway writer object, then restores the original stream
https://github.com/pielco11/dot/blob/b1c4b2cba37135a70b64027a68c6b548fd14c737/async/2getpy.py#L24-L30
import re import asyncio from datetime import datetime import urllib.parse import time import sqlite3 import sys import contextlib import hashlib from bs4 import BeautifulSoup import aiohttp from aiohttp_socks import SocksConnector, SocksVer from elasticsearch import Elasticsearch, helpers from cashaddress import convert import validate_email class RecycleObject(object): def write(self, junk): pass def flush(self): pass @contextlib.contextmanager
MIT License
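The nostdout helper above is a contextlib context manager that swaps sys.stdout for a write-and-discard object for the duration of a with block. A self-contained usage sketch; the try/finally restore is added here for safety and is not claimed to be in the original, which simply restores after the yield:

import sys
import contextlib

class RecycleObject(object):
    # Mirrors the sink class from 2getpy.py: accept writes, do nothing.
    def write(self, junk):
        pass
    def flush(self):
        pass

@contextlib.contextmanager
def nostdout():
    savestdout = sys.stdout
    sys.stdout = RecycleObject()
    try:
        yield
    finally:
        sys.stdout = savestdout

with nostdout():
    print("this line is swallowed")
print("this line prints normally again")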
fali-kunxiaojiayuan/spatial-statistics
code/common.py
belongs
python
def belongs(boundary, point): if not point: return False d = boundary[DIMENSION] cx, cy = boundary[CENTER] px, py = point return (py <= cy + d and py >= cy - d) and (px <= cx + d and px >= cx - d)
Check whether the point lies inside the square boundary (within center ± dimension on both axes)
https://github.com/fali-kunxiaojiayuan/spatial-statistics/blob/4b496df86e24aff5547a92156461805924a53f3d/code/common.py#L21-L30
import bisect from collections import namedtuple from scipy.spatial.distance import euclidean NO_QUADRANT = -1 NORTH_WEST = 1 NORTH_EAST = 2 SOUTH_EAST = 3 SOUTH_WEST = 4 delta=pow(10,-7) CENTER = 0 DIMENSION = 1 X = 0 Y = 1 Point = namedtuple('Point', ['x', 'y','key']) Boundary = namedtuple('Boundary', ['center', 'dimension'])
MIT License
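The belongs helper above treats a boundary as (center, half-dimension) and tests containment in an axis-aligned square. A small self-contained usage sketch; the indices mirror the CENTER and DIMENSION constants from common.py, the point is a plain (x, y) pair since belongs unpacks exactly two coordinates, and the sample values are made up:

CENTER, DIMENSION = 0, 1

def belongs(boundary, point):
    if not point:
        return False
    d = boundary[DIMENSION]
    cx, cy = boundary[CENTER]
    px, py = point
    # Inside the square iff both coordinates fall within center +/- d.
    return (cy - d <= py <= cy + d) and (cx - d <= px <= cx + d)

boundary = ((0.0, 0.0), 10.0)            # center and half-width of the cell
print(belongs(boundary, (3.0, -7.5)))    # True: inside the square
print(belongs(boundary, (12.0, 0.0)))    # False: outside on the x axis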
pinterest/slackminion
slackminion/plugins/test.py
TestPlugin.shortsleep2
python
def shortsleep2(self, msg, args): self.start_timer(5, self._sleep_func2, msg.channel, ' '.join(args))
Sleep for a bit, then echo the message back
https://github.com/pinterest/slackminion/blob/5845e7a3e09c72305c06e8f0aa58db1882858aaa/slackminion/plugins/test.py#L39-L41
from ..plugin import cmd, webhook from slackminion.plugins.core import version try: from slackminion.plugins.core import commit except ImportError: commit = 'HEAD' from slackminion.plugin.base import BasePlugin class TestPlugin(BasePlugin): @cmd() def echo(self, msg, args): self.log.debug("Received args: %s", args) return ' '.join(args) @cmd() def xyzzy(self, msg, args): return "Nothing happens for %s" % msg.user @cmd() def alert(self, msg, args): self.send_message(self.config['channel'], '<!here>: something important is going to happen!') return None @webhook('/echo', form_params='foo') def web_echo(self, foo): self.send_message(self.config['channel'], foo) @cmd() def shortsleep(self, msg, args): self.start_timer(5, self._sleep_func) @cmd()
MIT License
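shortsleep2 above schedules self._sleep_func2 to run 5 seconds later with the channel and the joined message text. _sleep_func2 itself is not shown in the excerpt, so the callback below is a hypothetical minimal version sketched on a stand-in class rather than the real BasePlugin:

class SleepEchoSketch(object):

    def send_message(self, channel, text):
        # Stand-in for BasePlugin.send_message; the real plugin posts to Slack.
        print("[%s] %s" % (channel, text))

    def _sleep_func2(self, channel, text):
        # Invoked when the 5 second timer elapses: echo the message back.
        self.send_message(channel, text)

SleepEchoSketch()._sleep_func2("#general", "hello again")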
autonomousvision/data_aggregation
carla/sensor.py
Image.save_to_disk
python
def save_to_disk(self, filename, format='.png'): filename = _append_extension(filename, format) try: from PIL import Image as PImage except ImportError: raise RuntimeError( 'cannot import PIL, make sure pillow package is installed') image = PImage.frombytes( mode='RGBA', size=(self.width, self.height), data=self.raw_data, decoder_name='raw') color = image.split() image = PImage.merge("RGB", color[2::-1]) folder = os.path.dirname(filename) if not os.path.isdir(folder): os.makedirs(folder) image.save(filename, quality=100)
Save this image to disk (requires PIL installed).
https://github.com/autonomousvision/data_aggregation/blob/76777156a465cbb77d6d5ab88da8f1812e7ff043/carla/sensor.py#L174-L195
import os from collections import namedtuple try: import numpy except ImportError: raise RuntimeError('cannot import numpy, make sure numpy package is installed.') from .transform import Transform, Translation, Rotation, Scale Color = namedtuple('Color', 'r g b') Color.__new__.__defaults__ = (0, 0, 0) Point = namedtuple('Point', 'x y z color') Point.__new__.__defaults__ = (0.0, 0.0, 0.0, None) def _append_extension(filename, ext): return filename if filename.lower().endswith(ext.lower()) else filename + ext class Sensor(object): def __init__(self, name, sensor_type): self.SensorName = name self.SensorType = sensor_type self.PositionX = 0.2 self.PositionY = 0.0 self.PositionZ = 1.3 self.RotationPitch = 0.0 self.RotationRoll = 0.0 self.RotationYaw = 0.0 def set(self, **kwargs): for key, value in kwargs.items(): if not hasattr(self, key): raise ValueError('sensor.Sensor: no key named %r' % key) setattr(self, key, value) def set_position(self, x, y, z): self.PositionX = x self.PositionY = y self.PositionZ = z def set_rotation(self, pitch, yaw, roll): self.RotationPitch = pitch self.RotationYaw = yaw self.RotationRoll = roll def get_transform(self): return Transform( Translation(self.PositionX, self.PositionY, self.PositionZ), Rotation(self.RotationPitch, self.RotationYaw, self.RotationRoll)) def get_unreal_transform(self): to_unreal_transform = Transform(Rotation(roll=-90, yaw=90), Scale(x=-1)) return self.get_transform() * to_unreal_transform class Camera(Sensor): def __init__(self, name, **kwargs): super(Camera, self).__init__(name, sensor_type="CAMERA") self.PostProcessing = 'SceneFinal' self.ImageSizeX = 720 self.ImageSizeY = 512 self.FOV = 90.0 self.set(**kwargs) def set_image_size(self, pixels_x, pixels_y): self.ImageSizeX = pixels_x self.ImageSizeY = pixels_y class Lidar(Sensor): def __init__(self, name, **kwargs): super(Lidar, self).__init__(name, sensor_type="LIDAR_RAY_CAST") self.Channels = 32 self.Range = 50.0 self.PointsPerSecond = 56000 self.RotationFrequency = 10.0 self.UpperFovLimit = 10.0 self.LowerFovLimit = -30.0 self.ShowDebugPoints = False self.set(**kwargs) class SensorData(object): def __init__(self, frame_number): self.frame_number = frame_number class Image(SensorData): def __init__(self, frame_number, width, height, image_type, fov, raw_data): super(Image, self).__init__(frame_number=frame_number) assert len(raw_data) == 4 * width * height self.width = width self.height = height self.type = image_type self.fov = fov self.raw_data = raw_data self._converted_data = None @property def data(self): if self._converted_data is None: from . import image_converter if self.type == 'Depth': self._converted_data = image_converter.depth_to_array(self) elif self.type == 'SemanticSegmentation': self._converted_data = image_converter.labels_to_array(self) else: self._converted_data = image_converter.to_rgb_array(self) return self._converted_data
MIT License
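save_to_disk above relies on the raw CARLA buffer actually being BGRA even though it is loaded with mode='RGBA'; that is why the merge call reorders the first three channels with color[2::-1]. A tiny self-contained sketch of that channel shuffle on a hand-made two-pixel buffer (requires pillow; the pixel values are illustrative):

from PIL import Image as PImage

width, height = 2, 1
# Two pixels stored as B, G, R, A: pure blue, then pure red.
raw_data = bytes([255, 0, 0, 255,   0, 0, 255, 255])

image = PImage.frombytes(mode='RGBA', size=(width, height),
                         data=raw_data, decoder_name='raw')
channels = image.split()                       # bands in the declared R, G, B, A order
image = PImage.merge("RGB", channels[2::-1])   # take bands [2, 1, 0] to undo BGRA

print(image.getpixel((0, 0)))  # (0, 0, 255) -> blue, as intended
print(image.getpixel((1, 0)))  # (255, 0, 0) -> red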
guildai/guildai
guild/view.py
ViewData.config
python
def config(self): raise NotImplementedError()
Returns dict of config for request params. Config dict must contain: cwd (string) - cwd used for runs; titleLabel (string) - label suitable for browser title; version (string) - Guild version
https://github.com/guildai/guildai/blob/79d39402201168b7e94007d8e66ecf504e7aa71c/guild/view.py#L71-L81
from __future__ import absolute_import from __future__ import division import json import logging import math import os import socket import subprocess import sys import threading import time from werkzeug.exceptions import NotFound from werkzeug.utils import redirect from guild import serving_util from guild import util from guild import var log = logging.getLogger("guild") MODULE_DIR = os.path.dirname(__file__) TB_RUNS_MONITOR_INTERVAL = 5 TB_REFRESH_INTERVAL = 5 TB_DISABLED_PLUGINS = ["WhatIfToolPluginLoader"] class ViewData(object): def runs(self): raise NotImplementedError() def runs_data(self): raise NotImplementedError() def one_run(self, run_id_prefix): def one_run_data(self, run_id_prefix): raise NotImplementedError()
Apache License 2.0
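ViewData.config above is an abstract hook whose docstring pins down the required keys. A minimal sketch of a concrete subclass satisfying that contract; the import path is assumed from the file path guild/view.py, and the returned values are placeholders rather than Guild's real implementation:

from guild.view import ViewData

class StaticViewData(ViewData):

    def config(self):
        # The three keys the docstring requires, with placeholder values.
        return {
            "cwd": "/home/user/project",      # cwd used for runs
            "titleLabel": "my-project runs",  # label suitable for the browser title
            "version": "0.0.0",               # Guild version string
        }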
idlesign/django-siteprefs
siteprefs/toolbox.py
autodiscover_siteprefs
python
def autodiscover_siteprefs(admin_site: AdminSite = None): import_prefs() try: Preference.read_prefs(get_prefs()) except DatabaseError: LOGGER.warning('Unable to read preferences from database. Skip.') else: if admin_site is None: admin_site = admin.site register_admin_models(admin_site)
Automatically discovers and registers all preferences available in all apps. :param admin_site: Custom AdminSite object.
https://github.com/idlesign/django-siteprefs/blob/dbc040b96800a73e35a3d436a5207dd658ce0c58/siteprefs/toolbox.py#L162-L181
import logging import sys from collections import OrderedDict from types import ModuleType from typing import Dict, Union, List, Tuple, Any, Optional from django.contrib import admin from django.contrib.admin import AdminSite from django.db import DatabaseError from django.db.models import Model, Field from .exceptions import SitePrefsException from .models import Preference from .signals import prefs_save from .utils import import_prefs, get_frame_locals, traverse_local_prefs, get_pref_model_admin_class, get_pref_model_class, PrefProxy, PatchedLocal, Frame __PATCHED_LOCALS_SENTINEL = '__siteprefs_locals_patched' __PREFS_REGISTRY = None __PREFS_DEFAULT_REGISTRY = OrderedDict() __MODELS_REGISTRY = {} LOGGER = logging.getLogger(__name__) def on_pref_update(*args, **kwargs): Preference.update_prefs(*args, **kwargs) Preference.read_prefs(get_prefs()) prefs_save.connect(on_pref_update) def get_prefs() -> dict: global __PREFS_REGISTRY if __PREFS_REGISTRY is None: __PREFS_REGISTRY = __PREFS_DEFAULT_REGISTRY return __PREFS_REGISTRY def get_app_prefs(app: str = None) -> dict: if app is None: with Frame(stepback=1) as frame: app = frame.f_globals['__name__'].split('.')[0] prefs = get_prefs() if app not in prefs: return {} return prefs[app] def get_prefs_models() -> Dict[str, Model]: return __MODELS_REGISTRY def bind_proxy( values: Union[List, Tuple], category: str = None, field: Field = None, verbose_name: str = None, help_text: str = '', static: bool = True, readonly: bool = False ) -> List[PrefProxy]: addrs = OrderedDict() depth = 3 for local_name, locals_dict in traverse_local_prefs(depth): addrs[id(locals_dict[local_name])] = local_name proxies = [] locals_dict = get_frame_locals(depth) for value in values: id_val = id(value) if id_val in addrs: local_name = addrs[id_val] local_val = locals_dict[local_name] if isinstance(local_val, PatchedLocal) and not isinstance(local_val, PrefProxy): proxy = PrefProxy( local_name, value.val, category=category, field=field, verbose_name=verbose_name, help_text=help_text, static=static, readonly=readonly, ) app_name = locals_dict['__name__'].split('.')[-2] prefs = get_prefs() if app_name not in prefs: prefs[app_name] = OrderedDict() prefs[app_name][local_name.lower()] = proxy locals_dict[local_name] = proxy proxies.append(proxy) return proxies def register_admin_models(admin_site: AdminSite): global __MODELS_REGISTRY prefs = get_prefs() for app_label, prefs_items in prefs.items(): model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs) if model_class is not None: __MODELS_REGISTRY[app_label] = model_class admin_site.register(model_class, get_pref_model_admin_class(prefs_items))
BSD 3-Clause New or Revised License
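autodiscover_siteprefs above imports every app's prefs module, reads stored values from the database, and registers the generated preference models with an admin site. A usage sketch; calling it from the project's urls.py and the custom AdminSite shown here are assumptions for illustration, not requirements stated in the excerpt:

# e.g. near the top of the project's urls.py
from django.contrib.admin import AdminSite
from siteprefs.toolbox import autodiscover_siteprefs

custom_admin = AdminSite(name='custom_admin')    # hypothetical custom admin site
autodiscover_siteprefs(admin_site=custom_admin)  # omit the argument to use admin.site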
theromanxpl0it/ghidra-emu-fun
src/emulator.py
Emulator.start
python
def start(self, byte_substitution=None): assert(self.emulator_state == EmulatorState.READY) if byte_substitution is not None: self.byte_substitution = byte_substitution self.logger.info('Started with byte_sub: %r' % self.byte_substitution) for parameter in [self.symbolMap.getParam(i) for i in range(self.symbolMap.getNumParams())]: bytesValue = self.parameterMap[parameter.name] bytesValue = self.applyByteSubstitution(bytesValue) storage = parameter.getStorage() offset = 0 for varnode in storage.getVarnodes(): chunk = bytesValue[offset:offset+varnode.getSize()] if varnode.getAddress().isStackAddress(): self.emulatorHelper.writeMemory(self.getStackAddress(varnode.getAddress().getOffset()), chunk) else: self.emulatorHelper.writeMemory(varnode.getAddress(), chunk) offset += varnode.getSize() self.emulatorHelper.setBreakpoint(self.function.getEntryPoint()) self.emulatorHelper.run(self.function.getEntryPoint(), self.entrypoint, self.monitor) self.emulator_state = EmulatorState.EXECUTING
Write the function inputs into memory (applying the byte substitution, if one is set) and start the emulation, breaking at the function entry point
https://github.com/theromanxpl0it/ghidra-emu-fun/blob/21e85a5d378013fc1db941c00ed4062ae867a459/src/emulator.py#L214-L239
from ghidra.program.model.address import AddressSpace from ghidra.app.emulator import EmulatorHelper from ghidra.app.decompiler import DecompInterface from ghidra.program.flatapi import FlatProgramAPI from ghidra.pcode.emulate import EmulateExecutionState, BreakCallBack import logging import sys import string import time import random from functools import wraps import lib import struct from utils import * class EmulatorState: WAITING_FOR_PARAM, READY, EXECUTING, DONE = range(4) def history(func): @wraps(func) def wrapper(*args): args[0].logger.debug(args[1]) args[0].history.append(' '.join(args[1])) func(*args) return wrapper class DucktapeBreakCallback(BreakCallBack): def __init__(self, addressCallback, pcodeCallback): self.address_callback = addressCallback self.pcode_callback = pcodeCallback def addressCallback(self, address): self.address_callback(address) return True class Emulator(object): def __init__(self, plugin, state=None, logger_fname="ghidra_emulator.txt"): self.plugin = plugin self.monitor = self.plugin.getMonitor() if state is None: state = self.plugin.getGhidraState() program = state.getCurrentProgram() address = state.getCurrentAddress() self.byte_substitution = {} self.initLogger(logger_fname) self.initEmulator(program, address) self.initCmdHandlers() self.emulator_state = EmulatorState.WAITING_FOR_PARAM self.flatapi = FlatProgramAPI(program) def initLogger(self, fname): self.logger_fname = fname self.logger = logging.getLogger(str(random.random()).replace(".","_")) self.logger.setLevel(logging.INFO) h_stdout = logging.StreamHandler(sys.stdout) h_stdout.setLevel(logging.INFO) self.logger.addHandler(h_stdout) if self.logger_fname: h_file = logging.FileHandler(self.logger_fname) h_file.setLevel(logging.INFO) self.logger.addHandler(h_file) def initEmulator(self, program, address, clear_param_map=True): self.program = program self.function = self.program.getFunctionManager().getFunctionContaining(address) if self.function is None: function_name = self.plugin.askString("You are not in a function, please enter an address or a function name", "address or symbol name") for f in self.plugin.state.currentProgram.getFunctionManager().getFunctions(True): if function == f.getName(): self.plugin.state.setCurrentAddress(function.getEntryPoint()) self.doStart() return for f in self.plugin.state.currentProgram.getFunctionManager().getFunctions(True): if int(function, 16) == f.getEntryPoint().getOffset(): self.plugin.state.setCurrentAddress(function.getEntryPoint()) self.doStart() return self.entrypoint = self.program.getListing().getInstructionAt(self.function.getEntryPoint()) self.logger.info("Program: %s" % self.program) self.logger.info("Function: %s" % self.function) self.decompinterface = DecompInterface() self.decompinterface.openProgram(program) result = self.decompinterface.decompileFunction(self.function, 0, self.monitor) self.highFunction = result.getHighFunction() self.decompiled = str(result.getCCodeMarkup()) self.symbolMap = self.highFunction.getLocalSymbolMap() if clear_param_map: self.parameterMap = {} self.emulatorHelper = EmulatorHelper(self.program) self.stackPointer = (((1 << (self.emulatorHelper.getStackPointerRegister().getBitLength() - 1)) - 1) ^ ((1 << (self.emulatorHelper.getStackPointerRegister().getBitLength()//2))-1)) self.returnAddressSize = program.getLanguage().getProgramCounter().getBitLength() NULL_PTR_RET = 0 self.emulatorHelper.writeRegister(self.emulatorHelper.getStackPointerRegister(), self.stackPointer) 
self.emulatorHelper.setBreakpoint(self.getStackAddress(NULL_PTR_RET)) self.emulatorHelper.enableMemoryWriteTracking(True) self.emulator_state = EmulatorState.WAITING_FOR_PARAM if not clear_param_map: self.emulator_state = EmulatorState.READY self.history = [] self.lastAddresses = [] self.hookExternalFunctions() def hookExternalFunctions(self): for externalFunction in list(self.program.getFunctionManager().getExternalFunctions()): self.logger.debug('Found external function `%s`' % (externalFunction.getName())) for library in lib.exports: self.logger.debug('Found library `%s`' % (library.name)) for function in library.exports: self.logger.debug('Found function `%s`' % (function.__name__)) if externalFunction.getName() == function.__name__: for address in externalFunction.getFunctionThunkAddresses(): self.logger.info('Hooked function `%s`@%s with implementation lib/%s/%s' % (externalFunction.getName(), str(address), library.name, function.__name__)) callback = DucktapeBreakCallback(function(self.program, self, self.program.getFunctionManager().getFunctionAt(address), self.monitor), lambda x: True) self.emulatorHelper.emulator.getBreakTable().registerAddressCallback(address, callback) for thunkFunction in list(filter(lambda x: x.isThunk(), self.program.getFunctionManager().getFunctions(True))): for library in lib.exports: self.logger.debug('Found library `%s`' % (library.name)) for function in library.exports: self.logger.debug('Found function `%s`' % (function.__name__)) if thunkFunction.getName() == function.__name__: address = thunkFunction.getEntryPoint() self.logger.info('Hooked function `%s` at %s with implementation lib/%s/%s' % (thunkFunction.getName(), str(address), library.name, function.__name__)) callback = DucktapeBreakCallback(function(self.program, self, self.program.getFunctionManager().getFunctionAt(address), self.monitor), lambda x: True) self.emulatorHelper.emulator.getBreakTable().registerAddressCallback(address, callback) def initFunctionParameters(self, bytesValueBuffer=""): self.input_wildcards = [] self.fnParametersAllBytesValue = "" for parameter in [self.symbolMap.getParam(i) for i in range(self.symbolMap.getNumParams())]: psize = self.parameterStorageSize(parameter) if len(bytesValueBuffer) < psize*2: bytesValueBuffer = self.plugin.askString('Setting Parameters for `{}` (size: {})'.format(parameter.name, psize), 'byte values') bytesValue = bytesValueBuffer[:psize*2] bytesValue = (bytesValue + "00"*psize)[:psize*2] assert(len(bytesValue) == psize*2) for i in range(0,len(bytesValue), 2): if bytesValue[i] in string.hexdigits and bytesValue[i+1] in string.hexdigits: continue self.input_wildcards.append(bytesValue[i:i+2]) self.parameterMap[parameter.name] = bytesValue self.fnParametersAllBytesValue += bytesValue bytesValueBuffer = bytesValueBuffer[psize*2:] if self.input_wildcards: self.logger.info("Found %d wildcards: %s" % (len(self.input_wildcards), self.input_wildcards)) self.logger.info("The next batch of cmds will be executed in fuzzing mode") for w in self.input_wildcards: self.byte_substitution[w] = "00" self.emulator_state = EmulatorState.READY def parameterStorageSize(self, parameter): return sum(map(lambda x: x.getSize(), parameter.getStorage().getVarnodes())) def getAddress(self, offset): return self.program.getAddressFactory().getDefaultAddressSpace().getAddress(offset) def getStackAddress(self, offset): address = self.getAddress(self.emulatorHelper.readRegister(self.emulatorHelper.getStackPointerRegister()) + offset) orAddress = 
self.getAddress(self.stackPointer + offset) self.logger.debug('Stack address at {} or {}'.format(address, orAddress)) return orAddress def writeStackValue(self, offset, size, value): bytesValue = long_to_bytes(value, size) if not self.emulatorHelper.getLanguage().isBigEndian(): bytesValue = bytesValue[::-1] self.emulatorHelper.writeMemory(self.getStackAddress(offset), bytesValue) def applyByteSubstitution(self, bytesValue): for k,v in self.byte_substitution.items(): bytesValue = bytesValue.replace(k, v) return bytesValue.decode('hex')
Apache License 2.0
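start above writes each parameter's hex string into emulator memory after running it through applyByteSubstitution, which swaps wildcard byte pairs for concrete values before the hex string is decoded to raw bytes. A small sketch of that substitution step, ported to Python 3 (bytes.fromhex stands in for the plugin's Python 2 'hex' codec; names and values are illustrative):

def apply_byte_substitution(hex_value, byte_substitution):
    # Replace each wildcard pair (e.g. "XX") with the concrete hex byte chosen
    # for this run, then decode the whole string to raw bytes.
    for wildcard, replacement in byte_substitution.items():
        hex_value = hex_value.replace(wildcard, replacement)
    return bytes.fromhex(hex_value)

# "XX" was entered as a wildcard when the parameters were initialised; a fuzzing
# loop would drive it through concrete values before each start().
print(apply_byte_substitution("41XX4343", {"XX": "00"}))  # b'A\x00CC'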
openstack/glance
glance/cmd/replicator.py
replication_livecopy
python
def replication_livecopy(options, args): if len(args) < 2: raise TypeError(_("Too few arguments.")) imageservice = get_image_service() target_server, target_port = utils.parse_valid_host_port(args.pop()) target_conn = http.HTTPConnection(target_server, target_port) target_client = imageservice(target_conn, options.targettoken) source_server, source_port = utils.parse_valid_host_port(args.pop()) source_conn = http.HTTPConnection(source_server, source_port) source_client = imageservice(source_conn, options.sourcetoken) updated = [] for image in source_client.get_images(): LOG.debug('Considering %(id)s', {'id': image['id']}) for key in options.dontreplicate.split(' '): if key in image: LOG.debug('Stripping %(header)s from source metadata', {'header': key}) del image[key] if _image_present(target_client, image['id']): headers = target_client.get_image_meta(image['id']) if headers['status'] == 'active': for key in options.dontreplicate.split(' '): if key in image: LOG.debug('Stripping %(header)s from source ' 'metadata', {'header': key}) del image[key] if key in headers: LOG.debug('Stripping %(header)s from target ' 'metadata', {'header': key}) del headers[key] if _dict_diff(image, headers): LOG.info(_LI('Image %(image_id)s (%(image_name)s) ' 'metadata has changed'), {'image_id': image['id'], 'image_name': image.get('name', '--unnamed--')}) headers, body = target_client.add_image_meta(image) _check_upload_response_headers(headers, body) updated.append(image['id']) elif image['status'] == 'active': LOG.info(_LI('Image %(image_id)s (%(image_name)s) ' '(%(image_size)d bytes) ' 'is being synced'), {'image_id': image['id'], 'image_name': image.get('name', '--unnamed--'), 'image_size': image['size']}) if not options.metaonly: image_response = source_client.get_image(image['id']) try: headers, body = target_client.add_image(image, image_response) _check_upload_response_headers(headers, body) updated.append(image['id']) except exc.HTTPConflict: LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image['id']) return updated
%(prog)s livecopy <fromserver:port> <toserver:port> Load the contents of one glance instance into another. fromserver:port: the location of the source glance instance. toserver:port: the location of the target glance instance.
https://github.com/openstack/glance/blob/ff8e3d4a0a00caff4949620e1ddf3e541a522923/glance/cmd/replicator.py#L521-L596
import os import sys from oslo_config import cfg from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import encodeutils from oslo_utils import uuidutils import six from six.moves import http_client as http import six.moves.urllib.parse as urlparse from webob import exc from glance.common import config from glance.common import exception from glance.common import utils from glance.i18n import _, _LE, _LI, _LW LOG = logging.getLogger(__name__) cli_opts = [ cfg.IntOpt('chunksize', short='c', default=65536, help="Amount of data to transfer per HTTP write."), cfg.StrOpt('dontreplicate', short='D', default=('created_at date deleted_at location updated_at'), help="List of fields to not replicate."), cfg.BoolOpt('metaonly', short='m', default=False, help="Only replicate metadata, not images."), cfg.StrOpt('token', short='t', default='', help=("Pass in your authentication token if you have " "one. If you use this option the same token is " "used for both the source and the target.")), cfg.StrOpt('command', positional=True, required=False, help="Command to be given to replicator"), cfg.MultiStrOpt('args', positional=True, required=False, help="Arguments for the command"), ] CONF = cfg.CONF CONF.register_cli_opts(cli_opts) CONF.register_opt( cfg.StrOpt('sourcetoken', default='', help=("Pass in your authentication token if you have " "one. This is the token used for the source."))) CONF.register_opt( cfg.StrOpt('targettoken', default='', help=("Pass in your authentication token if you have " "one. This is the token used for the target."))) logging.register_options(CONF) CONF.set_default(name='use_stderr', default=True) possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'glance', '__init__.py')): sys.path.insert(0, possible_topdir) COMMANDS = """Commands: help <command> Output help for one of the commands below compare What is missing from the target glance? dump Dump the contents of a glance instance to local disk. livecopy Load the contents of one glance instance into another. load Load the contents of a local directory into glance. size Determine the size of a glance instance if dumped to disk. """ IMAGE_ALREADY_PRESENT_MESSAGE = _('The image %s is already present on ' 'the target, but our check for it did ' 'not find it. 
This indicates that we ' 'do not have permissions to see all ' 'the images on the target server.') class ImageService(object): def __init__(self, conn, auth_token): self.auth_token = auth_token self.conn = conn def _http_request(self, method, url, headers, body, ignore_result_body=False): if self.auth_token: headers.setdefault('x-auth-token', self.auth_token) LOG.debug('Request: %(method)s http://%(server)s:%(port)s' '%(url)s with headers %(headers)s', {'method': method, 'server': self.conn.host, 'port': self.conn.port, 'url': url, 'headers': repr(headers)}) self.conn.request(method, url, body, headers) response = self.conn.getresponse() headers = self._header_list_to_dict(response.getheaders()) code = response.status code_description = http.responses[code] LOG.debug('Response: %(code)s %(status)s %(headers)s', {'code': code, 'status': code_description, 'headers': repr(headers)}) if code == http.BAD_REQUEST: raise exc.HTTPBadRequest( explanation=response.read()) if code == http.INTERNAL_SERVER_ERROR: raise exc.HTTPInternalServerError( explanation=response.read()) if code == http.UNAUTHORIZED: raise exc.HTTPUnauthorized( explanation=response.read()) if code == http.FORBIDDEN: raise exc.HTTPForbidden( explanation=response.read()) if code == http.CONFLICT: raise exc.HTTPConflict( explanation=response.read()) if ignore_result_body: response.read() return response def get_images(self): params = {'is_public': None} while True: url = '/v1/images/detail' query = urlparse.urlencode(params) if query: url += '?%s' % query response = self._http_request('GET', url, {}, '') result = jsonutils.loads(response.read()) if not result or 'images' not in result or not result['images']: return for image in result.get('images', []): params['marker'] = image['id'] yield image def get_image(self, image_uuid): url = '/v1/images/%s' % image_uuid return self._http_request('GET', url, {}, '') @staticmethod def _header_list_to_dict(headers): d = {} for (header, value) in headers: if header.startswith('x-image-meta-property-'): prop = header.replace('x-image-meta-property-', '') d.setdefault('properties', {}) d['properties'][prop] = value else: d[header.replace('x-image-meta-', '')] = value return d def get_image_meta(self, image_uuid): url = '/v1/images/%s' % image_uuid response = self._http_request('HEAD', url, {}, '', ignore_result_body=True) return self._header_list_to_dict(response.getheaders()) @staticmethod def _dict_to_headers(d): h = {} for key in d: if key == 'properties': for subkey in d[key]: if d[key][subkey] is None: h['x-image-meta-property-%s' % subkey] = '' else: h['x-image-meta-property-%s' % subkey] = d[key][subkey] else: h['x-image-meta-%s' % key] = d[key] return h def add_image(self, image_meta, image_data): url = '/v1/images' headers = self._dict_to_headers(image_meta) headers['Content-Type'] = 'application/octet-stream' headers['Content-Length'] = int(image_meta['size']) response = self._http_request('POST', url, headers, image_data) headers = self._header_list_to_dict(response.getheaders()) LOG.debug('Image post done') body = response.read() return headers, body def add_image_meta(self, image_meta): url = '/v1/images/%s' % image_meta['id'] headers = self._dict_to_headers(image_meta) headers['Content-Type'] = 'application/octet-stream' response = self._http_request('PUT', url, headers, '') headers = self._header_list_to_dict(response.getheaders()) LOG.debug('Image post done') body = response.read() return headers, body def get_image_service(): return ImageService def _human_readable_size(num, 
suffix='B'): for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']: if abs(num) < 1024.0: return "%3.1f %s%s" % (num, unit, suffix) num /= 1024.0 return "%.1f %s%s" % (num, 'Yi', suffix) def replication_size(options, args): if args is None or len(args) < 1: raise TypeError(_("Too few arguments.")) server, port = utils.parse_valid_host_port(args.pop()) total_size = 0 count = 0 imageservice = get_image_service() client = imageservice(http.HTTPConnection(server, port), options.targettoken) for image in client.get_images(): LOG.debug('Considering image: %(image)s', {'image': image}) if image['status'] == 'active': total_size += int(image['size']) count += 1 print(_('Total size is %(size)d bytes (%(human_size)s) across ' '%(img_count)d images') % {'size': total_size, 'human_size': _human_readable_size(total_size), 'img_count': count}) def replication_dump(options, args): if len(args) < 2: raise TypeError(_("Too few arguments.")) path = args.pop() server, port = utils.parse_valid_host_port(args.pop()) imageservice = get_image_service() client = imageservice(http.HTTPConnection(server, port), options.sourcetoken) for image in client.get_images(): LOG.debug('Considering: %(image_id)s (%(image_name)s) ' '(%(image_size)d bytes)', {'image_id': image['id'], 'image_name': image.get('name', '--unnamed--'), 'image_size': image['size']}) data_path = os.path.join(path, image['id']) data_filename = data_path + '.img' if not os.path.exists(data_path): LOG.info(_LI('Storing: %(image_id)s (%(image_name)s)' ' (%(image_size)d bytes) in %(data_filename)s'), {'image_id': image['id'], 'image_name': image.get('name', '--unnamed--'), 'image_size': image['size'], 'data_filename': data_filename}) if six.PY3: f = open(data_path, 'w', encoding='utf-8') else: f = open(data_path, 'w') with f: f.write(jsonutils.dumps(image)) if image['status'] == 'active' and not options.metaonly: LOG.debug('Image %s is active', image['id']) image_response = client.get_image(image['id']) with open(data_filename, 'wb') as f: while True: chunk = image_response.read(options.chunksize) if not chunk: break f.write(chunk) def _dict_diff(a, b): if set(a.keys()) - set(b.keys()): LOG.debug('metadata diff -- source has extra keys: %(keys)s', {'keys': ' '.join(set(a.keys()) - set(b.keys()))}) return True for key in a: if str(a[key]) != str(b[key]): LOG.debug('metadata diff -- value differs for key ' '%(key)s: source "%(source_value)s" vs ' 'target "%(target_value)s"', {'key': key, 'source_value': a[key], 'target_value': b[key]}) return True return False def replication_load(options, args): if len(args) < 2: raise TypeError(_("Too few arguments.")) path = args.pop() server, port = utils.parse_valid_host_port(args.pop()) imageservice = get_image_service() client = imageservice(http.HTTPConnection(server, port), options.targettoken) updated = [] for ent in os.listdir(path): if uuidutils.is_uuid_like(ent): image_uuid = ent LOG.info(_LI('Considering: %s'), image_uuid) meta_file_name = os.path.join(path, image_uuid) with open(meta_file_name) as meta_file: meta = jsonutils.loads(meta_file.read()) for key in options.dontreplicate.split(' '): if key in meta: LOG.debug('Stripping %(header)s from saved ' 'metadata', {'header': key}) del meta[key] if _image_present(client, image_uuid): LOG.debug('Image %s already present', image_uuid) headers = client.get_image_meta(image_uuid) for key in options.dontreplicate.split(' '): if key in headers: LOG.debug('Stripping %(header)s from target ' 'metadata', {'header': key}) del headers[key] if _dict_diff(meta, 
headers): LOG.info(_LI('Image %s metadata has changed'), image_uuid) headers, body = client.add_image_meta(meta) _check_upload_response_headers(headers, body) updated.append(meta['id']) else: if not os.path.exists(os.path.join(path, image_uuid + '.img')): LOG.debug('%s dump is missing image data, skipping', image_uuid) continue with open(os.path.join(path, image_uuid + '.img')) as img_file: try: headers, body = client.add_image(meta, img_file) _check_upload_response_headers(headers, body) updated.append(meta['id']) except exc.HTTPConflict: LOG.error(_LE(IMAGE_ALREADY_PRESENT_MESSAGE) % image_uuid) return updated
Apache License 2.0
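Before re-pushing metadata, livecopy above strips the keys listed in --dontreplicate from both the source image and the target headers, and only issues an update when the remaining dicts differ. A self-contained sketch of that compare step; the function name is illustrative and the key list mirrors this module's default dontreplicate option:

DONT_REPLICATE = 'created_at date deleted_at location updated_at'

def needs_metadata_update(source_meta, target_meta, dontreplicate=DONT_REPLICATE):
    skip = set(dontreplicate.split(' '))
    source = {k: v for k, v in source_meta.items() if k not in skip}
    target = {k: v for k, v in target_meta.items() if k not in skip}
    if set(source) - set(target):
        return True                      # source carries keys the target lacks
    # Values are compared as strings, as _dict_diff does above.
    return any(str(source[k]) != str(target[k]) for k in source)

src = {'id': 'abc', 'name': 'cirros', 'updated_at': '2020-01-01'}
dst = {'id': 'abc', 'name': 'cirros-old'}
print(needs_metadata_update(src, dst))   # True: the name differs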
burnysc2/python-sc2
sc2/units.py
Units.returning
python
def returning(self) -> Units: return self.filter(lambda unit: unit.is_returning)
Returns all workers that are carrying minerals or vespene and are returning to a townhall.
https://github.com/burnysc2/python-sc2/blob/a0b90b4447f23fc352a9bd931ae95ee5f4911032/sc2/units.py#L675-L677
from __future__ import annotations import random import warnings from itertools import chain from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Set, Tuple, Union import numpy as np from sc2.ids.unit_typeid import UnitTypeId from sc2.position import Point2, Point3 from sc2.unit import Unit warnings.simplefilter("once") if TYPE_CHECKING: from sc2.bot_ai import BotAI class Units(list): @classmethod def from_proto(cls, units, bot_object: BotAI): return cls((Unit(u, bot_object=bot_object) for u in units)) def __init__(self, units, bot_object: BotAI): super().__init__(units) self._bot_object = bot_object def __call__(self, *args, **kwargs): return UnitSelection(self, *args, **kwargs) def __iter__(self) -> Generator[Unit, None, None]: return (item for item in super().__iter__()) def select(self, *args, **kwargs): return UnitSelection(self, *args, **kwargs) def copy(self): return self.subgroup(self) def __or__(self, other: Units) -> Units: return Units( chain( iter(self), (other_unit for other_unit in other if other_unit.tag not in (self_unit.tag for self_unit in self)), ), self._bot_object, ) def __add__(self, other: Units) -> Units: return Units( chain( iter(self), (other_unit for other_unit in other if other_unit.tag not in (self_unit.tag for self_unit in self)), ), self._bot_object, ) def __and__(self, other: Units) -> Units: return Units( (other_unit for other_unit in other if other_unit.tag in (self_unit.tag for self_unit in self)), self._bot_object, ) def __sub__(self, other: Units) -> Units: return Units( (self_unit for self_unit in self if self_unit.tag not in (other_unit.tag for other_unit in other)), self._bot_object, ) def __hash__(self): return hash(unit.tag for unit in self) @property def amount(self) -> int: return len(self) @property def empty(self) -> bool: return not bool(self) @property def exists(self) -> bool: return bool(self) def find_by_tag(self, tag) -> Optional[Unit]: for unit in self: if unit.tag == tag: return unit return None def by_tag(self, tag): unit = self.find_by_tag(tag) if unit is None: raise KeyError("Unit not found") return unit @property def first(self) -> Unit: assert self, "Units object is empty" return self[0] def take(self, n: int) -> Units: if n >= self.amount: return self else: return self.subgroup(self[:n]) @property def random(self) -> Unit: assert self, "Units object is empty" return random.choice(self) def random_or(self, other: any) -> Unit: return random.choice(self) if self else other def random_group_of(self, n: int) -> Units: if n < 1: return Units([], self._bot_object) elif n >= self.amount: return self else: return self.subgroup(random.sample(self, n)) def in_attack_range_of(self, unit: Unit, bonus_distance: Union[int, float] = 0) -> Units: return self.filter(lambda x: unit.target_in_range(x, bonus_distance=bonus_distance)) def closest_distance_to(self, position: Union[Unit, Point2, Point3]) -> float: assert self, "Units object is empty" if isinstance(position, Unit): return min(self._bot_object._distance_squared_unit_to_unit(unit, position) for unit in self)**0.5 return min(self._bot_object._distance_units_to_pos(self, position)) def furthest_distance_to(self, position: Union[Unit, Point2, Point3]) -> float: assert self, "Units object is empty" if isinstance(position, Unit): return max(self._bot_object._distance_squared_unit_to_unit(unit, position) for unit in self)**0.5 return max(self._bot_object._distance_units_to_pos(self, position)) def closest_to(self, position: Union[Unit, Point2, Point3]) -> Unit: assert self, 
"Units object is empty" if isinstance(position, Unit): return min( (unit1 for unit1 in self), key=lambda unit2: self._bot_object._distance_squared_unit_to_unit(unit2, position), ) distances = self._bot_object._distance_units_to_pos(self, position) return min(((unit, dist) for unit, dist in zip(self, distances)), key=lambda my_tuple: my_tuple[1])[0] def furthest_to(self, position: Union[Unit, Point2, Point3]) -> Unit: assert self, "Units object is empty" if isinstance(position, Unit): return max( (unit1 for unit1 in self), key=lambda unit2: self._bot_object._distance_squared_unit_to_unit(unit2, position), ) distances = self._bot_object._distance_units_to_pos(self, position) return max(((unit, dist) for unit, dist in zip(self, distances)), key=lambda my_tuple: my_tuple[1])[0] def closer_than(self, distance: Union[int, float], position: Union[Unit, Point2, Point3]) -> Units: if not self: return self if isinstance(position, Unit): distance_squared = distance**2 return self.subgroup( unit for unit in self if self._bot_object._distance_squared_unit_to_unit(unit, position) < distance_squared ) distances = self._bot_object._distance_units_to_pos(self, position) return self.subgroup(unit for unit, dist in zip(self, distances) if dist < distance) def further_than(self, distance: Union[int, float], position: Union[Unit, Point2, Point3]) -> Units: if not self: return self if isinstance(position, Unit): distance_squared = distance**2 return self.subgroup( unit for unit in self if distance_squared < self._bot_object._distance_squared_unit_to_unit(unit, position) ) distances = self._bot_object._distance_units_to_pos(self, position) return self.subgroup(unit for unit, dist in zip(self, distances) if distance < dist) def in_distance_between( self, position: Union[Unit, Point2, Tuple[float, float]], distance1: float, distance2: float ) -> Units: if not self: return self if isinstance(position, Unit): distance1_squared = distance1**2 distance2_squared = distance2**2 return self.subgroup( unit for unit in self if distance1_squared < self._bot_object._distance_squared_unit_to_unit(unit, position) < distance2_squared ) distances = self._bot_object._distance_units_to_pos(self, position) return self.subgroup(unit for unit, dist in zip(self, distances) if distance1 < dist < distance2) def closest_n_units(self, position: Union[Unit, Point2], n: int) -> Units: if not self: return self return self.subgroup(self._list_sorted_by_distance_to(position)[:n]) def furthest_n_units(self, position: Union[Unit, Point2, np.ndarray], n: int) -> Units: if not self: return self return self.subgroup(self._list_sorted_by_distance_to(position)[-n:]) def in_distance_of_group(self, other_units: Units, distance: float) -> Units: assert other_units, "Other units object is empty" if not self: return self distance_squared = distance**2 if len(self) == 1: if any( self._bot_object._distance_squared_unit_to_unit(self[0], target) < distance_squared for target in other_units ): return self else: return self.subgroup([]) return self.subgroup( self_unit for self_unit in self if any( self._bot_object._distance_squared_unit_to_unit(self_unit, other_unit) < distance_squared for other_unit in other_units ) ) def in_closest_distance_to_group(self, other_units: Units) -> Unit: assert self, "Units object is empty" assert other_units, "Given units object is empty" return min( self, key=lambda self_unit: min(self._bot_object._distance_squared_unit_to_unit(self_unit, other_unit) for other_unit in other_units), ) def _list_sorted_closest_to_distance(self, 
position: Union[Unit, Point2], distance: float) -> List[Unit]: if isinstance(position, Unit): return sorted( self, key=lambda unit: abs(self._bot_object._distance_squared_unit_to_unit(unit, position) - distance), reverse=True, ) distances = self._bot_object._distance_units_to_pos(self, position) unit_dist_dict = {unit.tag: dist for unit, dist in zip(self, distances)} return sorted(self, key=lambda unit2: abs(unit_dist_dict[unit2.tag] - distance), reverse=True) def n_closest_to_distance(self, position: Union[Point2, np.ndarray], distance: Union[int, float], n: int) -> Units: return self.subgroup(self._list_sorted_closest_to_distance(position=position, distance=distance)[:n]) def n_furthest_to_distance(self, position: Union[Point2, np.ndarray], distance: Union[int, float], n: int) -> Units: return self.subgroup(self._list_sorted_closest_to_distance(position=position, distance=distance)[-n:]) def subgroup(self, units): return Units(units, self._bot_object) def filter(self, pred: callable) -> Units: assert callable(pred), "Function is not callable" return self.subgroup(filter(pred, self)) def sorted(self, key: callable, reverse: bool = False) -> Units: return self.subgroup(sorted(self, key=key, reverse=reverse)) def _list_sorted_by_distance_to(self, position: Union[Unit, Point2], reverse: bool = False) -> List[Unit]: if isinstance(position, Unit): return sorted( self, key=lambda unit: self._bot_object._distance_squared_unit_to_unit(unit, position), reverse=reverse ) distances = self._bot_object._distance_units_to_pos(self, position) unit_dist_dict = {unit.tag: dist for unit, dist in zip(self, distances)} return sorted(self, key=lambda unit2: unit_dist_dict[unit2.tag], reverse=reverse) def sorted_by_distance_to(self, position: Union[Unit, Point2], reverse: bool = False) -> Units: return self.subgroup(self._list_sorted_by_distance_to(position, reverse=reverse)) def tags_in(self, other: Union[Set[int], List[int], Dict[int, Any]]) -> Units: return self.filter(lambda unit: unit.tag in other) def tags_not_in(self, other: Union[Set[int], List[int], Dict[int, Any]]) -> Units: return self.filter(lambda unit: unit.tag not in other) def of_type(self, other: Union[UnitTypeId, Set[UnitTypeId], List[UnitTypeId], Dict[UnitTypeId, Any]]) -> Units: if isinstance(other, UnitTypeId): other = {other} elif isinstance(other, list): other = set(other) return self.filter(lambda unit: unit.type_id in other) def exclude_type(self, other: Union[UnitTypeId, Set[UnitTypeId], List[UnitTypeId], Dict[UnitTypeId, Any]]) -> Units: if isinstance(other, UnitTypeId): other = {other} elif isinstance(other, list): other = set(other) return self.filter(lambda unit: unit.type_id not in other) def same_tech(self, other: Set[UnitTypeId]) -> Units: assert isinstance(other, set), ( f"Please use a set as this filter function is already fairly slow. 
For example" + " 'self.units.same_tech({UnitTypeId.LAIR})'" ) tech_alias_types: Set[int] = {u.value for u in other} unit_data = self._bot_object._game_data.units for unitType in other: for same in unit_data[unitType.value]._proto.tech_alias: tech_alias_types.add(same) return self.filter( lambda unit: unit._proto.unit_type in tech_alias_types or any(same in tech_alias_types for same in unit._type_data._proto.tech_alias) ) def same_unit(self, other: Union[UnitTypeId, Set[UnitTypeId], List[UnitTypeId], Dict[UnitTypeId, Any]]) -> Units: if isinstance(other, UnitTypeId): other = {other} unit_alias_types: Set[int] = {u.value for u in other} unit_data = self._bot_object._game_data.units for unitType in other: unit_alias_types.add(unit_data[unitType.value]._proto.unit_alias) unit_alias_types.discard(0) return self.filter( lambda unit: unit._proto.unit_type in unit_alias_types or unit._type_data._proto.unit_alias in unit_alias_types ) @property def center(self) -> Point2: assert self, f"Units object is empty" amount = self.amount return Point2( ( sum(unit._proto.pos.x for unit in self) / amount, sum(unit._proto.pos.y for unit in self) / amount, ) ) @property def selected(self) -> Units: return self.filter(lambda unit: unit.is_selected) @property def tags(self) -> Set[int]: return {unit.tag for unit in self} @property def ready(self) -> Units: return self.filter(lambda unit: unit.is_ready) @property def not_ready(self) -> Units: return self.filter(lambda unit: not unit.is_ready) @property def idle(self) -> Units: return self.filter(lambda unit: unit.is_idle) @property def owned(self) -> Units: return self.filter(lambda unit: unit.is_mine) @property def enemy(self) -> Units: return self.filter(lambda unit: unit.is_enemy) @property def flying(self) -> Units: return self.filter(lambda unit: unit.is_flying) @property def not_flying(self) -> Units: return self.filter(lambda unit: not unit.is_flying) @property def structure(self) -> Units: return self.filter(lambda unit: unit.is_structure) @property def not_structure(self) -> Units: return self.filter(lambda unit: not unit.is_structure) @property def gathering(self) -> Units: return self.filter(lambda unit: unit.is_gathering) @property
MIT License
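Units.returning above filters a Units collection down to workers whose is_returning flag is set, i.e. workers carrying resources back to a townhall. A usage sketch inside a bot step; self.workers and self.townhalls are standard BotAI collections and are assumed here, since they are not part of the excerpt:

from sc2.bot_ai import BotAI

class WorkerWatcher(BotAI):

    async def on_step(self, iteration: int):
        returning = self.workers.returning     # Units filtered by is_returning
        if returning and self.townhalls:
            # e.g. log how many deliveries are about to land this step
            print(f"{returning.amount} workers heading back to base")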
blurstudio/cross3d
cross3d/abstract/abstractsceneobject.py
AbstractSceneObject._nativeWireColor
python
def _nativeWireColor(self): return False
\remarks return the color for the wireframe of this object in the scene \sa setWireColor \return <QColor>
https://github.com/blurstudio/cross3d/blob/277968d1227de740fc87ef61005c75034420eadf/cross3d/abstract/abstractsceneobject.py#L135-L141
import cross3d from PyQt4.QtGui import QColor from cross3d import SceneWrapper, abstractmethod from cross3d.constants import ObjectType, RotationOrder class AbstractSceneObject(SceneWrapper): _objectType = ObjectType.Generic _subClasses = {} def __init__(self, scene, nativeObject): SceneWrapper.__init__(self, scene, nativeObject) if self._objectType & ObjectType.Generic: self._objectType = self._typeOfNativeObject(nativeObject) self._parameters = {} def __new__(cls, scene, nativeObject, *args, **kwargs): if not cls._subClasses: for c in cls._subclasses(cls): if not c._objectType == ObjectType.Generic: cls._subClasses[ c._objectType ] = c sceneObjectType = cls._typeOfNativeObject(nativeObject) if sceneObjectType in cls._subClasses: c = cls._subClasses[sceneObjectType] return SceneWrapper.__new__(c) return SceneWrapper.__new__(cls) @abstractmethod def _nativeType(self): return str def _findNativeChild(self, name, recursive=False, parent=None): return None @abstractmethod def _nativeCaches(self, cacheType=0): return [] @abstractmethod def _nativeChildren(self, recursive=False, wildcard='', type='', parent='', childrenCollector=[]): return [] @abstractmethod def _nativeLayer(self): return None @abstractmethod def _nativeMaterial(self): return None @abstractmethod def _nativeModel(self): return None @abstractmethod def _nativeParent(self): return None @abstractmethod
MIT License
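_nativeWireColor above is an abstract hook that a concrete subclass for a specific 3d package is expected to override so it returns the native object's wireframe color as a QColor. A hypothetical minimal override; the subclass name, the import path (taken from the file path), and the fixed color are placeholders, not part of the real cross3d implementations:

from PyQt4.QtGui import QColor
from cross3d.abstract.abstractsceneobject import AbstractSceneObject

class SketchSceneObject(AbstractSceneObject):

    def _nativeWireColor(self):
        # A real subclass would query the wrapped native node; this placeholder
        # just hands back a mid-grey wireframe color.
        return QColor(128, 128, 128)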
docusign/docusign-python-client
docusign_esign/models/initial_here.py
InitialHere.custom_tab_id_metadata
python
def custom_tab_id_metadata(self, custom_tab_id_metadata): self._custom_tab_id_metadata = custom_tab_id_metadata
Sets the custom_tab_id_metadata of this InitialHere. :param custom_tab_id_metadata: The custom_tab_id_metadata of this InitialHere. # noqa: E501 :type: PropertyMetadata
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/initial_here.py#L924-L932
import pprint import re import six from docusign_esign.client.configuration import Configuration class InitialHere(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'anchor_allow_white_space_in_characters': 'str', 'anchor_allow_white_space_in_characters_metadata': 'PropertyMetadata', 'anchor_case_sensitive': 'str', 'anchor_case_sensitive_metadata': 'PropertyMetadata', 'anchor_horizontal_alignment': 'str', 'anchor_horizontal_alignment_metadata': 'PropertyMetadata', 'anchor_ignore_if_not_present': 'str', 'anchor_ignore_if_not_present_metadata': 'PropertyMetadata', 'anchor_match_whole_word': 'str', 'anchor_match_whole_word_metadata': 'PropertyMetadata', 'anchor_string': 'str', 'anchor_string_metadata': 'PropertyMetadata', 'anchor_tab_processor_version': 'str', 'anchor_tab_processor_version_metadata': 'PropertyMetadata', 'anchor_units': 'str', 'anchor_units_metadata': 'PropertyMetadata', 'anchor_x_offset': 'str', 'anchor_x_offset_metadata': 'PropertyMetadata', 'anchor_y_offset': 'str', 'anchor_y_offset_metadata': 'PropertyMetadata', 'conditional_parent_label': 'str', 'conditional_parent_label_metadata': 'PropertyMetadata', 'conditional_parent_value': 'str', 'conditional_parent_value_metadata': 'PropertyMetadata', 'custom_tab_id': 'str', 'custom_tab_id_metadata': 'PropertyMetadata', 'document_id': 'str', 'document_id_metadata': 'PropertyMetadata', 'error_details': 'ErrorDetails', 'form_order': 'str', 'form_order_metadata': 'PropertyMetadata', 'form_page_label': 'str', 'form_page_label_metadata': 'PropertyMetadata', 'form_page_number': 'str', 'form_page_number_metadata': 'PropertyMetadata', 'hand_draw_required': 'str', 'height': 'str', 'height_metadata': 'PropertyMetadata', 'merge_field': 'MergeField', 'merge_field_xml': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'optional': 'str', 'optional_metadata': 'PropertyMetadata', 'page_number': 'str', 'page_number_metadata': 'PropertyMetadata', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_id_guid_metadata': 'PropertyMetadata', 'recipient_id_metadata': 'PropertyMetadata', 'scale_value': 'str', 'scale_value_metadata': 'PropertyMetadata', 'smart_contract_information': 'SmartContractInformation', 'source': 'str', 'status': 'str', 'status_metadata': 'PropertyMetadata', 'tab_group_labels': 'list[str]', 'tab_group_labels_metadata': 'PropertyMetadata', 'tab_id': 'str', 'tab_id_metadata': 'PropertyMetadata', 'tab_label': 'str', 'tab_label_metadata': 'PropertyMetadata', 'tab_order': 'str', 'tab_order_metadata': 'PropertyMetadata', 'tab_type': 'str', 'tab_type_metadata': 'PropertyMetadata', 'template_locked': 'str', 'template_locked_metadata': 'PropertyMetadata', 'template_required': 'str', 'template_required_metadata': 'PropertyMetadata', 'tooltip': 'str', 'tool_tip_metadata': 'PropertyMetadata', 'width': 'str', 'width_metadata': 'PropertyMetadata', 'x_position': 'str', 'x_position_metadata': 'PropertyMetadata', 'y_position': 'str', 'y_position_metadata': 'PropertyMetadata' } attribute_map = { 'anchor_allow_white_space_in_characters': 'anchorAllowWhiteSpaceInCharacters', 'anchor_allow_white_space_in_characters_metadata': 'anchorAllowWhiteSpaceInCharactersMetadata', 'anchor_case_sensitive': 'anchorCaseSensitive', 'anchor_case_sensitive_metadata': 'anchorCaseSensitiveMetadata', 'anchor_horizontal_alignment': 'anchorHorizontalAlignment', 
'anchor_horizontal_alignment_metadata': 'anchorHorizontalAlignmentMetadata', 'anchor_ignore_if_not_present': 'anchorIgnoreIfNotPresent', 'anchor_ignore_if_not_present_metadata': 'anchorIgnoreIfNotPresentMetadata', 'anchor_match_whole_word': 'anchorMatchWholeWord', 'anchor_match_whole_word_metadata': 'anchorMatchWholeWordMetadata', 'anchor_string': 'anchorString', 'anchor_string_metadata': 'anchorStringMetadata', 'anchor_tab_processor_version': 'anchorTabProcessorVersion', 'anchor_tab_processor_version_metadata': 'anchorTabProcessorVersionMetadata', 'anchor_units': 'anchorUnits', 'anchor_units_metadata': 'anchorUnitsMetadata', 'anchor_x_offset': 'anchorXOffset', 'anchor_x_offset_metadata': 'anchorXOffsetMetadata', 'anchor_y_offset': 'anchorYOffset', 'anchor_y_offset_metadata': 'anchorYOffsetMetadata', 'conditional_parent_label': 'conditionalParentLabel', 'conditional_parent_label_metadata': 'conditionalParentLabelMetadata', 'conditional_parent_value': 'conditionalParentValue', 'conditional_parent_value_metadata': 'conditionalParentValueMetadata', 'custom_tab_id': 'customTabId', 'custom_tab_id_metadata': 'customTabIdMetadata', 'document_id': 'documentId', 'document_id_metadata': 'documentIdMetadata', 'error_details': 'errorDetails', 'form_order': 'formOrder', 'form_order_metadata': 'formOrderMetadata', 'form_page_label': 'formPageLabel', 'form_page_label_metadata': 'formPageLabelMetadata', 'form_page_number': 'formPageNumber', 'form_page_number_metadata': 'formPageNumberMetadata', 'hand_draw_required': 'handDrawRequired', 'height': 'height', 'height_metadata': 'heightMetadata', 'merge_field': 'mergeField', 'merge_field_xml': 'mergeFieldXml', 'name': 'name', 'name_metadata': 'nameMetadata', 'optional': 'optional', 'optional_metadata': 'optionalMetadata', 'page_number': 'pageNumber', 'page_number_metadata': 'pageNumberMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_id_guid_metadata': 'recipientIdGuidMetadata', 'recipient_id_metadata': 'recipientIdMetadata', 'scale_value': 'scaleValue', 'scale_value_metadata': 'scaleValueMetadata', 'smart_contract_information': 'smartContractInformation', 'source': 'source', 'status': 'status', 'status_metadata': 'statusMetadata', 'tab_group_labels': 'tabGroupLabels', 'tab_group_labels_metadata': 'tabGroupLabelsMetadata', 'tab_id': 'tabId', 'tab_id_metadata': 'tabIdMetadata', 'tab_label': 'tabLabel', 'tab_label_metadata': 'tabLabelMetadata', 'tab_order': 'tabOrder', 'tab_order_metadata': 'tabOrderMetadata', 'tab_type': 'tabType', 'tab_type_metadata': 'tabTypeMetadata', 'template_locked': 'templateLocked', 'template_locked_metadata': 'templateLockedMetadata', 'template_required': 'templateRequired', 'template_required_metadata': 'templateRequiredMetadata', 'tooltip': 'tooltip', 'tool_tip_metadata': 'toolTipMetadata', 'width': 'width', 'width_metadata': 'widthMetadata', 'x_position': 'xPosition', 'x_position_metadata': 'xPositionMetadata', 'y_position': 'yPosition', 'y_position_metadata': 'yPositionMetadata' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._anchor_allow_white_space_in_characters = None self._anchor_allow_white_space_in_characters_metadata = None self._anchor_case_sensitive = None self._anchor_case_sensitive_metadata = None self._anchor_horizontal_alignment = None self._anchor_horizontal_alignment_metadata = None self._anchor_ignore_if_not_present = None self._anchor_ignore_if_not_present_metadata = 
None self._anchor_match_whole_word = None self._anchor_match_whole_word_metadata = None self._anchor_string = None self._anchor_string_metadata = None self._anchor_tab_processor_version = None self._anchor_tab_processor_version_metadata = None self._anchor_units = None self._anchor_units_metadata = None self._anchor_x_offset = None self._anchor_x_offset_metadata = None self._anchor_y_offset = None self._anchor_y_offset_metadata = None self._conditional_parent_label = None self._conditional_parent_label_metadata = None self._conditional_parent_value = None self._conditional_parent_value_metadata = None self._custom_tab_id = None self._custom_tab_id_metadata = None self._document_id = None self._document_id_metadata = None self._error_details = None self._form_order = None self._form_order_metadata = None self._form_page_label = None self._form_page_label_metadata = None self._form_page_number = None self._form_page_number_metadata = None self._hand_draw_required = None self._height = None self._height_metadata = None self._merge_field = None self._merge_field_xml = None self._name = None self._name_metadata = None self._optional = None self._optional_metadata = None self._page_number = None self._page_number_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_id_guid_metadata = None self._recipient_id_metadata = None self._scale_value = None self._scale_value_metadata = None self._smart_contract_information = None self._source = None self._status = None self._status_metadata = None self._tab_group_labels = None self._tab_group_labels_metadata = None self._tab_id = None self._tab_id_metadata = None self._tab_label = None self._tab_label_metadata = None self._tab_order = None self._tab_order_metadata = None self._tab_type = None self._tab_type_metadata = None self._template_locked = None self._template_locked_metadata = None self._template_required = None self._template_required_metadata = None self._tooltip = None self._tool_tip_metadata = None self._width = None self._width_metadata = None self._x_position = None self._x_position_metadata = None self._y_position = None self._y_position_metadata = None self.discriminator = None setattr(self, "_{}".format('anchor_allow_white_space_in_characters'), kwargs.get('anchor_allow_white_space_in_characters', None)) setattr(self, "_{}".format('anchor_allow_white_space_in_characters_metadata'), kwargs.get('anchor_allow_white_space_in_characters_metadata', None)) setattr(self, "_{}".format('anchor_case_sensitive'), kwargs.get('anchor_case_sensitive', None)) setattr(self, "_{}".format('anchor_case_sensitive_metadata'), kwargs.get('anchor_case_sensitive_metadata', None)) setattr(self, "_{}".format('anchor_horizontal_alignment'), kwargs.get('anchor_horizontal_alignment', None)) setattr(self, "_{}".format('anchor_horizontal_alignment_metadata'), kwargs.get('anchor_horizontal_alignment_metadata', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present'), kwargs.get('anchor_ignore_if_not_present', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present_metadata'), kwargs.get('anchor_ignore_if_not_present_metadata', None)) setattr(self, "_{}".format('anchor_match_whole_word'), kwargs.get('anchor_match_whole_word', None)) setattr(self, "_{}".format('anchor_match_whole_word_metadata'), kwargs.get('anchor_match_whole_word_metadata', None)) setattr(self, "_{}".format('anchor_string'), kwargs.get('anchor_string', None)) setattr(self, "_{}".format('anchor_string_metadata'), kwargs.get('anchor_string_metadata', 
None)) setattr(self, "_{}".format('anchor_tab_processor_version'), kwargs.get('anchor_tab_processor_version', None)) setattr(self, "_{}".format('anchor_tab_processor_version_metadata'), kwargs.get('anchor_tab_processor_version_metadata', None)) setattr(self, "_{}".format('anchor_units'), kwargs.get('anchor_units', None)) setattr(self, "_{}".format('anchor_units_metadata'), kwargs.get('anchor_units_metadata', None)) setattr(self, "_{}".format('anchor_x_offset'), kwargs.get('anchor_x_offset', None)) setattr(self, "_{}".format('anchor_x_offset_metadata'), kwargs.get('anchor_x_offset_metadata', None)) setattr(self, "_{}".format('anchor_y_offset'), kwargs.get('anchor_y_offset', None)) setattr(self, "_{}".format('anchor_y_offset_metadata'), kwargs.get('anchor_y_offset_metadata', None)) setattr(self, "_{}".format('conditional_parent_label'), kwargs.get('conditional_parent_label', None)) setattr(self, "_{}".format('conditional_parent_label_metadata'), kwargs.get('conditional_parent_label_metadata', None)) setattr(self, "_{}".format('conditional_parent_value'), kwargs.get('conditional_parent_value', None)) setattr(self, "_{}".format('conditional_parent_value_metadata'), kwargs.get('conditional_parent_value_metadata', None)) setattr(self, "_{}".format('custom_tab_id'), kwargs.get('custom_tab_id', None)) setattr(self, "_{}".format('custom_tab_id_metadata'), kwargs.get('custom_tab_id_metadata', None)) setattr(self, "_{}".format('document_id'), kwargs.get('document_id', None)) setattr(self, "_{}".format('document_id_metadata'), kwargs.get('document_id_metadata', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('form_order'), kwargs.get('form_order', None)) setattr(self, "_{}".format('form_order_metadata'), kwargs.get('form_order_metadata', None)) setattr(self, "_{}".format('form_page_label'), kwargs.get('form_page_label', None)) setattr(self, "_{}".format('form_page_label_metadata'), kwargs.get('form_page_label_metadata', None)) setattr(self, "_{}".format('form_page_number'), kwargs.get('form_page_number', None)) setattr(self, "_{}".format('form_page_number_metadata'), kwargs.get('form_page_number_metadata', None)) setattr(self, "_{}".format('hand_draw_required'), kwargs.get('hand_draw_required', None)) setattr(self, "_{}".format('height'), kwargs.get('height', None)) setattr(self, "_{}".format('height_metadata'), kwargs.get('height_metadata', None)) setattr(self, "_{}".format('merge_field'), kwargs.get('merge_field', None)) setattr(self, "_{}".format('merge_field_xml'), kwargs.get('merge_field_xml', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('optional'), kwargs.get('optional', None)) setattr(self, "_{}".format('optional_metadata'), kwargs.get('optional_metadata', None)) setattr(self, "_{}".format('page_number'), kwargs.get('page_number', None)) setattr(self, "_{}".format('page_number_metadata'), kwargs.get('page_number_metadata', None)) setattr(self, "_{}".format('recipient_id'), kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_id_guid_metadata'), kwargs.get('recipient_id_guid_metadata', None)) setattr(self, "_{}".format('recipient_id_metadata'), kwargs.get('recipient_id_metadata', None)) setattr(self, "_{}".format('scale_value'), kwargs.get('scale_value', None)) setattr(self, 
"_{}".format('scale_value_metadata'), kwargs.get('scale_value_metadata', None)) setattr(self, "_{}".format('smart_contract_information'), kwargs.get('smart_contract_information', None)) setattr(self, "_{}".format('source'), kwargs.get('source', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_metadata'), kwargs.get('status_metadata', None)) setattr(self, "_{}".format('tab_group_labels'), kwargs.get('tab_group_labels', None)) setattr(self, "_{}".format('tab_group_labels_metadata'), kwargs.get('tab_group_labels_metadata', None)) setattr(self, "_{}".format('tab_id'), kwargs.get('tab_id', None)) setattr(self, "_{}".format('tab_id_metadata'), kwargs.get('tab_id_metadata', None)) setattr(self, "_{}".format('tab_label'), kwargs.get('tab_label', None)) setattr(self, "_{}".format('tab_label_metadata'), kwargs.get('tab_label_metadata', None)) setattr(self, "_{}".format('tab_order'), kwargs.get('tab_order', None)) setattr(self, "_{}".format('tab_order_metadata'), kwargs.get('tab_order_metadata', None)) setattr(self, "_{}".format('tab_type'), kwargs.get('tab_type', None)) setattr(self, "_{}".format('tab_type_metadata'), kwargs.get('tab_type_metadata', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_locked_metadata'), kwargs.get('template_locked_metadata', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('template_required_metadata'), kwargs.get('template_required_metadata', None)) setattr(self, "_{}".format('tooltip'), kwargs.get('tooltip', None)) setattr(self, "_{}".format('tool_tip_metadata'), kwargs.get('tool_tip_metadata', None)) setattr(self, "_{}".format('width'), kwargs.get('width', None)) setattr(self, "_{}".format('width_metadata'), kwargs.get('width_metadata', None)) setattr(self, "_{}".format('x_position'), kwargs.get('x_position', None)) setattr(self, "_{}".format('x_position_metadata'), kwargs.get('x_position_metadata', None)) setattr(self, "_{}".format('y_position'), kwargs.get('y_position', None)) setattr(self, "_{}".format('y_position_metadata'), kwargs.get('y_position_metadata', None)) @property def anchor_allow_white_space_in_characters(self): return self._anchor_allow_white_space_in_characters @anchor_allow_white_space_in_characters.setter def anchor_allow_white_space_in_characters(self, anchor_allow_white_space_in_characters): self._anchor_allow_white_space_in_characters = anchor_allow_white_space_in_characters @property def anchor_allow_white_space_in_characters_metadata(self): return self._anchor_allow_white_space_in_characters_metadata @anchor_allow_white_space_in_characters_metadata.setter def anchor_allow_white_space_in_characters_metadata(self, anchor_allow_white_space_in_characters_metadata): self._anchor_allow_white_space_in_characters_metadata = anchor_allow_white_space_in_characters_metadata @property def anchor_case_sensitive(self): return self._anchor_case_sensitive @anchor_case_sensitive.setter def anchor_case_sensitive(self, anchor_case_sensitive): self._anchor_case_sensitive = anchor_case_sensitive @property def anchor_case_sensitive_metadata(self): return self._anchor_case_sensitive_metadata @anchor_case_sensitive_metadata.setter def anchor_case_sensitive_metadata(self, anchor_case_sensitive_metadata): self._anchor_case_sensitive_metadata = anchor_case_sensitive_metadata @property def anchor_horizontal_alignment(self): return 
self._anchor_horizontal_alignment @anchor_horizontal_alignment.setter def anchor_horizontal_alignment(self, anchor_horizontal_alignment): self._anchor_horizontal_alignment = anchor_horizontal_alignment @property def anchor_horizontal_alignment_metadata(self): return self._anchor_horizontal_alignment_metadata @anchor_horizontal_alignment_metadata.setter def anchor_horizontal_alignment_metadata(self, anchor_horizontal_alignment_metadata): self._anchor_horizontal_alignment_metadata = anchor_horizontal_alignment_metadata @property def anchor_ignore_if_not_present(self): return self._anchor_ignore_if_not_present @anchor_ignore_if_not_present.setter def anchor_ignore_if_not_present(self, anchor_ignore_if_not_present): self._anchor_ignore_if_not_present = anchor_ignore_if_not_present @property def anchor_ignore_if_not_present_metadata(self): return self._anchor_ignore_if_not_present_metadata @anchor_ignore_if_not_present_metadata.setter def anchor_ignore_if_not_present_metadata(self, anchor_ignore_if_not_present_metadata): self._anchor_ignore_if_not_present_metadata = anchor_ignore_if_not_present_metadata @property def anchor_match_whole_word(self): return self._anchor_match_whole_word @anchor_match_whole_word.setter def anchor_match_whole_word(self, anchor_match_whole_word): self._anchor_match_whole_word = anchor_match_whole_word @property def anchor_match_whole_word_metadata(self): return self._anchor_match_whole_word_metadata @anchor_match_whole_word_metadata.setter def anchor_match_whole_word_metadata(self, anchor_match_whole_word_metadata): self._anchor_match_whole_word_metadata = anchor_match_whole_word_metadata @property def anchor_string(self): return self._anchor_string @anchor_string.setter def anchor_string(self, anchor_string): self._anchor_string = anchor_string @property def anchor_string_metadata(self): return self._anchor_string_metadata @anchor_string_metadata.setter def anchor_string_metadata(self, anchor_string_metadata): self._anchor_string_metadata = anchor_string_metadata @property def anchor_tab_processor_version(self): return self._anchor_tab_processor_version @anchor_tab_processor_version.setter def anchor_tab_processor_version(self, anchor_tab_processor_version): self._anchor_tab_processor_version = anchor_tab_processor_version @property def anchor_tab_processor_version_metadata(self): return self._anchor_tab_processor_version_metadata @anchor_tab_processor_version_metadata.setter def anchor_tab_processor_version_metadata(self, anchor_tab_processor_version_metadata): self._anchor_tab_processor_version_metadata = anchor_tab_processor_version_metadata @property def anchor_units(self): return self._anchor_units @anchor_units.setter def anchor_units(self, anchor_units): self._anchor_units = anchor_units @property def anchor_units_metadata(self): return self._anchor_units_metadata @anchor_units_metadata.setter def anchor_units_metadata(self, anchor_units_metadata): self._anchor_units_metadata = anchor_units_metadata @property def anchor_x_offset(self): return self._anchor_x_offset @anchor_x_offset.setter def anchor_x_offset(self, anchor_x_offset): self._anchor_x_offset = anchor_x_offset @property def anchor_x_offset_metadata(self): return self._anchor_x_offset_metadata @anchor_x_offset_metadata.setter def anchor_x_offset_metadata(self, anchor_x_offset_metadata): self._anchor_x_offset_metadata = anchor_x_offset_metadata @property def anchor_y_offset(self): return self._anchor_y_offset @anchor_y_offset.setter def anchor_y_offset(self, anchor_y_offset): self._anchor_y_offset = 
anchor_y_offset @property def anchor_y_offset_metadata(self): return self._anchor_y_offset_metadata @anchor_y_offset_metadata.setter def anchor_y_offset_metadata(self, anchor_y_offset_metadata): self._anchor_y_offset_metadata = anchor_y_offset_metadata @property def conditional_parent_label(self): return self._conditional_parent_label @conditional_parent_label.setter def conditional_parent_label(self, conditional_parent_label): self._conditional_parent_label = conditional_parent_label @property def conditional_parent_label_metadata(self): return self._conditional_parent_label_metadata @conditional_parent_label_metadata.setter def conditional_parent_label_metadata(self, conditional_parent_label_metadata): self._conditional_parent_label_metadata = conditional_parent_label_metadata @property def conditional_parent_value(self): return self._conditional_parent_value @conditional_parent_value.setter def conditional_parent_value(self, conditional_parent_value): self._conditional_parent_value = conditional_parent_value @property def conditional_parent_value_metadata(self): return self._conditional_parent_value_metadata @conditional_parent_value_metadata.setter def conditional_parent_value_metadata(self, conditional_parent_value_metadata): self._conditional_parent_value_metadata = conditional_parent_value_metadata @property def custom_tab_id(self): return self._custom_tab_id @custom_tab_id.setter def custom_tab_id(self, custom_tab_id): self._custom_tab_id = custom_tab_id @property def custom_tab_id_metadata(self): return self._custom_tab_id_metadata @custom_tab_id_metadata.setter
MIT License
theroyakash/akdsframework
AKDSFramework/structure/stack.py
Stack.isEmpty
python
def isEmpty(self):
    return self.stack.count() == 0
Says whether the stack is empty or not.

    Returns:
        - bool: True if there are no elements in the stack, else False.
https://github.com/theroyakash/akdsframework/blob/3c98792007389658c9ea9966c2263d6a07601f3a/AKDSFramework/structure/stack.py#L53-L59
from AKDSFramework.structure.linkedlist import SinglyLinkedList from AKDSFramework.error import EmptyStackError from typing import Any class Stack: stack: SinglyLinkedList def __init__(self): self.stack = SinglyLinkedList() def push(self, value: Any) -> None: self.stack.add(value) def pop(self) -> Any: if self.stack.count() != 0: return self.stack.removeAt(self.stack.count() - 1) else: raise LookupError('Stack Underflow') def __reversed__(self) -> SinglyLinkedList: return reversed(self.stack) def __iter__(self): raise NotImplementedError( 'Iteration over the stack is not implemented yet') def peak_top(self): return self.stack.get_head() def __len__(self): return len(self.stack)
MIT License
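For illustration, a minimal usage sketch of the `Stack` API documented above (assuming the AKDSFramework package from this record is installed):

from AKDSFramework.structure.stack import Stack

s = Stack()
print(s.isEmpty())   # True: nothing has been pushed yet
s.push(10)
s.push(20)
print(len(s))        # 2
s.pop()              # removes the most recently pushed element (20)
print(s.isEmpty())   # False: one element remains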
seomoz/simhash-db-py
bench.py
query
python
def query():
    seeds = make_seeds()
    client = Client(args.backend, args.name, args.num_blocks, args.num_bits,
        **kwargs)
    for i in range(1000):
        if i % 25 == 0:
            print 'Querying batch %i' % i
        hashes = [(start + i * interval) for start, interval in seeds]
        try:
            results = client.find_all(hashes)
        except GeneralException as exc:
            print '---> Client exception: %s' % repr(exc)
Run the timing numbers for each of the provided seeds, querying all of them with `find_all`.
https://github.com/seomoz/simhash-db-py/blob/f9feca5b09cf094c65548c53ad596ab07cf41639/bench.py#L76-L89
import os import time import psutil import random import argparse from simhash_db import Client, GeneralException parser = argparse.ArgumentParser(description='Run benchmarks on simhash_db') parser.add_argument('--count', type=int, default=1000, help='How many thousands of keys should be inserted per process') parser.add_argument('--name', type=str, default='testing', help='The name of the set of simhashes to use') parser.add_argument('--processes', type=int, default=(2 * psutil.NUM_CPUS), help='How many processes should be forked (defaults to 2 x NUM_CPUS)') parser.add_argument('--num-blocks', dest='num_blocks', type=int, default=6, help='How many blocks to configure the client to use') parser.add_argument('--num-bits', dest='num_bits', type=int, default=3, help='How many bits to configure the client to use') parser.add_argument('--backend', type=str, required=True, help='Which backend to use') parser.add_argument('--config', type=str, required=False, help='Path to a yaml file with the host configuration') args = parser.parse_args() if args.config: from yaml import load with open(args.config) as fin: kwargs = load(fin.read()) else: kwargs = {} def make_seeds(): return [( random.randint(0, 2 ** 64), random.randint(1, 1000) ) for i in range(args.count)] def insert(): seeds = make_seeds() client = Client(args.backend, args.name, args.num_blocks, args.num_bits, **kwargs) for i in range(1000): if i % 25 == 0: print 'Inserting batch %i' % i hashes = [(start + i * interval) for start, interval in seeds] try: results = client.insert(hashes) except GeneralException as exc: print '---> Client exception: %s' % repr(exc) exit(0)
MIT License
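As a hedged sketch of how the benchmarked `Client` above is driven outside the forked benchmark loop; the backend name is an illustrative assumption, and the set name and block/bit counts mirror the argument-parser defaults shown in the context:

from simhash_db import Client, GeneralException

client = Client('mongo', 'testing', 6, 3)   # backend name is an assumption
hashes = [0x1234567890ABCDEF, 0x0F0F0F0F0F0F0F0F]
try:
    client.insert(hashes)                   # store the simhashes
    matches = client.find_all(hashes)       # query for near-duplicate matches
except GeneralException as exc:
    print('Client exception: %r' % (exc,))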
cupy/cupy
cupy/_manipulation/join.py
stack
python
def stack(tup, axis=0, out=None):
    return concatenate([cupy.expand_dims(x, axis) for x in tup], axis, out)
Stacks arrays along a new axis.

    Args:
        tup (sequence of arrays): Arrays to be stacked.
        axis (int): Axis along which the arrays are stacked.
        out (cupy.ndarray): Output array.

    Returns:
        cupy.ndarray: Stacked array.

    .. seealso:: :func:`numpy.stack`
https://github.com/cupy/cupy/blob/a466b03ef0afd7c1ce1615e3f48da64ae38c1320/cupy/_manipulation/join.py#L122-L135
import cupy from cupy import _core def column_stack(tup): if any(not isinstance(a, cupy.ndarray) for a in tup): raise TypeError('Only cupy arrays can be column stacked') lst = list(tup) for i, a in enumerate(lst): if a.ndim == 1: a = a[:, cupy.newaxis] lst[i] = a elif a.ndim != 2: raise ValueError( 'Only 1 or 2 dimensional arrays can be column stacked') return concatenate(lst, axis=1) def concatenate(tup, axis=0, out=None, *, dtype=None, casting='same_kind'): if axis is None: tup = [m.ravel() for m in tup] axis = 0 return _core.concatenate_method(tup, axis, out, dtype, casting) def dstack(tup): return concatenate([cupy.atleast_3d(m) for m in tup], 2) def hstack(tup): arrs = [cupy.atleast_1d(a) for a in tup] axis = 1 if arrs[0].ndim == 1: axis = 0 return concatenate(arrs, axis) def vstack(tup): return concatenate([cupy.atleast_2d(m) for m in tup], 0)
MIT License
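A minimal usage sketch for `cupy.stack` (requires a CUDA device), mirroring the `numpy.stack` semantics noted in the docstring above:

import cupy

a = cupy.array([1, 2, 3])
b = cupy.array([4, 5, 6])
print(cupy.stack((a, b)).shape)          # (2, 3): new axis inserted at position 0
print(cupy.stack((a, b), axis=1).shape)  # (3, 2): new axis inserted at position 1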
obaraemmanuel/formation
hoverset/ui/widgets.py
Widget.absolute_bounds
python
def absolute_bounds(self):
    self.update_idletasks()
    return (self.winfo_rootx(), self.winfo_rooty(),
            self.winfo_rootx() + self.width, self.winfo_rooty() + self.height)
Get the position of the widget on the screen.

    :return: a tuple containing the bounding box of the widget (x1, y1, x2, y2)
https://github.com/obaraemmanuel/formation/blob/31244cbceb1bb405007f5f051ae2102ab021e779/hoverset/ui/widgets.py#L826-L834
import abc import functools import logging import os import re import webbrowser import tkinter as tk import tkinter.ttk as ttk from collections import namedtuple from tkinter import font from hoverset.data.images import load_image_to_widget from hoverset.data.utils import get_resource_path, get_theme_path from hoverset.platform import platform_is, WINDOWS, LINUX, MAC from hoverset.ui.animation import Animate, Easing from hoverset.ui.icons import get_icon_image from hoverset.ui.styles import StyleDelegator from hoverset.ui.windows import DragWindow from hoverset.ui.menu import MenuUtils import hoverset.ui __all__ = ( "Application", "Button", "Canvas", "CenterWindowMixin", "Checkbutton", "ComboBox", "CompoundList", "ContextMenuMixin", "DragWindow", "DrawOver", "EditableMixin", "Entry", "EventMask", "EventWrap", "FontStyle", "Frame", "Scale", "ImageCacheMixin", "Label", "LabelFrame", "MenuButton", "Message", "PanedWindow", "Popup", "PositionMixin", "ProgressBar", "RadioButton", "RadioButtonGroup", "Screen", "ScrollableInterface", "ScrolledFrame", "SpinBox", "Spinner", "TabView", "ToggleButton", "ToolWindow", "Tree", "TreeView", "Text", "Widget", "WidgetError", "Window", "WindowMixin", "chain", "clean_styles", "set_ttk_style", "system_fonts", "suppress_change", ) class FontStyle(font.Font): @staticmethod def families(root=None, displayof=None): return font.families(root, displayof) @staticmethod def nametofont(name): try: return font.nametofont(name) except tk.TclError: return None @staticmethod def names(root=None): return font.names(root) class EventMask: SHIFT = 0x0001 CAPS_LOCK = 0x0002 CONTROL = 0x0004 L_ALT = 0x0008 NUM_LOCK = 0x0010 R_ALT = 0x0080 MOUSE_BUTTON_1 = 0x0100 MOUSE_BUTTON_2 = 0x0200 MOUSE_BUTTON_3 = 0x0400 EventWrap = namedtuple('EventWrap', ['x_root', 'y_root', 'x', 'y']) EventWrap.__doc__ = """ Imitate a tkinter event object for use when handling synthetic events""" class WidgetError(tk.TclError): pass def chain(func): @functools.wraps(func) def wrap(self, *args, **kwargs): func(self, *args, **kwargs) return self return wrap def set_ttk_style(widget, cnf=None, **styles) -> None: if cnf is None: cnf = {} styles.update(cnf) ttk_style = ttk.Style() orient = "." 
+ str(widget['orient']).title() if 'orient' in widget.keys() else '' class_name = 'hover{}.{}'.format(orient, widget.winfo_class()) ttk_style.configure(class_name, **styles) widget.configure(style=class_name) def config_ttk(widget, cnf=None, **styles) -> None: if cnf is None: cnf = {} styles.update(cnf) direct = {i: styles[i] for i in styles.keys() & set(widget.keys())} widget.configure(**direct) set_ttk_style( widget, None, **{i: styles[i] for i in styles.keys() - direct.keys()}) def clean_styles(widget, styles) -> dict: allowed_styles = widget.config() or {} cleaned_styles = {} for style in styles: if style in allowed_styles: cleaned_styles[style] = styles[style] return cleaned_styles def system_fonts(): fonts = sorted(list(font.families())) fonts = list(filter(lambda x: not x.startswith("@"), fonts)) return fonts def suppress_change(func): @functools.wraps(func) def inner(self, *args, **kwargs): temp = self._on_change self._on_change = None func(self, *args, **kwargs) self._on_change = temp return inner class EditableMixin: def set_validator(self, validator, *args, **kwargs) -> None: self.configure( validate='all', validatecommand=( self.register(lambda val: validator(val, *args, **kwargs)), "%P" ) ) def on_change(self, callback, *args, **kwargs): self._var.trace("w", lambda *_: callback(*args, **kwargs)) def on_entry(self, callback, *args, **kwargs): self.bind("<KeyRelease>", lambda *_: callback(*args, **kwargs)) def disabled(self, flag): if flag: self.config(state='disabled') else: self.config(state='normal') def get(self): return self._var.get() class ContextMenuMixin: _on_context_menu = None @functools.wraps(MenuUtils.make_dynamic, assigned=('__doc__',)) def make_menu(self, templates, parent=None, dynamic=True, **cnf): return MenuUtils.make_dynamic(templates, parent or self, self.style, dynamic, **cnf) def set_up_context(self, templates, **cnf): self.context_menu = self.make_menu(templates, **cnf) MenuUtils.bind_all_context( self, lambda event: ContextMenuMixin.popup(event, self.context_menu), add='+' ) @staticmethod def popup(event, menu): MenuUtils.popup(event, menu) @staticmethod def add_context_menu(menu, widget): MenuUtils.bind_context( widget, lambda event: ContextMenuMixin.popup(event, menu), add='+' ) class ScrollableInterface: def on_mousewheel(self, event): raise NotImplementedError("on_mousewheel method is required") def handle_wheel(self, widget, event): delta = 0 if platform_is(LINUX): delta = 1 if event.num == 5 else -1 elif platform_is(MAC): delta = -1 * event.delta elif platform_is(WINDOWS): delta = -1 * (event.delta // 120) if event.state & EventMask.CONTROL: widget.xview_scroll(delta, "units") else: widget.yview_scroll(delta, "units") def scroll_position(self): raise NotImplementedError("Scroll position required for scroll transfer") def scroll_transfer(self) -> bool: return False class CenterWindowMixin: GEOMETRY_RGX = re.compile( r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)" ) def enable_centering(self): self.centered = False self._vis_bind = self.bind('<Visibility>', self.center, '+') if platform_is(WINDOWS): self._conf_bind = self.bind('<Configure>', self.center, '+') self.event_generate('<Configure>') def center(self, *_): if not self.centered: self.update_idletasks() ref_geometry = self.position_ref.get_geometry() geometry = self.get_geometry() if ref_geometry is None or geometry is None: logging.error("Failed to fetch geometry") return r_width, r_height, r_x, r_y = ref_geometry width, height, *_ = geometry x = int((r_width - width) / 2) + r_x y = int((r_height - height) 
/ 2) + r_y self.geometry("+{}+{}".format(x, y)) self.centered = True if self.winfo_width() != 1 else False else: if hasattr(self, '_conf_bind'): self.unbind(self._conf_bind) if hasattr(self, '_vis_bind'): self.unbind(self._vis_bind) def get_geometry(self): search = self.GEOMETRY_RGX.search(self.geometry()) if search is None: return None return tuple(map(int, search.groups())) class PositionMixin: def get_pos(self, widget, **kwargs): side = kwargs.get("side", "auto") padding = kwargs.get("padding", 2) if "width" in kwargs and "height" in kwargs: w_width = kwargs.get("width") w_height = kwargs.get("height") else: self.re_calibrate() self.update_idletasks() w_width = self.width w_height = self.height widget.update_idletasks() x, y, width, height = widget.winfo_rootx(), widget.winfo_rooty(), widget.width, widget.height right = x left = x - w_width + width top = y - w_height - padding bottom = y + height + padding if side == "nw": return left, top elif side == "ne": return right, top elif side == "sw": return left, bottom elif side == "se": return right, bottom else: win_bounds = 0, 0, widget.winfo_screenwidth(), widget.winfo_screenheight() offset_b = win_bounds[3] - bottom offset_t = y - win_bounds[1] offset_l = x - win_bounds[0] offset_r = win_bounds[2] - right x_pos = left if offset_l >= offset_r or offset_l > w_width else right y_pos = bottom if offset_b >= offset_t or offset_b > w_height else top return x_pos, y_pos def post(self, widget, **kwargs): self.set_geometry(self.get_pos(widget, **kwargs)) class _Tooltip(tk.Toplevel): Y_CLEARANCE = 10 def __init__(self, style: StyleDelegator, xy: tuple, render, master=None): super().__init__(master) self.geometry(f"+{self.winfo_screenwidth() + 1000}+{self.winfo_screenheight() + 1000}") self.style = style self.overrideredirect(True) self.lift(master) render(self) self.config(**style.bright_highlight) self._position(xy) def _position(self, xy): self.update_idletasks() w, h = self.winfo_width(), self.winfo_height() x, y = xy x -= w // 2 y = y - self.Y_CLEARANCE - h if y - self.Y_CLEARANCE - h > 0 else y + self.Y_CLEARANCE x -= max(0, (x + w) - self.winfo_screenwidth()) x = max(0, x) self.geometry('+{}+{}'.format(x, y)) self.lift() class Widget: s_style = None s_window = None __readonly_options = {"class", "container"} def setup(self, _=None): self._allow_drag = False self._drag_setup = False self._tooltip_text = None self._tooltip_ev = None self._tooltip_win = None self._tooltip_bound = False self._tooltip_delay = 1500 @property def allow_drag(self): return self._allow_drag @allow_drag.setter def allow_drag(self, flag: bool): self._allow_drag = flag if self._allow_drag and not self._drag_setup: self.bind_all('<Motion>', self._drag_handler) self.bind_all('<ButtonRelease-1>', self._drag_handler) self._drag_setup = True def _drag_handler(self, event): if not self.allow_drag: return if event.type.value == "6": if event.state & EventMask.MOUSE_BUTTON_1 and self.window.drag_window is None: self.window.drag_context = self self.window.drag_window = DragWindow(self.window) self.render_drag(self.window.drag_window) self.window.drag_window.set_position(event.x_root, event.y_root) self.on_drag_start(event) elif self.window.drag_window is not None: self.window.drag_window.set_position(event.x_root, event.y_root) elif event.type.value == "5": if self.window.drag_window: self.window.drag_window.destroy() self.window.drag_window = None event_position = self.event_first(event, self, Widget) if isinstance(event_position, Widget): 
event_position.accept_context(self.window.drag_context) self.window.drag_context = None def accept_context(self, context): logging.info(f"Accepted context {context}") def render_drag(self, window): tk.Label(window, text="Item", bg="#f7f7f7").pack() def on_drag_start(self, *args): pass def config_all(self, cnf=None, **kwargs): self.config(cnf, **kwargs) def bind_all(self, sequence=None, func=None, add=None): return self.bind(sequence, func, add) @property def width(self) -> int: return self.winfo_width() @property def height(self) -> int: return self.winfo_height() def disabled(self, flag: bool) -> None: if flag: self.config(**clean_styles(self, {"state": tk.DISABLED})) else: self.config(**clean_styles(self, {"state": tk.NORMAL})) @staticmethod def containing(x, y, widget): try: return widget.winfo_containing(x, y) except KeyError: return None @staticmethod def event_in(event, widget): x, y = event.x_root, event.y_root x1, y1, x2, y2 = ( widget.winfo_rootx(), widget.winfo_rooty(), widget.winfo_rootx() + widget.winfo_width(), widget.winfo_rooty() + widget.winfo_height(), ) return x1 < x < x2 and y1 < y < y2 @classmethod def event_first(cls, event, widget, class_: type, ignore=None): check = cls.containing(event.x_root, event.y_root, widget) while not isinstance(check, Application) and check is not None: if isinstance(check, class_) and check != ignore: return check check = check.nametowidget(check.winfo_parent()) return None @classmethod def ancestor_first(cls, start_from, class_: type, ignore=None): check = start_from.nametowidget(start_from.winfo_parent()) while not isinstance(check, Application) and check is not None: if isinstance(check, class_) and check != ignore: return check check = check.nametowidget(check.winfo_parent())
MIT License
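Since exercising the hoverset `Widget` mixin needs the full framework, here is a hedged sketch of the same bounding-box computation with plain tkinter calls; it reproduces what `absolute_bounds` returns:

import tkinter as tk

root = tk.Tk()
label = tk.Label(root, text="hello")
label.pack()
root.update_idletasks()

x1, y1 = label.winfo_rootx(), label.winfo_rooty()
x2, y2 = x1 + label.winfo_width(), y1 + label.winfo_height()
print((x1, y1, x2, y2))  # screen-space bounding box, as absolute_bounds returns it
root.destroy()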
yujiabao/r2a
r2a/src/train_utils.py
evaluate_batch
python
def evaluate_batch(model, optimizer, task, batch, src_batches, tar_batches, args, writer=None): for key in model.keys(): model[key].eval() if key in optimizer: optimizer[key].zero_grad() if args.l_wd != 0 and (args.mode == 'train_r2a' or args.mode == 'test_r2a'): model['critic'].train() i = 0 while True: tar_text, _, _, _, _, tar_text_len, _, _, _ = next(tar_batches) tar_text, tar_text_len = _to_tensor([tar_text, tar_text_len], args.cuda) src_text, _, _, _, _, src_text_len, _, _, _ = next(src_batches) src_text, src_text_len = _to_tensor([src_text, src_text_len], args.cuda) tar_hidden, _ = model['encoder'](tar_text, tar_text_len, False) src_hidden, _ = model['encoder'](src_text, src_text_len, False) invar_tar_hidden = model['transform'](tar_hidden) invar_src_hidden = model['transform'](src_hidden) optimizer['critic'].zero_grad() loss_wd, grad_penalty = model['critic']( invar_src_hidden.detach(), src_text_len, invar_tar_hidden.detach(), tar_text_len, False) loss = -loss_wd + args.l_grad_penalty * grad_penalty loss.backward() optimizer['critic'].step() i += 1 if i >= args.critic_steps and _to_number(loss_wd) > 0: break model['critic'].eval() text, rat_freq, rationale, gold_att, pred_att, text_len, label, raw, _ = batch text, text_len, rat_freq, rationale, gold_att, pred_att, label = _to_tensor( [text, text_len, rat_freq, rationale, gold_att, pred_att, label], args.cuda) text_mask = _get_mask(text_len, args.cuda) hidden, loss_src_lm = model['encoder'](text, text_len, True) invar_hidden = model['transform'](hidden) loss_src_lm = np.ones(len(raw)) * _to_number(loss_src_lm) loss_tar_lm = 0 if args.l_wd != 0 and (args.mode == 'test_r2a' or args.mode == 'train_r2a'): tar_text, _, _, _, _, tar_text_len, _, _, _ = next(tar_batches) tar_text_len = tar_text_len[:len(raw)] tar_text = tar_text[:len(raw),:max(tar_text_len)] tar_text, tar_text_len = _to_tensor([tar_text, tar_text_len], args.cuda) tar_hidden, loss_tar_lm = model['encoder'](tar_text, tar_text_len, True) invar_tar_hidden = model['transform'](tar_hidden) loss_wd, _ = model['critic'](invar_hidden, text_len, invar_tar_hidden, tar_text_len, True) loss_wd = np.ones(len(raw)) * _to_number(loss_wd) else: loss_wd = np.zeros(len(raw)) loss_tar_lm = np.ones(len(raw)) * _to_number(loss_tar_lm) out, att, log_att = model[task](hidden, text_mask) if args.num_classes[task] == 1: loss_lbl = _to_numpy(F.mse_loss(torch.sigmoid(out.squeeze(1)), label, reduce=False)) pred_lbl = _to_numpy(torch.sigmoid(out.squeeze(1))) else: loss_lbl = _to_numpy(F.cross_entropy(out, label, reduce=False)) pred_lbl = np.argmax(_to_numpy(out), axis=1) true_lbl = _to_numpy(label) if _to_number(torch.min(torch.sum(rationale, dim=1))) < 0.5: rationale = rationale + 1e-6 normalized_rationale = rationale * text_mask normalized_rationale = normalized_rationale / torch.sum(normalized_rationale, dim=1, keepdim=True) if args.mode == 'train_clf' or args.mode == 'test_clf': if args.att_target == 'gold_att': target = gold_att elif args.att_target == 'rationale': target = normalized_rationale elif args.att_target == 'pred_att': target = pred_att else: raise ValueError('Invalid supervision type.') log_pred_att = torch.log(pred_att) elif args.mode == 'train_r2a': pred_att, log_pred_att = model['r2a']( invar_hidden, rationale, rat_freq, text_len, text_mask) else: raise ValueError('Invalid mode') loss_a2r = 1 - F.cosine_similarity(att, normalized_rationale) loss_a2r = _to_numpy(loss_a2r) loss_r2a = 1 - F.cosine_similarity(att, pred_att) loss_r2a = _to_numpy(loss_r2a) if writer: gold_att, rationale, 
pred_att, rat_freq = _to_numpy([att, rationale, pred_att, rat_freq]) data_utils.write(writer, task, raw, true_lbl, gold_att, rationale, pred_att, rat_freq) return { 'true_lbl': true_lbl, 'pred_lbl': pred_lbl, 'loss_r2a': loss_r2a, 'loss_lbl': loss_lbl, 'loss_wd' : loss_wd, 'loss_a2r': loss_a2r, 'loss_src_lm': loss_src_lm, 'loss_tar_lm': loss_tar_lm, }
Evaluate the network on a batch of examples.

    model: a dictionary of networks
    optimizer: the optimizer that updates the network weights
    task: the name of the task
    batch: a batch of examples for the specified task
    src_batches: an iterator that generates a batch of source examples
        (used for estimating the wasserstein distance)
    tar_batches: an iterator that generates a batch of target examples
        (used for estimating the wasserstein distance)
    args: the overall arguments
    writer: a file object. If not None, will write the prediction result and
        the generated attention to the file
https://github.com/yujiabao/r2a/blob/7cef16ff7a4ce9c2adcfd18f2e99a2cae3ce87cb/r2a/src/train_utils.py#L509-L679
import os import sys import torch import torch.autograd as autograd from itertools import chain import torch.nn.functional as F import torch.nn as nn import torch.utils.data as data import torch.optim as optim import data_utils as data_utils import model_utils as model_utils import datetime import sklearn.metrics as metrics import time import numpy as np from termcolor import colored import math from tqdm import tqdm def _to_tensor(x_list, cuda=True): if type(x_list) is not list: x_list = [x_list] res_list = [] for x in x_list: x = torch.from_numpy(x) if cuda: x = x.cuda() res_list.append(x) if len(res_list) == 1: return res_list[0] else: return tuple(res_list) def _to_numpy(x_list): if type(x_list) is not list: x_list = [x_list] res_list = [] for x in x_list: res_list.append(x.data.cpu().numpy()) if len(res_list) == 1: return res_list[0] else: return tuple(res_list) def _to_number(x): if isinstance(x, torch.Tensor): return x.item() else: return x def _compute_score(y_pred, y_true, num_classes=2): if num_classes == 2: average = "binary" else: average = "macro" acc = metrics.accuracy_score( y_pred=y_pred, y_true=y_true) f1 = metrics.f1_score( y_pred=y_pred, y_true=y_true, average=average) recall = metrics.recall_score( y_pred=y_pred, y_true=y_true, average=average) precision = metrics.precision_score(y_pred=y_pred, y_true=y_true, average=average) return acc, f1, recall, precision def _init_optimizer(model, args): optimizer = {} optimizer['critic'] = torch.optim.Adam( filter(lambda p: p.requires_grad, model['critic'].parameters()) , lr=args.lr) optimizer['encoder'] = torch.optim.Adam( filter(lambda p: p.requires_grad, model['encoder'].parameters()) , lr=args.lr) optimizer['r2a'] = torch.optim.Adam( filter(lambda p: p.requires_grad, chain(model['r2a'].parameters(), model['transform'].parameters())), lr=args.lr) for task in args.src_dataset: if task != '': optimizer[task] = torch.optim.Adam( filter(lambda p: p.requires_grad, model[task].parameters()) , lr=args.lr) scheduler = {} scheduler['encoder'] = torch.optim.lr_scheduler.ReduceLROnPlateau( optimizer['encoder'], 'min', patience=args.patience, factor=0.1, verbose=True) scheduler['r2a'] = torch.optim.lr_scheduler.ReduceLROnPlateau( optimizer['r2a'], 'min', patience=args.patience, factor=0.1, verbose=True) for task in args.src_dataset: if task != '': scheduler[task] = torch.optim.lr_scheduler.ReduceLROnPlateau( optimizer[task], 'min', patience=args.patience, factor=0.1, verbose=True) return optimizer, scheduler def _print_train_res(train_res, args): print('=== TRAIN ===') for task in args.src_dataset: print("{:15s} {:s} {:.2f} {:s} {:.4f}, {:s} {:.4f} * {:.1e} {:s} {:.4f} * {:.1e}, {:s} {:.4f} * {:.1e}," " {:s} {:.4f} * {:.1e}, {:s} {:.4f} * {:.1e}".format( task, colored("ep:", "cyan"), train_res[-1][task]['epoch'], colored("l_lbl", "red"), sum([res[task]['loss_lbl'] for res in train_res])/len(train_res), colored("l_wd ", "red"), sum([res[task]['loss_wd'] for res in train_res])/len(train_res), args.l_wd, colored("l_src_lm ", "red"), sum([res[task]['loss_src_lm'] for res in train_res])/len(train_res), args.l_lm, colored("l_tar_lm ", "red"), sum([res[task]['loss_tar_lm'] for res in train_res])/len(train_res), args.l_lm, colored("l_r2a", "red"), sum([res[task]['loss_r2a'] for res in train_res])/len(train_res), args.l_r2a, colored("l_a2r", "red"), sum([res[task]['loss_a2r'] for res in train_res])/len(train_res), args.l_a2r, )) def print_dev_res(dev_res, args): loss_tot = sum([res['loss_total'] for res in dev_res])/len(dev_res) loss_lbl = 
sum([res['loss_lbl'] for res in dev_res])/len(dev_res) loss_wd = sum([res['loss_wd'] for res in dev_res])/len(dev_res) loss_r2a = sum([res['loss_r2a'] for res in dev_res])/len(dev_res) loss_lbl_r2a = sum([res['loss_lbl_r2a'] for res in dev_res])/len(dev_res) loss_a2r = sum([res['loss_a2r'] for res in dev_res])/len(dev_res) loss_encoder = sum([res['loss_encoder'] for res in dev_res])/len(dev_res) loss_src_lm = sum([res['loss_src_lm'] for res in dev_res])/len(dev_res) loss_tar_lm = sum([res['loss_tar_lm'] for res in dev_res])/len(dev_res) acc = sum([res['acc'] for res in dev_res])/len(dev_res) recall = sum([res['recall'] for res in dev_res])/len(dev_res) f1 = sum([res['f1'] for res in dev_res])/len(dev_res) precision = sum([res['precision'] for res in dev_res])/len(dev_res) print("{:15s} {:s} {:.4f}, {:s} {:.4f} * {:.1e}, {:s} {:.4f} * {:.1e}, {:s} {:.4f} * {:.1e}, {:s} {:.4f} * {:.1e}, " "{:s} {:.4f} * {:.1e}".format( 'overall', colored("l_lbl", "red"), loss_lbl, colored("l_wd ", "red"), loss_wd, args.l_wd, colored("l_src_lm ", "red"), loss_src_lm, args.l_lm, colored("l_tar_lm ", "red"), loss_tar_lm, args.l_lm, colored("l_r2a", "red"), loss_r2a, args.l_r2a, colored("l_a2r", "red"), loss_a2r, args.l_a2r, )) return { 'loss_total': loss_tot, 'loss_lbl': loss_lbl, 'loss_lbl_r2a': loss_lbl_r2a, 'loss_wd': loss_wd, 'loss_r2a': loss_r2a, 'loss_a2r': loss_a2r, 'loss_src_lm': loss_src_lm, 'loss_tar_lm': loss_tar_lm, 'loss_encoder':loss_encoder, 'acc': acc, 'recall': recall, 'f1': f1, 'precision': precision, } def train(train_data, dev_data, model, args): timestamp = str(int(time.time() * 1e7)) out_dir = os.path.abspath(os.path.join(os.path.curdir, "tmp-runs", timestamp)) print("Saving the model to {}\n".format(out_dir)) if not os.path.exists(out_dir): os.makedirs(out_dir) best = 100 best_path = "" sub_cycle = 0 optimizer, scheduler = _init_optimizer(model, args) tar_train_batches = None if (args.mode == 'train_clf' or args.mode == 'test_clf') else data_utils.data_loader(train_data[args.tar_dataset], args.batch_size, oneEpoch=False) src_unlbl_train_batches = None if (args.mode == 'train_clf' or args.mode == 'test_clf') else data_utils.data_loader(train_data[args.src_dataset[0]], args.batch_size, oneEpoch=False) src_train_batches = data_utils.data_dict_loader(train_data, args.src_dataset, args.batch_size) tar_dev_data = None if args.tar_dataset == '' else dev_data[args.tar_dataset] ep = 1 while True: start = time.time() train_res = [] if args.dispatcher: for i in range(args.epoch_size): cur_res = train_batch( model, next(src_train_batches), src_unlbl_train_batches, tar_train_batches, optimizer, args) train_res.append(cur_res) else: for batch in tqdm(range(args.epoch_size), dynamic_ncols=True): cur_res = train_batch( model, next(src_train_batches), src_unlbl_train_batches, tar_train_batches, optimizer, args) train_res.append(cur_res) end = time.time() print("\n{}, Updates {:5d}, Time Cost: {} seconds".format( datetime.datetime.now().strftime('%02y/%02m/%02d %H:%M:%S'), ep*args.epoch_size, end-start)) _print_train_res(train_res, args) print('=== DEV ===') dev_res = [] for task in args.src_dataset: writer = open(os.path.join(out_dir, str(ep)) + '.' 
+ task + '.out', 'w') cur_res = evaluate_task( dev_data[task], task, tar_dev_data, model, optimizer, args, writer=writer) writer.close() dev_res.append(cur_res) scheduler[task].step(cur_res['loss_lbl']) dev_res = print_dev_res(dev_res, args) scheduler['encoder'].step(dev_res['loss_encoder']) scheduler['r2a'].step(dev_res['loss_r2a']) if (args.mode != 'train_clf' and dev_res['loss_lbl_r2a'] < best) or (args.mode == 'train_clf' and dev_res['loss_lbl'] < best): best = dev_res['loss_lbl_r2a'] if args.mode != 'train_clf' else dev_res['loss_lbl'] best_path = os.path.join(out_dir, str(ep)) model_utils.save_model(model, best_path) sub_cycle = 0 else: sub_cycle += 1 if sub_cycle == args.patience*2: break ep += 1 print("End of training. Restore the best weights") model = model_utils.load_saved_model(best_path, args) print('=== BEST DEV ===') dev_res = [] for task in args.src_dataset: cur_res = evaluate_task( dev_data[task], task, tar_dev_data, model, None, args) dev_res.append(cur_res) dev_res = print_dev_res(dev_res, args) print("Deleting model snapshot") os.system("rm -rf {}/*".format(out_dir)) if args.save: print("Save the best model to director saved-runs") best_dir = os.path.abspath(os.path.join(os.path.curdir, "saved-runs", args.mode, "-".join(args.src_dataset) + '_' + args.tar_dataset + '_' + timestamp)) if not os.path.exists(best_dir): os.makedirs(best_dir) best_dir = os.path.join(best_dir, 'best') model_utils.save_model(model, best_dir) with open(best_dir+'_args.txt', 'w') as f: for attr, value in sorted(args.__dict__.items()): f.write("{}={}\n".format(attr.upper(), value)) return dev_res, best_dir, model return dev_res, out_dir, model def _get_mask(text_len, cuda): idxes = torch.arange(0, int(torch.max(text_len)), out=torch.LongTensor(torch.max(text_len).item())).unsqueeze(0) if cuda: idxes = idxes.cuda() text_mask = (idxes < text_len.unsqueeze(1)).float().detach() return text_mask def train_batch(model, src_batch, src_unlbl_batches, tar_batches, optimizer, args): for key in model.keys(): model[key].eval() if key in optimizer: optimizer[key].zero_grad() if args.l_wd != 0 and args.mode == 'train_r2a': model['critic'].train() i = 0 while True: tar_text, _, _, _, _, tar_text_len, _, _, _ = next(tar_batches) tar_text, tar_text_len = _to_tensor([tar_text, tar_text_len], args.cuda) src_text, _, _, _, _, src_text_len, _, _, _ = next(src_unlbl_batches) src_text, src_text_len = _to_tensor([src_text, src_text_len], args.cuda) tar_hidden, _ = model['encoder'](tar_text, tar_text_len, False) src_hidden, _ = model['encoder'](src_text, src_text_len, False) invar_tar_hidden = model['transform'](tar_hidden) invar_src_hidden = model['transform'](src_hidden) optimizer['critic'].zero_grad() loss_wd, grad_penalty = model['critic']( invar_src_hidden.detach(), src_text_len, invar_tar_hidden.detach(), tar_text_len, False) loss = -loss_wd + args.l_grad_penalty * grad_penalty loss.backward() optimizer['critic'].step() i += 1 if i >= args.critic_steps and _to_number(loss_wd) > 0: break for key in model.keys(): model[key].train() if key in optimizer: optimizer[key].zero_grad() model['critic'].eval() result = {} for task, batch in src_batch.items(): text, rat_freq, rationale, gold_att, pred_att, text_len, label, _, epoch = batch text, text_len, rat_freq, rationale, gold_att, pred_att, label = _to_tensor( [text, text_len, rat_freq, rationale, gold_att, pred_att, label], args.cuda) text_mask = _get_mask(text_len, args.cuda) hidden, loss_src_lm = model['encoder'](text, text_len, True) invar_hidden = 
model['transform'](hidden) loss_tar_lm = 0 if args.l_wd != 0 and args.mode == 'train_r2a': tar_text, _, _, _, _, tar_text_len, _, _, _ = next(tar_batches) tar_text, tar_text_len = _to_tensor([tar_text, tar_text_len], args.cuda) tar_hidden, loss_tar_lm = model['encoder'](tar_text, tar_text_len, True) invar_tar_hidden = model['transform'](tar_hidden) loss_wd, _ = model['critic'](invar_hidden, text_len, invar_tar_hidden, tar_text_len, True) else: loss_wd = 0 if args.mode == 'train_clf' and not args.fine_tune_encoder: hidden = hidden.detach() out, att, log_att = model[task](hidden, text_mask) if args.num_classes[task] == 1: loss_lbl = F.mse_loss(torch.sigmoid(out.squeeze(1)), label) else: loss_lbl = F.cross_entropy(out, label) if _to_number(torch.min(torch.sum(rationale, dim=1))) < 0.5: rationale = rationale + 1e-6 normalized_rationale = rationale * text_mask normalized_rationale = normalized_rationale / torch.sum(normalized_rationale, dim=1, keepdim=True) if args.mode == 'train_clf': if args.att_target == 'gold_att': pred_att = gold_att elif args.att_target == 'rationale': pred_att = normalized_rationale elif args.att_target == 'pred_att': pred_att = pred_att else: raise ValueError('Invalid supervision type.') log_pred_att = torch.log(pred_att) elif args.mode == 'train_r2a': pred_att, log_pred_att = model['r2a']( invar_hidden, rationale, rat_freq, text_len, text_mask) else: raise ValueError('Invalid mode') loss_a2r = 0 loss_r2a = 1 - torch.mean(F.cosine_similarity(att, pred_att)) if args.mode == 'train_r2a': loss_a2r = 1 - torch.mean(F.cosine_similarity(pred_att, normalized_rationale)) if _to_number(loss_r2a) < 0.1: loss_r2a = 0 else: loss_r2a = loss_r2a - 0.1 if _to_number(loss_a2r) < 0.1: loss_a2r = 0 else: loss_a2r = loss_a2r - 0.1 loss = loss_lbl + args.l_r2a * loss_r2a + args.l_wd * loss_wd + args.l_lm * (loss_src_lm + loss_tar_lm) + args.l_a2r * loss_a2r loss.backward() optimizer[task].step() result[task] = { 'loss_lbl': _to_number(loss_lbl), 'loss_r2a': _to_number(loss_r2a), 'loss_wd': _to_number(loss_wd), 'loss_src_lm': _to_number(loss_src_lm), 'loss_tar_lm': _to_number(loss_tar_lm), 'loss_a2r' : _to_number(loss_a2r), 'epoch': epoch, } optimizer['encoder'].step() optimizer['r2a'].step() return result
MIT License
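A minimal sketch of the cosine-similarity attention losses (`loss_r2a`, `loss_a2r`) computed inside `evaluate_batch` above; random tensors stand in for the task-model attention, the R2A-predicted attention and the rationale mask produced by the real models:

import torch
import torch.nn.functional as F

batch, seq_len = 4, 12
att = torch.softmax(torch.randn(batch, seq_len), dim=1)        # task-model attention
pred_att = torch.softmax(torch.randn(batch, seq_len), dim=1)   # R2A-predicted attention
rationale = (torch.rand(batch, seq_len) > 0.7).float() + 1e-6  # binary rationale mask
normalized_rationale = rationale / rationale.sum(dim=1, keepdim=True)

loss_r2a = 1 - F.cosine_similarity(att, pred_att)              # per-example, as in the record
loss_a2r = 1 - F.cosine_similarity(att, normalized_rationale)
print(loss_r2a.shape, float(loss_a2r.mean()))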
bihealth/sodar_core
projectroles/forms.py
get_user_widget
python
def get_user_widget(
    scope='all',
    project=None,
    exclude=None,
    forward=None,
    url=None,
    widget_class=None,
):
    url = url or 'projectroles:ajax_autocomplete_user'
    wg = {'url': url, 'forward': [dal_forward.Const(scope, 'scope')]}
    if project:
        p_uuid = (
            str(project.sodar_uuid)
            if isinstance(project, Project)
            else str(project)
        )
        wg['forward'].append(dal_forward.Const(p_uuid, 'project'))
    if forward and isinstance(forward, list):
        wg['forward'] += forward
    if exclude:
        wg['forward'].append(
            dal_forward.Const(
                [
                    str(u.sodar_uuid) if isinstance(u, User) else u
                    for u in exclude
                ],
                'exclude',
            )
        )
    if widget_class:
        return widget_class(**wg)
    return SODARUserAutocompleteWidget(**wg)
Get a user autocomplete widget for your form.

    :param scope: Scope of users to include: "all"/"project"/"project_exclude"
    :param project: Project object or project UUID string (optional)
    :param exclude: List of User objects or User UUIDs to exclude
    :param forward: Parameters to forward to autocomplete view (optional)
    :param url: Autocomplete Ajax URL name override (optional)
    :param widget_class: Widget class override (optional)
    :return: SODARUserAutocompleteWidget or an overridden widget class
https://github.com/bihealth/sodar_core/blob/4176f762b77fae4dfdf24d51328938b94d3a64ce/projectroles/forms.py#L130-L177
import json import logging from django import forms from django.conf import settings from django.contrib import auth from django.core.exceptions import ValidationError from django.urls import reverse from django.utils import timezone from pagedown.widgets import PagedownWidget from dal import autocomplete, forward as dal_forward from projectroles.models import ( Project, Role, RoleAssignment, ProjectInvite, RemoteSite, SODAR_CONSTANTS, APP_SETTING_VAL_MAXLENGTH, ) from projectroles.plugins import get_active_plugins from projectroles.utils import ( get_display_name, get_user_display_name, build_secret, ) from projectroles.app_settings import AppSettingAPI, APP_SETTING_LOCAL_DEFAULT PROJECT_ROLE_OWNER = SODAR_CONSTANTS['PROJECT_ROLE_OWNER'] PROJECT_ROLE_CONTRIBUTOR = SODAR_CONSTANTS['PROJECT_ROLE_CONTRIBUTOR'] PROJECT_ROLE_DELEGATE = SODAR_CONSTANTS['PROJECT_ROLE_DELEGATE'] PROJECT_ROLE_GUEST = SODAR_CONSTANTS['PROJECT_ROLE_GUEST'] PROJECT_TYPE_CATEGORY = SODAR_CONSTANTS['PROJECT_TYPE_CATEGORY'] PROJECT_TYPE_PROJECT = SODAR_CONSTANTS['PROJECT_TYPE_PROJECT'] PROJECT_TYPE_CHOICES = [ ( PROJECT_TYPE_CATEGORY, get_display_name(PROJECT_TYPE_CATEGORY, title=True), ), (PROJECT_TYPE_PROJECT, get_display_name(PROJECT_TYPE_PROJECT, title=True)), ] SUBMIT_STATUS_OK = SODAR_CONSTANTS['SUBMIT_STATUS_OK'] SUBMIT_STATUS_PENDING = SODAR_CONSTANTS['SUBMIT_STATUS_PENDING'] SUBMIT_STATUS_PENDING_TASKFLOW = SODAR_CONSTANTS['SUBMIT_STATUS_PENDING'] SITE_MODE_SOURCE = SODAR_CONSTANTS['SITE_MODE_SOURCE'] SITE_MODE_TARGET = SODAR_CONSTANTS['SITE_MODE_TARGET'] APP_SETTING_SCOPE_PROJECT = SODAR_CONSTANTS['APP_SETTING_SCOPE_PROJECT'] APP_NAME = 'projectroles' INVITE_EXPIRY_DAYS = settings.PROJECTROLES_INVITE_EXPIRY_DAYS User = auth.get_user_model() class SODARFormMixin: def __init__(self, *args, **kwargs): self.logger = logging.getLogger(__name__) super().__init__(*args, **kwargs) def add_error(self, field, error): if isinstance(error, ValidationError): log_err = ';'.join(error.messages) else: log_err = error log_msg = 'Field "{}": {}'.format(field, log_err) if hasattr(self, 'current_user') and self.current_user: log_msg += ' (user={})'.format(self.current_user.username) self.logger.error(log_msg) super().add_error(field, error) class SODARForm(SODARFormMixin, forms.Form): class SODARModelForm(SODARFormMixin, forms.ModelForm): class SODARPagedownWidget(PagedownWidget): class Media: css = {'all': ['projectroles/css/pagedown.css']} class SODARUserAutocompleteWidget(autocomplete.ModelSelect2): def filter_choices_to_render(self, selected_choices): self.choices.queryset = self.choices.queryset.filter( sodar_uuid__in=[c for c in selected_choices if c] ) class SODARUserRedirectWidget(SODARUserAutocompleteWidget): autocomplete_function = 'autocomplete_redirect'
MIT License
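A hedged sketch of consuming `get_user_widget` in a Django form; the helper name, field name, queryset and `to_field_name` choice are illustrative assumptions rather than SODAR Core's own form code:

from django import forms
from django.contrib import auth
from projectroles.forms import get_user_widget

User = auth.get_user_model()


def build_user_field(project):
    """Form field whose choices come from the project-scoped user autocomplete."""
    return forms.ModelChoiceField(
        queryset=User.objects.all(),
        to_field_name='sodar_uuid',  # assumption: match users by their sodar_uuid
        widget=get_user_widget(scope='project', project=project),
    )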
labd/commercetools-python-sdk
src/commercetools/services/customer_groups.py
CustomerGroupService.update_by_key
python
def update_by_key(
    self,
    key: str,
    version: int,
    actions: typing.List[CustomerGroupUpdateAction],
    *,
    expand: OptionalListStr = None,
    force_update: bool = False,
) -> CustomerGroup:
    params = self._serialize_params({"expand": expand}, _CustomerGroupUpdateSchema)
    update_action = CustomerGroupUpdate(version=version, actions=actions)
    return self._client._post(
        endpoint=f"customer-groups/key={key}",
        params=params,
        data_object=update_action,
        response_class=CustomerGroup,
        force_update=force_update,
    )
Updates a customer group by Key.
https://github.com/labd/commercetools-python-sdk/blob/d8ec285f08d56ede2e4cad45c74833f5b609ab5c/src/commercetools/services/customer_groups.py#L115-L133
import typing from commercetools.helpers import RemoveEmptyValuesMixin from commercetools.platform.models.customer_group import ( CustomerGroup, CustomerGroupDraft, CustomerGroupPagedQueryResponse, CustomerGroupUpdate, CustomerGroupUpdateAction, ) from commercetools.typing import OptionalListStr from . import abstract, traits class _CustomerGroupQuerySchema( traits.ExpandableSchema, traits.SortableSchema, traits.PagingSchema, traits.QuerySchema, ): pass class _CustomerGroupUpdateSchema(traits.ExpandableSchema, traits.VersionedSchema): pass class _CustomerGroupDeleteSchema(traits.VersionedSchema, traits.ExpandableSchema): pass class CustomerGroupService(abstract.AbstractService): def get_by_id(self, id: str, *, expand: OptionalListStr = None) -> CustomerGroup: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._get( endpoint=f"customer-groups/{id}", params=params, response_class=CustomerGroup, ) def get_by_key(self, key: str, *, expand: OptionalListStr = None) -> CustomerGroup: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._get( endpoint=f"customer-groups/key={key}", params=params, response_class=CustomerGroup, ) def query( self, *, expand: OptionalListStr = None, sort: OptionalListStr = None, limit: int = None, offset: int = None, with_total: bool = None, where: OptionalListStr = None, predicate_var: typing.Dict[str, str] = None, ) -> CustomerGroupPagedQueryResponse: params = self._serialize_params( { "expand": expand, "sort": sort, "limit": limit, "offset": offset, "with_total": with_total, "where": where, "predicate_var": predicate_var, }, _CustomerGroupQuerySchema, ) return self._client._get( endpoint="customer-groups", params=params, response_class=CustomerGroupPagedQueryResponse, ) def create( self, draft: CustomerGroupDraft, *, expand: OptionalListStr = None ) -> CustomerGroup: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._post( endpoint="customer-groups", params=params, data_object=draft, response_class=CustomerGroup, ) def update_by_id( self, id: str, version: int, actions: typing.List[CustomerGroupUpdateAction], *, expand: OptionalListStr = None, force_update: bool = False, ) -> CustomerGroup: params = self._serialize_params({"expand": expand}, _CustomerGroupUpdateSchema) update_action = CustomerGroupUpdate(version=version, actions=actions) return self._client._post( endpoint=f"customer-groups/{id}", params=params, data_object=update_action, response_class=CustomerGroup, force_update=force_update, )
MIT License
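A hedged sketch of renaming a customer group through `update_by_key`; the pre-configured `client`, its `customer_groups` attribute and the `CustomerGroupChangeNameAction` import are assumptions about the SDK surface rather than part of this record:

from commercetools.platform.models.customer_group import CustomerGroupChangeNameAction


def rename_group(client, key, new_name):
    # client is assumed to be an already-configured commercetools Client
    group = client.customer_groups.get_by_key(key)
    return client.customer_groups.update_by_key(
        key=key,
        version=group.version,  # updates are checked against the current version
        actions=[CustomerGroupChangeNameAction(name=new_name)],
    )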
dmlc/gluon-cv
gluoncv/torch/data/transforms/videotransforms/video_transforms.py
CenterCrop.__call__
python
def __call__(self, clip):
    h, w = self.size
    if isinstance(clip[0], np.ndarray):
        im_h, im_w, im_c = clip[0].shape
    elif isinstance(clip[0], PIL.Image.Image):
        im_w, im_h = clip[0].size
    else:
        raise TypeError('Expected numpy.ndarray or PIL.Image' +
                        'but got list of {0}'.format(type(clip[0])))

    if w > im_w or h > im_h:
        error_msg = (
            'Initial image size should be larger then '
            'cropped size but got cropped sizes : ({w}, {h}) while '
            'initial image is ({im_w}, {im_h})'.format(
                im_w=im_w, im_h=im_h, w=w, h=h))
        raise ValueError(error_msg)

    x1 = int(round((im_w - w) / 2.))
    y1 = int(round((im_h - h) / 2.))
    cropped = F.crop_clip(clip, y1, x1, h, w)

    return cropped
Args:
        clip (list of PIL.Image or numpy.ndarray): List of images to be
            cropped, in format (h, w, c) for numpy.ndarray

    Returns:
        PIL.Image or numpy.ndarray: Cropped list of images
https://github.com/dmlc/gluon-cv/blob/f22650a5d31c31956d9392530a0e619689cdb3c5/gluoncv/torch/data/transforms/videotransforms/video_transforms.py#L265-L294
import numbers import random import numpy as np import PIL import torch import torchvision from . import functional as F class Compose(object): def __init__(self, transforms): self.transforms = transforms def __call__(self, clip): for t in self.transforms: clip = t(clip) return clip class RandomHorizontalFlip(object): def __call__(self, clip): if random.random() < 0.5: if isinstance(clip[0], np.ndarray): return [np.fliplr(img) for img in clip] elif isinstance(clip[0], PIL.Image.Image): return [ img.transpose(PIL.Image.FLIP_LEFT_RIGHT) for img in clip ] else: raise TypeError('Expected numpy.ndarray or PIL.Image' + ' but got list of {0}'.format(type(clip[0]))) return clip class RandomResize(object): def __init__(self, ratio=(3. / 4., 4. / 3.), interpolation='nearest'): self.ratio = ratio self.interpolation = interpolation def __call__(self, clip): scaling_factor = random.uniform(self.ratio[0], self.ratio[1]) if isinstance(clip[0], np.ndarray): im_h, im_w, im_c = clip[0].shape elif isinstance(clip[0], PIL.Image.Image): im_w, im_h = clip[0].size new_w = int(im_w * scaling_factor) new_h = int(im_h * scaling_factor) new_size = (new_w, new_h) resized = F.resize_clip( clip, new_size, interpolation=self.interpolation) return resized class Resize(object): def __init__(self, size, interpolation='nearest'): self.size = size self.interpolation = interpolation def __call__(self, clip): resized = F.resize_clip( clip, self.size, interpolation=self.interpolation) return resized class RandomCrop(object): def __init__(self, size): if isinstance(size, numbers.Number): size = (size, size) self.size = size def __call__(self, clip): h, w = self.size if isinstance(clip[0], np.ndarray): im_h, im_w, im_c = clip[0].shape elif isinstance(clip[0], PIL.Image.Image): im_w, im_h = clip[0].size else: raise TypeError('Expected numpy.ndarray or PIL.Image' + 'but got list of {0}'.format(type(clip[0]))) if w > im_w or h > im_h: error_msg = ( 'Initial image size should be larger then ' 'cropped size but got cropped sizes : ({w}, {h}) while ' 'initial image is ({im_w}, {im_h})'.format( im_w=im_w, im_h=im_h, w=w, h=h)) raise ValueError(error_msg) x1 = random.randint(0, im_w - w) y1 = random.randint(0, im_h - h) cropped = F.crop_clip(clip, y1, x1, h, w) return cropped class ThreeCrop(object): def __init__(self, size): if isinstance(size, numbers.Number): size = (size, size) self.size = size def __call__(self, clip): h, w = self.size if isinstance(clip[0], np.ndarray): im_h, im_w, im_c = clip[0].shape elif isinstance(clip[0], PIL.Image.Image): im_w, im_h = clip[0].size else: raise TypeError('Expected numpy.ndarray or PIL.Image' + 'but got list of {0}'.format(type(clip[0]))) if w != im_w and h != im_h: clip = F.resize_clip(clip, self.size, interpolation="bilinear") im_h, im_w, im_c = clip[0].shape step = np.max((np.max((im_w, im_h)) - self.size[0]) // 2, 0) cropped = [] for i in range(3): if (im_h > self.size[0]): x1 = 0 y1 = i * step cropped.extend(F.crop_clip(clip, y1, x1, h, w)) else: x1 = i * step y1 = 0 cropped.extend(F.crop_clip(clip, y1, x1, h, w)) return cropped class RandomRotation(object): def __init__(self, degrees): if isinstance(degrees, numbers.Number): if degrees < 0: raise ValueError('If degrees is a single number,' 'must be positive') degrees = (-degrees, degrees) else: if len(degrees) != 2: raise ValueError('If degrees is a sequence,' 'it must be of len 2.') self.degrees = degrees def __call__(self, clip): import skimage angle = random.uniform(self.degrees[0], self.degrees[1]) if isinstance(clip[0], np.ndarray): 
rotated = [skimage.transform.rotate(img, angle) for img in clip] elif isinstance(clip[0], PIL.Image.Image): rotated = [img.rotate(angle) for img in clip] else: raise TypeError('Expected numpy.ndarray or PIL.Image' + 'but got list of {0}'.format(type(clip[0]))) return rotated class CenterCrop(object): def __init__(self, size): if isinstance(size, numbers.Number): size = (size, size) self.size = size
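The transform classes in this context follow the torchvision Compose pattern but operate on clips, i.e. lists of frames, rather than single images. A minimal usage sketch, assuming the module is importable as shown (the import path mirrors the file location above) and that frames are HxWxC numpy arrays:

import numpy as np
from gluoncv.torch.data.transforms.videotransforms import video_transforms as vt

# A clip is simply a list of frames; 8 random RGB frames here.
clip = [np.random.randint(0, 255, (240, 320, 3), dtype=np.uint8) for _ in range(8)]

pipeline = vt.Compose([
    vt.Resize((256, 256)),        # resize every frame to 256x256
    vt.RandomCrop(224),           # same random 224x224 crop applied to all frames
    vt.RandomHorizontalFlip(),    # flip the whole clip with probability 0.5
])

transformed_clip = pipeline(clip)  # still a list of 8 frames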
Apache License 2.0
tum-pbs/phiflow
phi/field/_scene.py
Scene.put_properties
python
def put_properties(self, update: dict = None, **kw_updates):
    self._init_properties()
    if update:
        self._properties.update(update)
    self._properties.update(kw_updates)
    self._write_properties()
Updates the properties dictionary and stores it in `description.json` of all scene folders.

Args:
    update: new values, must be JSON serializable.
    kw_updates: additional updates as keyword arguments. This overrides `update`.
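A brief usage sketch; the parent directory is illustrative and `Scene` is assumed to be exposed from the `phi.field` package:

from phi.field import Scene

scene = Scene.create("~/phi/my_simulation")            # creates sim_000000 under the parent dir
scene.put_properties({"viscosity": 0.1}, steps=100)    # merged and written to description.json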
https://github.com/tum-pbs/phiflow/blob/4a85f8a5029aa4e30a791daa659f2c8e1536e37e/phi/field/_scene.py#L312-L324
import inspect import json import os import re import shutil import sys import warnings from os.path import join, isfile, isdir, abspath, expanduser, basename, split from phi import struct, math, __version__ as phi_version from ._field import Field, SampledField from ._field_io import read, write from ..math import Shape, batch def read_sim_frame(directory: math.Tensor, names: str or tuple or list or dict or struct.Struct, frame: int, convert_to_backend=True): def single_read(name): name = _slugify_filename(name) files = math.map(lambda dir_: _filename(dir_, name, frame), directory) return read(files, convert_to_backend=convert_to_backend) return struct.map(single_read, names) def write_sim_frame(directory: math.Tensor, fields: Field or tuple or list or dict or struct.Struct, frame: int, names: str or tuple or list or struct.Struct or None = None): if names is None: names = struct.names(fields) if frame > 1000000: warnings.warn(f"frame too large: {frame}. Data will be saved but filename might cause trouble in the future.") def single_write(f, name): name = _slugify_filename(name) files = math.map(lambda dir_: _filename(dir_, name, frame), directory) if isinstance(f, SampledField): write(f, files) elif isinstance(f, math.Tensor): raise NotImplementedError() elif isinstance(f, Field): raise ValueError("write_sim_frame: only SampledField instances are saved. Resample other Fields before saving them.") else: raise ValueError(f"write_sim_frame: only SampledField instances can be saved but got {f}") struct.foreach(single_write, fields, names) def _filename(simpath, name, frame): return join(simpath, f"{slugify(name)}_{frame:06d}.npz") def _str(bytes_or_str): if isinstance(bytes_or_str, str): return bytes_or_str else: return str(bytes_or_str, 'utf-8') def get_fieldnames(simpath) -> tuple: fieldnames_set = {_str(f)[:-11] for f in os.listdir(simpath) if _str(f).endswith(".npz")} return tuple(sorted(fieldnames_set)) def get_frames(path: str, field_name: str = None, mode=set.intersection) -> tuple: if field_name is not None: all_frames = {int(f[-10:-4]) for f in os.listdir(path) if _str(f).startswith(field_name) and _str(f).endswith(".npz")} return tuple(sorted(all_frames)) else: fields = get_fieldnames(path) if not fields: return () frames_sets = [set(get_frames(path, field)) for field in fields] frames = mode(*frames_sets) return tuple(sorted(frames)) class Scene(object): def __init__(self, paths: str or math.Tensor): self._paths = math.wrap(paths) self._properties: dict or None = None @property def shape(self): return self._paths.shape @property def is_batch(self): return self._paths.rank > 0 @property def path(self) -> str: assert not self.is_batch, "Scene.path is not defined for scene batches." 
return self._paths.native() @property def paths(self) -> math.Tensor: return self._paths @staticmethod def stack(*scenes: 'Scene', dim: Shape = batch('batch')) -> 'Scene': return Scene(math.stack([s._paths for s in scenes], dim)) @staticmethod def create(parent_directory: str, shape: math.Shape = math.EMPTY_SHAPE, copy_calling_script=True, **dimensions) -> 'Scene': shape = shape & math.batch(**dimensions) parent_directory = expanduser(parent_directory) abs_dir = abspath(parent_directory) if not isdir(abs_dir): os.makedirs(abs_dir) next_id = 0 else: indices = [int(name[4:]) for name in os.listdir(abs_dir) if name.startswith("sim_")] next_id = max([-1] + indices) + 1 ids = math.wrap(tuple(range(next_id, next_id + shape.volume))).vector.split(shape) paths = math.map(lambda id_: join(parent_directory, f"sim_{id_:06d}"), ids) scene = Scene(paths) scene.mkdir() if copy_calling_script: try: scene.copy_calling_script() except IOError as err: warnings.warn(f"Failed to copy calling script to scene during Scene.create(): {err}") return scene @staticmethod def list(parent_directory: str, include_other: bool = False, dim: Shape or None = None) -> 'Scene' or tuple: parent_directory = expanduser(parent_directory) abs_dir = abspath(parent_directory) if not isdir(abs_dir): return () names = [sim for sim in os.listdir(abs_dir) if sim.startswith("sim_") or (include_other and isdir(join(abs_dir, sim)))] if dim is None: return tuple(Scene(join(parent_directory, name)) for name in names) else: paths = math.wrap([join(parent_directory, name) for name in names], dim) return Scene(paths) @staticmethod def at(directory: str or tuple or list or math.Tensor or 'Scene', id: int or math.Tensor or None = None) -> 'Scene': if isinstance(directory, Scene): assert id is None, f"Got id={id} but directory is already a Scene." return directory if isinstance(directory, (tuple, list)): directory = math.wrap(directory, batch('scenes')) directory = math.map(lambda d: expanduser(d), math.wrap(directory)) if id is None: paths = directory else: id = math.wrap(id) paths = math.map(lambda d, i: join(d, f"sim_{i:06d}"), directory, id) for path in math.flatten(paths): if not isdir(path): raise IOError(f"There is no scene at '{path}'") return Scene(paths) def subpath(self, name: str, create: bool = False) -> str or tuple: def single_subpath(path): path = join(path, name) if create and not isdir(path): os.mkdir(path) return path result = math.map(single_subpath, self._paths) if result.rank == 0: return result.native() else: return result def _init_properties(self): if self._properties is not None: return json_file = join(next(iter(math.flatten(self._paths))), "description.json") if isfile(json_file): with open(json_file) as stream: self._properties = json.load(stream) else: self._properties = {} def exist_properties(self): if self._properties is not None: return True else: json_file = join(next(iter(math.flatten(self._paths))), "description.json") return isfile(json_file) def exists_config(self): if isinstance(self.path, str): return isfile(join(self.path, "description.json")) else: return any(isfile(join(p, "description.json")) for p in self.path) @property def properties(self): self._init_properties() return self._properties @properties.setter def properties(self, dict): self._properties = dict with open(join(self.path, "description.json"), "w") as out: json.dump(self._properties, out, indent=2) def put_property(self, key, value): self._init_properties() self._properties[key] = value self._write_properties()
MIT License
terraref/computing-pipeline
scripts/rebuild_scripts/buildClowderInstance.py
getDanforthSnapshotFromPath
python
def getDanforthSnapshotFromPath(filepath):
    parts = filepath.split("/")
    if parts[-2].find("snapshot") > -1:
        img_metadata = getImageMdFromCSV(parts[-2].replace('snapshot', ''),
                                         os.path.basename(filepath).replace('.png', ''))
        return {
            "snapshot": parts[-2],
            "md": img_metadata
        }
    else:
        # The original else branch built this dict without returning it, so the
        # function fell through and returned None; callers index the result
        # (snap_info['snapshot']), so the dict must be returned explicitly.
        return {
            "snapshot": None,
            "md": None
        }
Get snapshot info from path.
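An illustration of the expected return shape, assuming the function is in scope within the script; the path and snapshot ID are made up, and the per-image metadata only resolves if getImageMdFromCSV finds the snapshot in the configured Danforth CSV:

path = "/danforth/raw_data/snapshot123456/VIS_SV_0_z1_h1_g0_e82_117881.png"
info = getDanforthSnapshotFromPath(path)
if info["snapshot"] is not None:
    snapshot_id = info["snapshot"]    # "snapshot123456"
    image_md = info["md"]             # dict of camera/plant metadata from the CSV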
https://github.com/terraref/computing-pipeline/blob/5d0d089501154c6c0de68229579c131d79e39b5e/scripts/rebuild_scripts/buildClowderInstance.py#L378-L392
import sys, os, json import requests import psycopg2 from urllib3.filepost import encode_multipart_formdata def main(): with open(inputfile, 'r') as inp: curr_group = { "sensor": None, "date": None, "timestamp": None, "metadata": None, "snapshot": None } curr_group_files = [] curr_group_file_metadata = {} lastLineFound = True if lastLine == "" else False for line in inp: full_path = line.rstrip() file_metadata = None if not lastLineFound: if lastLine == full_path: print("Found last line; resuming uploads.") lastLineFound = True else: full_path = full_path.replace("/gpfs/largeblockFS/projects/arpae/terraref/", "/home/clowder/") if full_path == "": continue curr_info = {"sensor": None, "date": None, "timestamp": None, "metadata": None, "snapshot": None} if full_path.find("raw_data") > -1: if full_path.endswith("metadata.json"): if full_path.find("/danforth/") > -1: curr_info = getDanforthInfoFromJson(full_path) if curr_group['sensor'] == None: curr_group = curr_info else: curr_info = getGantryInfoFromPath(full_path) curr_info['metadata'] = getGantryMetadata(full_path) if curr_group['sensor'] == None: curr_group = curr_info else: if full_path.find("/danforth/") > -1: snap_info = getDanforthSnapshotFromPath(full_path) if snap_info['snapshot'] is None: continue else: curr_info['snapshot'] = snap_info["snapshot"] file_metadata = snap_info["md"] else: curr_info = getGantryInfoFromPath(full_path) if full_path.find("Level_1") > -1: if full_path.find("ddpscIndoorSuite") > -1: timeinfo = full_path.split("/")[-2].replace("ddpscIndoorSuite - ", "") date = timeinfo.split("__")[0] curr_info = { "sensor": "ddpscIndoorSuite", "date": date, "timestamp": timeinfo, "snapshot": timeinfo, "metadata": { "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"], "content": {"status": "COMPLETED"}, "agent": { "@type": "cat:extractor", "extractor_id": clowderURL + "/api/extractors/terra.plantcv" } } } elif full_path.find("demosaic") > -1: timeinfo = full_path.split("/")[-2] date = timeinfo.split("__")[0] curr_info = { "sensor": "stereoTop", "date": date, "timestamp": timeinfo, "metadata": { "@context": ["https://clowder.ncsa.illinois.edu/contexts/metadata.jsonld"], "content": {"status": "COMPLETED"}, "agent": { "@type": "cat:extractor", "extractor_id": clowderURL + "/api/extractors/terra.demosaic" } }, "snapshot": None } elif full_path.find("EnvironmentLogger") > -1: date = full_path.split("/")[-2] curr_info = { "sensor": "EnvironmentLogger", "date": date, "timestamp": None, "metadata": None, "snapshot": None } elif full_path.find("scanner3DTop") > -1: timeinfo = full_path.split("/")[-2] date = timeinfo.split("__")[0] curr_info = { "sensor": "scanner3DTop", "date": date, "timestamp": timeinfo, "metadata": None, "snapshot": None } submit = False if full_path.find("/danforth/") > -1: if curr_info['snapshot'] != curr_group['snapshot']: submit = True elif curr_info['metadata'] is not None: curr_group['metadata'] = curr_info['metadata'] else: if (curr_info["sensor"] != curr_group["sensor"] or curr_info["date"] != curr_group["date"] or curr_info["timestamp"] != curr_group["timestamp"]): submit = True elif curr_info['metadata'] is not None: curr_group['metadata'] = curr_info['metadata'] if submit: if curr_group['sensor'] is not None: curr_group['files'] = curr_group_files curr_group['file_md'] = curr_group_file_metadata submitGroupToClowder(curr_group) curr_group = curr_info curr_group_files = [] if not full_path.endswith("metadata.json"): curr_group_files.append(full_path) if file_metadata: 
curr_group_file_metadata[full_path] = file_metadata curr_group['files'] = curr_group_files curr_group['file_md'] = curr_group_file_metadata submitGroupToClowder(curr_group) def connectToPostgres(): try: conn = psycopg2.connect(dbname='globusmonitor') except: conn = psycopg2.connect(dbname='postgres') conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) curs = conn.cursor() curs.execute('CREATE DATABASE globusmonitor;') curs.close() conn.commit() conn.close() conn = psycopg2.connect(dbname='globusmonitor') initializeDatabase(conn) print("Connected to Postgres") return conn def loadJsonFile(jsonfile): try: f = open(jsonfile) jsonobj = json.load(f) f.close() return jsonobj except IOError: print("- unable to open %s" % jsonfile) return {} def barcode_parser(barcode): parsed_barcode = {} parsed_barcode['species'] = barcode[0:2] parsed_barcode['genotype'] = barcode[0:5] parsed_barcode['treatment'] = barcode[5:7] parsed_barcode['unique_id'] = barcode[7:] return parsed_barcode def metadata_to_json(filename, metadata, data, fields): img_meta = filename.split('_') if img_meta[0] == 'VIS': camera_type = 'visible/RGB' elif img_meta[0] == 'NIR': camera_type = 'near-infrared' if img_meta[1] == 'SV': perspective = 'side-view' elif img_meta[1] == 'TV': perspective = 'top-view' if len(img_meta) == 8: rotation_angle = img_meta[2] zoom = (0.0008335 * int(img_meta[3].replace('z', ''))) + 0.9991665 stage_position = img_meta[4].replace('h', '') camera_gain = img_meta[5].replace('g', '') camera_exposure = img_meta[6].replace('e', '') img_id = img_meta[7] elif len(img_meta) == 7: rotation_angle = 0 zoom = (0.0008335 * int(img_meta[2].replace('z', ''))) + 0.9991665 stage_position = img_meta[3].replace('h', '') camera_gain = img_meta[4].replace('g', '') camera_exposure = img_meta[5].replace('e', '') img_id = img_meta[6] parsed_barcode = barcode_parser(data[fields['plantbarcode']]) if parsed_barcode['species'] in metadata['sample']['barcode']['species']: species = metadata['sample']['barcode']['species'][parsed_barcode['species']] if parsed_barcode['genotype'] in metadata['sample']['barcode']['genotypes']: genotype = metadata['sample']['barcode']['genotypes'][parsed_barcode['genotype']] if parsed_barcode['treatment'] in metadata['sample']['barcode']['treatments']: treatment = metadata['sample']['barcode']['treatments'][parsed_barcode['treatment']] file_metadata = {'snapshot_id' : data[fields['id']], 'plant_barcode' : data[fields['plantbarcode']], 'camera_type' : camera_type, 'perspective' : perspective, 'rotation_angle' : rotation_angle, 'zoom' : zoom, 'imager_stage_vertical_position' : stage_position, 'camera_gain' : camera_gain, 'camera_exposure' : camera_exposure, 'image_id' : img_id, 'imagedate' : data[fields['timestamp']], 'species' : species, 'genotype' : genotype, 'treatment' : treatment, 'sample_id' : parsed_barcode['unique_id']} return file_metadata def getImageMdFromCSV(snapshotID, imagename): csvfile = open(danforthCSV, 'rU') header = csvfile.readline().rstrip('\n').replace(" ", "") exp_metadata = { "experiment" : { "planting_date": "2014-05-27", "title": "Sorghum Pilot Experiment - Danforth Center Phenotyping Facility - 2014-05-27", "author": "Noah Fahlgren", "project": "TERRA-REF", "location": "Donald Danforth Plant Science Center", "instrument": "Bellwether Phenotyping Facility", "growth_medium": "MetroMix360 potting mix with 14-14-14 Osmocote" }, "sample" : { "barcode" : { "format": { "species": "0-1", "genotype": "0-4", "treatment": "5-6", "unique_id": "7-12" }, "species": { "Fp": "Sorghum bicolor", 
"Fa": "Sorghum bicolor", "Fr": "Sorghum bicolor" }, "genotypes": { "Fr001": "BTx623", "Fp001": "BTx642", "Fp002": "Tx7000", "Fa001": "Tx430" }, "treatments": { "AA": "100%: 217 ml water (47.6% VWC)", "AB": "80%: 173.6 ml water (37.5% VWC)", "AC": "60%: 130.2 ml water (27.3% VWC)", "AD": "40%: 86.8 ml water (17.2% VWC)" } } } } cols = header.split(',') colnames = {} for i, col in enumerate(cols): colnames[col] = i for row in csvfile: data = row.rstrip('\n').split(',') if data[colnames['id']] != snapshotID: continue else: img_list = data[colnames['tiles']][:-1] imgs = img_list.split(';') for img in imgs: if img == imagename: img_md = metadata_to_json(img, exp_metadata, data, colnames) return img_md def getDanforthInfoFromJson(jsonpath): jsonobj = loadJsonFile(jsonpath) if 'experiment' in jsonobj: if 'planting_date' in jsonobj['experiment']: j_date = jsonobj['experiment']['planting_date'] elif "title" in jsonobj: j_date = jsonobj['experiment']['title'].split(" - ")[2] else: j_date = None else: if 'planting_date' in jsonobj: j_date = jsonobj['planting_date'] elif "title" in jsonobj: j_date = jsonobj['title'].split(" - ")[2] else: j_date = None return {"sensor": "ddpscIndoorSuite", "date": j_date, "timestamp": None, "snapshot": getDanforthSnapshotFromPath(jsonpath)["snapshot"], "metadata": jsonobj }
BSD 3-Clause New or Revised License
jweyn/mos-x
mosx/MesoPy.py
Meso._get_response
python
def _get_response(self, endpoint, request_dict):
    http_error = ('Could not connect to the API. This could be because you have no internet connection, a parameter'
                  ' was input incorrectly, or the API is currently down. Please try again.')
    json_error = 'Could not retrieve JSON values. Try again with a shorter date range.'

    try:
        qsp = urllib.parse.urlencode(request_dict, doseq=True)
        resp = urllib.request.urlopen(self.base_url + endpoint + '?' + qsp).read()
    # The original `except AttributeError or NameError:` only caught AttributeError;
    # a tuple is required to catch both exceptions on the Python 2 fallback path.
    except (AttributeError, NameError):
        try:
            qsp = urllib.urlencode(request_dict, doseq=True)
            resp = urllib2.urlopen(self.base_url + endpoint + '?' + qsp).read()
        except urllib2.URLError:
            raise MesoPyError(http_error)
    except urllib.error.URLError:
        raise MesoPyError(http_error)

    try:
        json_data = json.loads(resp.decode('utf-8'))
    except ValueError:
        raise MesoPyError(json_error)

    return self._checkresponse(json_data)
Returns a dictionary of data requested by each function.

Arguments:
----------
endpoint: string, mandatory
    Set in all other methods, this is the API endpoint specific to each function.
request_dict: string, mandatory
    A dictionary of parameters that are formatted into the API call.

Returns:
--------
response: A dictionary that has been dumped from JSON.

Raises:
-------
MesoPyError: Overrides the exceptions given in the requests library to give more
    custom error messages. Connection_error occurs if no internet connection exists.
    Timeout_error occurs if the request takes too long and redirect_error is shown
    if the url is formatted incorrectly.
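A sketch of how the public endpoint methods delegate to this helper; the token value and the stations/metadata endpoint are placeholders based on MesoWest v2 API conventions, not taken from this file:

from mosx.MesoPy import Meso

m = Meso(token="YOUR_TOKEN")                              # placeholder token
params = {"stid": "kslc", "token": m.token}
data = m._get_response("stations/metadata", params)       # checked JSON dict or MesoPyError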
https://github.com/jweyn/mos-x/blob/83b3dd5ac2e610b6a2934408d07f573789a3f233/mosx/MesoPy.py#L125-L171
try: import urllib.parse import urllib.request import urllib.error except ImportError: import urllib2 import urllib import json class MesoPyError(Exception): def __init__(self, error_message): self.error_message = error_message def __str__(self): return repr(self.error_message) class Meso(object): def __init__(self, token): self.base_url = 'http://api.mesowest.net/v2/' self.token = token self.geo_criteria = ['stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', 'subgacc'] @staticmethod def _checkresponse(response): results_error = 'No results were found matching your query' auth_error = 'The token or API key is not valid, please contact Josh Clark at joshua.m.clark@utah.edu to ' 'resolve this' rule_error = 'This request violates a rule of the API. Please check the guidelines for formatting a data ' 'request and try again' catch_error = 'Something went wrong. Check all your calls and try again' if response['SUMMARY']['RESPONSE_CODE'] == 1: return response elif response['SUMMARY']['RESPONSE_CODE'] == 2: raise MesoPyError(results_error) elif response['SUMMARY']['RESPONSE_CODE'] == 200: raise MesoPyError(auth_error) elif response['SUMMARY']['RESPONSE_CODE'] == 400: raise MesoPyError(rule_error) elif response['SUMMARY']['RESPONSE_CODE'] == -1: format_error = response['SUMMARY']['RESPONSE_MESSAGE'] raise MesoPyError(format_error) else: raise MesoPyError(catch_error)
MIT License
frmdstryr/enamlx
enamlx/widgets/graphics_view.py
GraphicsItem.show
python
def show(self):
    self.visible = True
    if self.proxy_is_active:
        self.proxy.ensure_visible()
Ensure the widget is shown. Calling this method will also set the widget visibility to True.
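A minimal imperative sketch, assuming `item` is an already-activated GraphicsItem (in enaml this toggling is normally driven declaratively):

item.visible = False     # hide the item via its attribute
item.show()              # sets visible back to True and asks the proxy to ensure visibility
assert item.visible is True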
https://github.com/frmdstryr/enamlx/blob/798eefe146aac15e559315fe5ff42dd813656cea/enamlx/widgets/graphics_view.py#L515-L521
import sys from enum import IntFlag from atom.api import ( Atom, Float, Int, Typed, Bool, Coerced, ForwardTyped, Enum, List, Instance, Str, Value, Event, Property, observe, set_default, ) from enaml.colors import ColorMember from enaml.core.declarative import d_, d_func from enaml.fonts import FontMember from enaml.image import Image from enaml.layout.constrainable import ConstrainableMixin, PolicyEnum from enaml.widgets.widget import Feature from enaml.widgets.control import Control, ProxyControl from enaml.widgets.toolkit_object import ToolkitObject, ProxyToolkitObject NUMERIC = (int, float, long) if sys.version_info.major < 3 else (int, float) class GraphicFeature(IntFlag): MouseEvent = 0x08 WheelEvent = 0x16 DrawEvent = 0x32 BackgroundDrawEvent = 0x64 class Point(Atom): x = d_(Float(0, strict=False)) y = d_(Float(0, strict=False)) z = d_(Float(0, strict=False)) def __init__(self, x=0, y=0, z=0): super(Point, self).__init__(x=x, y=y, z=z) def __iter__(self): yield self.x yield self.y yield self.z def __len__(self): return 3 def __eq__(self, other): pos = (self.x, self.y, self.z) if isinstance(other, Point): return pos == (other.x, other.y, other.z) return pos == other def __add__(self, other): return Point( self.x + other[0], self.y + other[1], self.z + other[2] if len(other) > 2 else self.z, ) __radd__ = __add__ def __sub__(self, other): return Point( self.x - other[0], self.y - other[1], self.z - other[2] if len(other) > 2 else self.z, ) def __rsub__(self, other): return Point( other[0] - self.x, other[1] - self.y, other[2] - self.z if len(other) > 2 else self.z, ) def __mul__(self, other): if isinstance(other, NUMERIC): return Point(self.x * other, self.y * other, self.z * other) return Point( other[0] * self.x, other[1] * self.y, other[2] * self.z if len(other) > 2 else self.z, ) __rmul__ = __mul__ def __div__(self, other): if isinstance(other, NUMERIC): return Point(self.x / other, self.y / other, self.z / other) return Point( self.x / other[0], self.y / other[1], self.z / other[2] if len(other) > 2 else self.z, ) def __rdiv__(self, other): if isinstance(other, NUMERIC): return Point(other / self.x, other / self.y, other / self.z) return Point( other[0] / self.x, other[1] / self.y, other[2] / self.z if len(other) > 2 else self.z, ) def __neg__(self): return Point(-self.x, -self.y, -self.z) def __hash__(self): return id(self) def __getitem__(self, key): return (self.x, self.y, self.z)[key] def __repr__(self): return "Point(%d, %d, %d)" % (self.x, self.y, self.z) class Rect(Atom): x = d_(Float(0, strict=False)) y = d_(Float(0, strict=False)) width = d_(Float(0, strict=False)) height = d_(Float(0, strict=False)) def coerce_point(p): return p if isinstance(p, Point) else Point(*p) class PointMember(Coerced): def __init__(self, args=None, kwargs=None, factory=None, coercer=coerce_point): super(PointMember, self).__init__( Point, args, kwargs, factory=factory, coercer=coercer ) class Pen(Atom): color = ColorMember() width = Float(1.0, strict=False) line_style = Enum( "solid", "dash", "dot", "dash_dot", "dash_dot_dot", "custom", "none" ) cap_style = Enum("square", "flat", "round") join_style = Enum("bevel", "miter", "round") dash_pattern = List(Float(strict=False)) _tkdata = Value() class Brush(Atom): color = ColorMember() image = Instance(Image) style = Enum( "solid", "dense1", "dense2", "dense3", "dense4", "dense5", "dense6", "dense7", "horizontal", "vertical", "cross", "diag", "bdiag", "fdiag", "linear", "radial", "conical", "texture", "none", ) _tkdata = Value() class 
ProxyGraphicsView(ProxyControl): declaration = ForwardTyped(lambda: GraphicsView) def set_auto_range(self, enabled): raise NotImplementedError def set_antialiasing(self, enabled): raise NotImplementedError def set_drag_mode(self, mode): raise NotImplementedError def set_renderer(self, renderer): raise NotImplementedError def get_item_at(self, point): raise NotImplementedError def set_lock_aspect_ratio(self, locked): raise NotImplementedError def set_selected_items(self, items): raise NotImplementedError def fit_in_view(self, item): raise NotImplementedError def center_on(self, item): raise NotImplementedError def reset_view(self): raise NotImplementedError def translate_view(self, x, y): raise NotImplementedError def scale_view(self, x, y): raise NotImplementedError def rotate_view(self, angle): raise NotImplementedError def map_from_scene(self, point): raise NotImplementedError def map_to_scene(self, point): raise NotImplementedError def pixel_density(self): raise NotImplementedError class ProxyGraphicsItem(ProxyToolkitObject): declaration = ForwardTyped(lambda: GraphicsItem) def set_x(self, x): raise NotImplementedError def set_y(self, y): raise NotImplementedError def set_z(self, z): raise NotImplementedError def set_position(self, position): raise NotImplementedError def set_rotation(self, rotation): raise NotImplementedError def set_scale(self, scale): raise NotImplementedError def set_opacity(self, opacity): raise NotImplementedError def set_selected(self, selected): raise NotImplementedError def set_enabled(self, enabled): raise NotImplementedError def set_selectable(self, enabled): raise NotImplementedError def set_movable(self, enabled): raise NotImplementedError def set_visible(self, visible): raise NotImplementedError def set_tool_tip(self, tool_tip): raise NotImplementedError def set_status_tip(self, status_tip): raise NotImplementedError def request_update(self): raise NotImplementedError def ensure_visible(self): raise NotImplementedError def ensure_hidden(self): raise NotImplementedError def set_focus(self): raise NotImplementedError def clear_focus(self): raise NotImplementedError def has_focus(self): raise NotImplementedError class ProxyGraphicsItemGroup(ProxyGraphicsItem): declaration = ForwardTyped(lambda: GraphicsItemGroup) class ProxyGraphicsWidget(ProxyGraphicsItem): declaration = ForwardTyped(lambda: GraphicsWidget) class ProxyAbstractGraphicsShapeItem(ProxyGraphicsItem): declaration = ForwardTyped(lambda: AbstractGraphicsShapeItem) def set_pen(self, pen): raise NotImplementedError def set_brush(self, brush): raise NotImplementedError class ProxyGraphicsRectItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsRectItem) def set_width(self, width): raise NotImplementedError def set_height(self, height): raise NotImplementedError class ProxyGraphicsEllipseItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsEllipseItem) def set_width(self, width): raise NotImplementedError def set_height(self, height): raise NotImplementedError def set_span_angle(self, angle): raise NotImplementedError def set_start_angle(self, angle): raise NotImplementedError class ProxyGraphicsLineItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsLineItem) def set_point(self, point): raise NotImplementedError class ProxyGraphicsTextItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsTextItem) def set_text(self, text): raise NotImplementedError def set_font(self, font): raise 
NotImplementedError class ProxyGraphicsPolygonItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsPolygonItem) def set_points(self, points): raise NotImplementedError class ProxyGraphicsPathItem(ProxyAbstractGraphicsShapeItem): declaration = ForwardTyped(lambda: GraphicsPathItem) def set_path(self, path): raise NotImplementedError class ProxyGraphicsImageItem(ProxyGraphicsItem): declaration = ForwardTyped(lambda: GraphicsImageItem) def set_image(self, image): raise NotImplementedError class GraphicsItem(ToolkitObject, ConstrainableMixin): proxy = Typed(ProxyGraphicsItem) position = d_(PointMember()) rotation = d_(Float(strict=False)) scale = d_(Float(1.0, strict=False)) opacity = d_(Float(1.0, strict=False)) selected = d_(Bool()) enabled = d_(Bool(True)) visible = d_(Bool(True)) tool_tip = d_(Str()) status_tip = d_(Str()) features = d_(Coerced(Feature, (0,))) extra_features = d_(Coerced(GraphicFeature, (0,))) request_update = d_(Event()) selectable = d_(Bool()) movable = d_(Bool()) @observe( "position", "position.x", "position.y", "position.z", "scale", "rotation", "opacity", "selected", "enabled", "visible", "tool_tip", "status_tip", "request_update", "selectable", "movable", ) def _update_proxy(self, change): super(GraphicsItem, self)._update_proxy(change)
MIT License
botfront/rasa-for-botfront
rasa/core/policies/policy.py
Policy.format_tracker_states
python
def format_tracker_states(states: List[Dict]) -> Text:
    formatted_states = [""]
    if states:
        for index, state in enumerate(states):
            state_messages = []
            if state:
                if USER in state:
                    if TEXT in state[USER]:
                        state_messages.append(
                            f"user text: {str(state[USER][TEXT])}"
                        )
                    if INTENT in state[USER]:
                        state_messages.append(
                            f"user intent: {str(state[USER][INTENT])}"
                        )
                    if ENTITIES in state[USER]:
                        state_messages.append(
                            f"user entities: {str(state[USER][ENTITIES])}"
                        )
                if PREVIOUS_ACTION in state:
                    if ACTION_NAME in state[PREVIOUS_ACTION]:
                        state_messages.append(
                            f"previous action name: {str(state[PREVIOUS_ACTION][ACTION_NAME])}"
                        )
                    if ACTION_TEXT in state[PREVIOUS_ACTION]:
                        state_messages.append(
                            f"previous action text: {str(state[PREVIOUS_ACTION][ACTION_TEXT])}"
                        )
                if ACTIVE_LOOP in state:
                    state_messages.append(f"active loop: {str(state[ACTIVE_LOOP])}")
                if SLOTS in state:
                    state_messages.append(f"slots: {str(state[SLOTS])}")
            state_message_formatted = " | ".join(state_messages)
            state_formatted = f"[state {str(index)}] {state_message_formatted}"
            formatted_states.append(state_formatted)
    return "\n".join(formatted_states)
Format tracker states to human readable format on debug log.

Args:
    states: list of tracker states dicts

Returns:
    the string of the states with user intents and actions
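A small sketch of the expected input and output, using the same constants the method checks for (both constant imports appear in this module's header):

from rasa.core.policies.policy import Policy
from rasa.shared.core.constants import USER, PREVIOUS_ACTION
from rasa.shared.nlu.constants import INTENT, ACTION_NAME

states = [
    {
        USER: {INTENT: "greet"},
        PREVIOUS_ACTION: {ACTION_NAME: "action_listen"},
    },
]
print(Policy.format_tracker_states(states))
# prints a leading blank line followed by:
# [state 0] user intent: greet | previous action name: action_listen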
https://github.com/botfront/rasa-for-botfront/blob/6e0e48d0059e197b5f686df1e27935769c3641b7/rasa/core/policies/policy.py#L351-L396
import copy import json import logging from enum import Enum from pathlib import Path from typing import ( Any, List, Optional, Text, Dict, Callable, Type, Union, Tuple, TYPE_CHECKING, ) import numpy as np from rasa.core.exceptions import UnsupportedDialogueModelError from rasa.shared.core.events import Event import rasa.shared.utils.common import rasa.utils.common import rasa.shared.utils.io from rasa.shared.core.domain import Domain from rasa.core.featurizers.single_state_featurizer import SingleStateFeaturizer from rasa.core.featurizers.tracker_featurizers import ( TrackerFeaturizer, MaxHistoryTrackerFeaturizer, FEATURIZER_FILE, ) from rasa.shared.nlu.interpreter import NaturalLanguageInterpreter from rasa.shared.core.trackers import DialogueStateTracker from rasa.shared.core.generator import TrackerWithCachedStates from rasa.core.constants import DEFAULT_POLICY_PRIORITY from rasa.shared.core.constants import USER, SLOTS, PREVIOUS_ACTION, ACTIVE_LOOP from rasa.shared.nlu.constants import ENTITIES, INTENT, TEXT, ACTION_TEXT, ACTION_NAME from rasa.utils.tensorflow.constants import EPOCHS if TYPE_CHECKING: from rasa.shared.nlu.training_data.features import Features logger = logging.getLogger(__name__) class SupportedData(Enum): ML_DATA = 1 RULE_DATA = 2 ML_AND_RULE_DATA = 3 @staticmethod def trackers_for_policy( policy: Union["Policy", Type["Policy"]], trackers: Union[List[DialogueStateTracker], List[TrackerWithCachedStates]], ) -> Union[List[DialogueStateTracker], List[TrackerWithCachedStates]]: supported_data = policy.supported_data() if supported_data == SupportedData.RULE_DATA: return [tracker for tracker in trackers if tracker.is_rule_tracker] if supported_data == SupportedData.ML_DATA: return [tracker for tracker in trackers if not tracker.is_rule_tracker] return trackers class Policy: @staticmethod def supported_data() -> SupportedData: return SupportedData.ML_DATA @staticmethod def _standard_featurizer() -> MaxHistoryTrackerFeaturizer: return MaxHistoryTrackerFeaturizer(SingleStateFeaturizer()) @classmethod def _create_featurizer( cls, featurizer: Optional[TrackerFeaturizer] = None ) -> TrackerFeaturizer: if featurizer: return copy.deepcopy(featurizer) else: return cls._standard_featurizer() def __init__( self, featurizer: Optional[TrackerFeaturizer] = None, priority: int = DEFAULT_POLICY_PRIORITY, should_finetune: bool = False, **kwargs: Any, ) -> None: self.__featurizer = self._create_featurizer(featurizer) self.priority = priority self.finetune_mode = should_finetune @property def featurizer(self): return self.__featurizer @staticmethod def _get_valid_params(func: Callable, **kwargs: Any) -> Dict: valid_keys = rasa.shared.utils.common.arguments_of(func) params = {key: kwargs.get(key) for key in valid_keys if kwargs.get(key)} ignored_params = { key: kwargs.get(key) for key in kwargs.keys() if not params.get(key) } logger.debug(f"Parameters ignored by `model.fit(...)`: {ignored_params}") return params def featurize_for_training( self, training_trackers: List[DialogueStateTracker], domain: Domain, interpreter: NaturalLanguageInterpreter, bilou_tagging: bool = False, **kwargs: Any, ) -> Tuple[ List[List[Dict[Text, List["Features"]]]], np.ndarray, List[List[Dict[Text, List["Features"]]]], ]: state_features, label_ids, entity_tags = self.featurizer.featurize_trackers( training_trackers, domain, interpreter, bilou_tagging ) max_training_samples = kwargs.get("max_training_samples") if max_training_samples is not None: logger.debug( "Limit training data to {} training samples." 
"".format(max_training_samples) ) state_features = state_features[:max_training_samples] label_ids = label_ids[:max_training_samples] entity_tags = entity_tags[:max_training_samples] return state_features, label_ids, entity_tags def train( self, training_trackers: List[TrackerWithCachedStates], domain: Domain, interpreter: NaturalLanguageInterpreter, **kwargs: Any, ) -> None: raise NotImplementedError("Policy must have the capacity to train.") def predict_action_probabilities( self, tracker: DialogueStateTracker, domain: Domain, interpreter: NaturalLanguageInterpreter, **kwargs: Any, ) -> "PolicyPrediction": raise NotImplementedError("Policy must have the capacity to predict.") def _prediction( self, probabilities: List[float], events: Optional[List[Event]] = None, optional_events: Optional[List[Event]] = None, is_end_to_end_prediction: bool = False, is_no_user_prediction: bool = False, diagnostic_data: Optional[Dict[Text, Any]] = None, ) -> "PolicyPrediction": return PolicyPrediction( probabilities, self.__class__.__name__, self.priority, events, optional_events, is_end_to_end_prediction, is_no_user_prediction, diagnostic_data, ) def _metadata(self) -> Optional[Dict[Text, Any]]: pass @classmethod def _metadata_filename(cls) -> Optional[Text]: pass def persist(self, path: Union[Text, Path]) -> None: if self.featurizer is not None: self.featurizer.persist(path) file = Path(path) / self._metadata_filename() rasa.shared.utils.io.create_directory_for_file(file) rasa.shared.utils.io.dump_obj_as_json_to_file(file, self._metadata()) @classmethod def load(cls, path: Union[Text, Path], **kwargs: Any) -> "Policy": metadata_file = Path(path) / cls._metadata_filename() if metadata_file.is_file(): data = json.loads(rasa.shared.utils.io.read_file(metadata_file)) if (Path(path) / FEATURIZER_FILE).is_file(): featurizer = TrackerFeaturizer.load(path) data["featurizer"] = featurizer data.update(kwargs) constructor_args = rasa.shared.utils.common.arguments_of(cls) if "kwargs" not in constructor_args: if set(data.keys()).issubset(set(constructor_args)): rasa.shared.utils.io.raise_deprecation_warning( f"`{cls.__name__}.__init__` does not accept `**kwargs` " f"This is required for contextual information e.g. the flag " f"`should_finetune`.", warn_until_version="3.0.0", ) else: raise UnsupportedDialogueModelError( f"`{cls.__name__}.__init__` does not accept `**kwargs`. " f"Attempting to pass {data} to the policy. " f"This argument should be added to all policies by " f"Rasa Open Source 3.0.0." ) return cls(**data) logger.info( f"Couldn't load metadata for policy '{cls.__name__}'. " f"File '{metadata_file}' doesn't exist." ) return cls() @staticmethod def _default_predictions(domain: Domain) -> List[float]: return [0.0] * domain.num_actions @staticmethod
Apache License 2.0
interlin-q/interlin-q
interlinq/objects/operation.py
Operation.__init__
python
def __init__(
    self,
    name: str,
    qids: List[str] = None,
    cids: List[str] = None,
    gate: str = None,
    gate_param: Optional[List[Complex]] = None,
    computing_host_ids: Optional[List[str]] = None,
    pre_allocated_qubits: bool = False,
    hamiltonian: List[Tuple[float, List[Tuple[str, int]]]] = None,
):
    if name not in Constants.OPERATION_NAMES:
        raise (InputError("Operation is invalid"))

    self._name = name
    self._qids = qids
    self._cids = cids
    self._gate = gate
    self._gate_param = gate_param
    self._computing_host_ids = (
        computing_host_ids if computing_host_ids is not None else []
    )
    self._pre_allocated_qubits = pre_allocated_qubits

    if self._name is Constants.REC_HAMILTON:
        if hamiltonian is None or len(hamiltonian) == 0:
            raise Exception(
                "Must send non-empty Hamiltonian terms with this operation!"
            )
        self._hamiltonian = hamiltonian
    else:
        if hamiltonian and len(hamiltonian) > 0:
            warnings.warn(
                "You sent a list of Hamiltonians with an operation other than REC_HAMILTON"
            )
Returns the important things for a quantum operation

Args:
    name (str): Name of the operation
    qids (list): List of qubit IDs associated to the operation. The first ID in this
        list will be the ID of the computing host where the operation is being performed
    cids (list): List of classical bit IDs associated to the operation
    gate (str): Name of the single or the two-qubit gate
    gate_param (list): parameter for rotational gates
    computing_host_ids (list): List of associated ID/IDs of the computing host where
        the operation/gate is being performed. The first computing host in the list
        would be the one where the operation is being performed.
    pre_allocated_qubits (bool): Flag to indicate if this operation is being performed
        on a specific pre-allocated qubit (in case of EPR pair generation)
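A construction sketch; the import path, the operation name "SINGLE", and the host/qubit IDs are assumptions for illustration and must match Constants.OPERATION_NAMES and the actual distributed setup:

from interlinq.objects.operation import Operation

op = Operation(
    name="SINGLE",                   # assumed to be listed in Constants.OPERATION_NAMES
    qids=["q_0_0"],                  # the first qubit determines the executing host
    gate=Operation.X,
    computing_host_ids=["QPU_0"],
)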
https://github.com/interlin-q/interlin-q/blob/3441a023fedd113b8b3e68e6c6c07f6642c0344a/interlinq/objects/operation.py#L32-L83
from numbers import Complex from ..utils import Constants import warnings from typing import List, Optional, Tuple class Operation(object): I = "I" X = "X" Y = "Y" Z = "Z" CNOT = "cnot" CPHASE = "cphase" T = "T" H = "H" K = "K" RX = "rx" RY = "ry" RZ = "rz" CUSTOM = "custom_gate" CUSTOM_TWO_QUBIT = "custom_two_qubit_gate" CUSTOM_CONTROLLED = "custom_controlled_gate" MEASURE = "measure"
MIT License
docusign/docusign-python-client
docusign_esign/models/company.py
Company.bold_metadata
python
def bold_metadata(self, bold_metadata):
    self._bold_metadata = bold_metadata
Sets the bold_metadata of this Company.

:param bold_metadata: The bold_metadata of this Company.  # noqa: E501
:type: PropertyMetadata
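A minimal sketch of the generated setter in use; PropertyMetadata is another swagger-generated model in the same package, and the rights value is illustrative:

from docusign_esign import Company, PropertyMetadata

company_tab = Company()
company_tab.bold_metadata = PropertyMetadata(rights="editable")
print(company_tab.bold_metadata.rights)   # -> editable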
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/company.py#L924-L932
import pprint import re import six from docusign_esign.client.configuration import Configuration class Company(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'anchor_allow_white_space_in_characters': 'str', 'anchor_allow_white_space_in_characters_metadata': 'PropertyMetadata', 'anchor_case_sensitive': 'str', 'anchor_case_sensitive_metadata': 'PropertyMetadata', 'anchor_horizontal_alignment': 'str', 'anchor_horizontal_alignment_metadata': 'PropertyMetadata', 'anchor_ignore_if_not_present': 'str', 'anchor_ignore_if_not_present_metadata': 'PropertyMetadata', 'anchor_match_whole_word': 'str', 'anchor_match_whole_word_metadata': 'PropertyMetadata', 'anchor_string': 'str', 'anchor_string_metadata': 'PropertyMetadata', 'anchor_tab_processor_version': 'str', 'anchor_tab_processor_version_metadata': 'PropertyMetadata', 'anchor_units': 'str', 'anchor_units_metadata': 'PropertyMetadata', 'anchor_x_offset': 'str', 'anchor_x_offset_metadata': 'PropertyMetadata', 'anchor_y_offset': 'str', 'anchor_y_offset_metadata': 'PropertyMetadata', 'bold': 'str', 'bold_metadata': 'PropertyMetadata', 'conceal_value_on_document': 'str', 'conceal_value_on_document_metadata': 'PropertyMetadata', 'conditional_parent_label': 'str', 'conditional_parent_label_metadata': 'PropertyMetadata', 'conditional_parent_value': 'str', 'conditional_parent_value_metadata': 'PropertyMetadata', 'custom_tab_id': 'str', 'custom_tab_id_metadata': 'PropertyMetadata', 'disable_auto_size': 'str', 'disable_auto_size_metadata': 'PropertyMetadata', 'document_id': 'str', 'document_id_metadata': 'PropertyMetadata', 'error_details': 'ErrorDetails', 'font': 'str', 'font_color': 'str', 'font_color_metadata': 'PropertyMetadata', 'font_metadata': 'PropertyMetadata', 'font_size': 'str', 'font_size_metadata': 'PropertyMetadata', 'form_order': 'str', 'form_order_metadata': 'PropertyMetadata', 'form_page_label': 'str', 'form_page_label_metadata': 'PropertyMetadata', 'form_page_number': 'str', 'form_page_number_metadata': 'PropertyMetadata', 'height': 'str', 'height_metadata': 'PropertyMetadata', 'italic': 'str', 'italic_metadata': 'PropertyMetadata', 'locale_policy': 'LocalePolicyTab', 'locked': 'str', 'locked_metadata': 'PropertyMetadata', 'max_length': 'str', 'max_length_metadata': 'PropertyMetadata', 'merge_field': 'MergeField', 'merge_field_xml': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'original_value': 'str', 'original_value_metadata': 'PropertyMetadata', 'page_number': 'str', 'page_number_metadata': 'PropertyMetadata', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_id_guid_metadata': 'PropertyMetadata', 'recipient_id_metadata': 'PropertyMetadata', 'required': 'str', 'required_metadata': 'PropertyMetadata', 'smart_contract_information': 'SmartContractInformation', 'source': 'str', 'status': 'str', 'status_metadata': 'PropertyMetadata', 'tab_group_labels': 'list[str]', 'tab_group_labels_metadata': 'PropertyMetadata', 'tab_id': 'str', 'tab_id_metadata': 'PropertyMetadata', 'tab_label': 'str', 'tab_label_metadata': 'PropertyMetadata', 'tab_order': 'str', 'tab_order_metadata': 'PropertyMetadata', 'tab_type': 'str', 'tab_type_metadata': 'PropertyMetadata', 'template_locked': 'str', 'template_locked_metadata': 'PropertyMetadata', 'template_required': 'str', 'template_required_metadata': 'PropertyMetadata', 'tooltip': 'str', 'tool_tip_metadata': 
'PropertyMetadata', 'underline': 'str', 'underline_metadata': 'PropertyMetadata', 'value': 'str', 'value_metadata': 'PropertyMetadata', 'width': 'str', 'width_metadata': 'PropertyMetadata', 'x_position': 'str', 'x_position_metadata': 'PropertyMetadata', 'y_position': 'str', 'y_position_metadata': 'PropertyMetadata' } attribute_map = { 'anchor_allow_white_space_in_characters': 'anchorAllowWhiteSpaceInCharacters', 'anchor_allow_white_space_in_characters_metadata': 'anchorAllowWhiteSpaceInCharactersMetadata', 'anchor_case_sensitive': 'anchorCaseSensitive', 'anchor_case_sensitive_metadata': 'anchorCaseSensitiveMetadata', 'anchor_horizontal_alignment': 'anchorHorizontalAlignment', 'anchor_horizontal_alignment_metadata': 'anchorHorizontalAlignmentMetadata', 'anchor_ignore_if_not_present': 'anchorIgnoreIfNotPresent', 'anchor_ignore_if_not_present_metadata': 'anchorIgnoreIfNotPresentMetadata', 'anchor_match_whole_word': 'anchorMatchWholeWord', 'anchor_match_whole_word_metadata': 'anchorMatchWholeWordMetadata', 'anchor_string': 'anchorString', 'anchor_string_metadata': 'anchorStringMetadata', 'anchor_tab_processor_version': 'anchorTabProcessorVersion', 'anchor_tab_processor_version_metadata': 'anchorTabProcessorVersionMetadata', 'anchor_units': 'anchorUnits', 'anchor_units_metadata': 'anchorUnitsMetadata', 'anchor_x_offset': 'anchorXOffset', 'anchor_x_offset_metadata': 'anchorXOffsetMetadata', 'anchor_y_offset': 'anchorYOffset', 'anchor_y_offset_metadata': 'anchorYOffsetMetadata', 'bold': 'bold', 'bold_metadata': 'boldMetadata', 'conceal_value_on_document': 'concealValueOnDocument', 'conceal_value_on_document_metadata': 'concealValueOnDocumentMetadata', 'conditional_parent_label': 'conditionalParentLabel', 'conditional_parent_label_metadata': 'conditionalParentLabelMetadata', 'conditional_parent_value': 'conditionalParentValue', 'conditional_parent_value_metadata': 'conditionalParentValueMetadata', 'custom_tab_id': 'customTabId', 'custom_tab_id_metadata': 'customTabIdMetadata', 'disable_auto_size': 'disableAutoSize', 'disable_auto_size_metadata': 'disableAutoSizeMetadata', 'document_id': 'documentId', 'document_id_metadata': 'documentIdMetadata', 'error_details': 'errorDetails', 'font': 'font', 'font_color': 'fontColor', 'font_color_metadata': 'fontColorMetadata', 'font_metadata': 'fontMetadata', 'font_size': 'fontSize', 'font_size_metadata': 'fontSizeMetadata', 'form_order': 'formOrder', 'form_order_metadata': 'formOrderMetadata', 'form_page_label': 'formPageLabel', 'form_page_label_metadata': 'formPageLabelMetadata', 'form_page_number': 'formPageNumber', 'form_page_number_metadata': 'formPageNumberMetadata', 'height': 'height', 'height_metadata': 'heightMetadata', 'italic': 'italic', 'italic_metadata': 'italicMetadata', 'locale_policy': 'localePolicy', 'locked': 'locked', 'locked_metadata': 'lockedMetadata', 'max_length': 'maxLength', 'max_length_metadata': 'maxLengthMetadata', 'merge_field': 'mergeField', 'merge_field_xml': 'mergeFieldXml', 'name': 'name', 'name_metadata': 'nameMetadata', 'original_value': 'originalValue', 'original_value_metadata': 'originalValueMetadata', 'page_number': 'pageNumber', 'page_number_metadata': 'pageNumberMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_id_guid_metadata': 'recipientIdGuidMetadata', 'recipient_id_metadata': 'recipientIdMetadata', 'required': 'required', 'required_metadata': 'requiredMetadata', 'smart_contract_information': 'smartContractInformation', 'source': 'source', 'status': 'status', 
'status_metadata': 'statusMetadata', 'tab_group_labels': 'tabGroupLabels', 'tab_group_labels_metadata': 'tabGroupLabelsMetadata', 'tab_id': 'tabId', 'tab_id_metadata': 'tabIdMetadata', 'tab_label': 'tabLabel', 'tab_label_metadata': 'tabLabelMetadata', 'tab_order': 'tabOrder', 'tab_order_metadata': 'tabOrderMetadata', 'tab_type': 'tabType', 'tab_type_metadata': 'tabTypeMetadata', 'template_locked': 'templateLocked', 'template_locked_metadata': 'templateLockedMetadata', 'template_required': 'templateRequired', 'template_required_metadata': 'templateRequiredMetadata', 'tooltip': 'tooltip', 'tool_tip_metadata': 'toolTipMetadata', 'underline': 'underline', 'underline_metadata': 'underlineMetadata', 'value': 'value', 'value_metadata': 'valueMetadata', 'width': 'width', 'width_metadata': 'widthMetadata', 'x_position': 'xPosition', 'x_position_metadata': 'xPositionMetadata', 'y_position': 'yPosition', 'y_position_metadata': 'yPositionMetadata' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._anchor_allow_white_space_in_characters = None self._anchor_allow_white_space_in_characters_metadata = None self._anchor_case_sensitive = None self._anchor_case_sensitive_metadata = None self._anchor_horizontal_alignment = None self._anchor_horizontal_alignment_metadata = None self._anchor_ignore_if_not_present = None self._anchor_ignore_if_not_present_metadata = None self._anchor_match_whole_word = None self._anchor_match_whole_word_metadata = None self._anchor_string = None self._anchor_string_metadata = None self._anchor_tab_processor_version = None self._anchor_tab_processor_version_metadata = None self._anchor_units = None self._anchor_units_metadata = None self._anchor_x_offset = None self._anchor_x_offset_metadata = None self._anchor_y_offset = None self._anchor_y_offset_metadata = None self._bold = None self._bold_metadata = None self._conceal_value_on_document = None self._conceal_value_on_document_metadata = None self._conditional_parent_label = None self._conditional_parent_label_metadata = None self._conditional_parent_value = None self._conditional_parent_value_metadata = None self._custom_tab_id = None self._custom_tab_id_metadata = None self._disable_auto_size = None self._disable_auto_size_metadata = None self._document_id = None self._document_id_metadata = None self._error_details = None self._font = None self._font_color = None self._font_color_metadata = None self._font_metadata = None self._font_size = None self._font_size_metadata = None self._form_order = None self._form_order_metadata = None self._form_page_label = None self._form_page_label_metadata = None self._form_page_number = None self._form_page_number_metadata = None self._height = None self._height_metadata = None self._italic = None self._italic_metadata = None self._locale_policy = None self._locked = None self._locked_metadata = None self._max_length = None self._max_length_metadata = None self._merge_field = None self._merge_field_xml = None self._name = None self._name_metadata = None self._original_value = None self._original_value_metadata = None self._page_number = None self._page_number_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_id_guid_metadata = None self._recipient_id_metadata = None self._required = None self._required_metadata = None self._smart_contract_information = None self._source = None self._status = None self._status_metadata = None self._tab_group_labels = 
None self._tab_group_labels_metadata = None self._tab_id = None self._tab_id_metadata = None self._tab_label = None self._tab_label_metadata = None self._tab_order = None self._tab_order_metadata = None self._tab_type = None self._tab_type_metadata = None self._template_locked = None self._template_locked_metadata = None self._template_required = None self._template_required_metadata = None self._tooltip = None self._tool_tip_metadata = None self._underline = None self._underline_metadata = None self._value = None self._value_metadata = None self._width = None self._width_metadata = None self._x_position = None self._x_position_metadata = None self._y_position = None self._y_position_metadata = None self.discriminator = None setattr(self, "_{}".format('anchor_allow_white_space_in_characters'), kwargs.get('anchor_allow_white_space_in_characters', None)) setattr(self, "_{}".format('anchor_allow_white_space_in_characters_metadata'), kwargs.get('anchor_allow_white_space_in_characters_metadata', None)) setattr(self, "_{}".format('anchor_case_sensitive'), kwargs.get('anchor_case_sensitive', None)) setattr(self, "_{}".format('anchor_case_sensitive_metadata'), kwargs.get('anchor_case_sensitive_metadata', None)) setattr(self, "_{}".format('anchor_horizontal_alignment'), kwargs.get('anchor_horizontal_alignment', None)) setattr(self, "_{}".format('anchor_horizontal_alignment_metadata'), kwargs.get('anchor_horizontal_alignment_metadata', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present'), kwargs.get('anchor_ignore_if_not_present', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present_metadata'), kwargs.get('anchor_ignore_if_not_present_metadata', None)) setattr(self, "_{}".format('anchor_match_whole_word'), kwargs.get('anchor_match_whole_word', None)) setattr(self, "_{}".format('anchor_match_whole_word_metadata'), kwargs.get('anchor_match_whole_word_metadata', None)) setattr(self, "_{}".format('anchor_string'), kwargs.get('anchor_string', None)) setattr(self, "_{}".format('anchor_string_metadata'), kwargs.get('anchor_string_metadata', None)) setattr(self, "_{}".format('anchor_tab_processor_version'), kwargs.get('anchor_tab_processor_version', None)) setattr(self, "_{}".format('anchor_tab_processor_version_metadata'), kwargs.get('anchor_tab_processor_version_metadata', None)) setattr(self, "_{}".format('anchor_units'), kwargs.get('anchor_units', None)) setattr(self, "_{}".format('anchor_units_metadata'), kwargs.get('anchor_units_metadata', None)) setattr(self, "_{}".format('anchor_x_offset'), kwargs.get('anchor_x_offset', None)) setattr(self, "_{}".format('anchor_x_offset_metadata'), kwargs.get('anchor_x_offset_metadata', None)) setattr(self, "_{}".format('anchor_y_offset'), kwargs.get('anchor_y_offset', None)) setattr(self, "_{}".format('anchor_y_offset_metadata'), kwargs.get('anchor_y_offset_metadata', None)) setattr(self, "_{}".format('bold'), kwargs.get('bold', None)) setattr(self, "_{}".format('bold_metadata'), kwargs.get('bold_metadata', None)) setattr(self, "_{}".format('conceal_value_on_document'), kwargs.get('conceal_value_on_document', None)) setattr(self, "_{}".format('conceal_value_on_document_metadata'), kwargs.get('conceal_value_on_document_metadata', None)) setattr(self, "_{}".format('conditional_parent_label'), kwargs.get('conditional_parent_label', None)) setattr(self, "_{}".format('conditional_parent_label_metadata'), kwargs.get('conditional_parent_label_metadata', None)) setattr(self, "_{}".format('conditional_parent_value'), 
kwargs.get('conditional_parent_value', None)) setattr(self, "_{}".format('conditional_parent_value_metadata'), kwargs.get('conditional_parent_value_metadata', None)) setattr(self, "_{}".format('custom_tab_id'), kwargs.get('custom_tab_id', None)) setattr(self, "_{}".format('custom_tab_id_metadata'), kwargs.get('custom_tab_id_metadata', None)) setattr(self, "_{}".format('disable_auto_size'), kwargs.get('disable_auto_size', None)) setattr(self, "_{}".format('disable_auto_size_metadata'), kwargs.get('disable_auto_size_metadata', None)) setattr(self, "_{}".format('document_id'), kwargs.get('document_id', None)) setattr(self, "_{}".format('document_id_metadata'), kwargs.get('document_id_metadata', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('font'), kwargs.get('font', None)) setattr(self, "_{}".format('font_color'), kwargs.get('font_color', None)) setattr(self, "_{}".format('font_color_metadata'), kwargs.get('font_color_metadata', None)) setattr(self, "_{}".format('font_metadata'), kwargs.get('font_metadata', None)) setattr(self, "_{}".format('font_size'), kwargs.get('font_size', None)) setattr(self, "_{}".format('font_size_metadata'), kwargs.get('font_size_metadata', None)) setattr(self, "_{}".format('form_order'), kwargs.get('form_order', None)) setattr(self, "_{}".format('form_order_metadata'), kwargs.get('form_order_metadata', None)) setattr(self, "_{}".format('form_page_label'), kwargs.get('form_page_label', None)) setattr(self, "_{}".format('form_page_label_metadata'), kwargs.get('form_page_label_metadata', None)) setattr(self, "_{}".format('form_page_number'), kwargs.get('form_page_number', None)) setattr(self, "_{}".format('form_page_number_metadata'), kwargs.get('form_page_number_metadata', None)) setattr(self, "_{}".format('height'), kwargs.get('height', None)) setattr(self, "_{}".format('height_metadata'), kwargs.get('height_metadata', None)) setattr(self, "_{}".format('italic'), kwargs.get('italic', None)) setattr(self, "_{}".format('italic_metadata'), kwargs.get('italic_metadata', None)) setattr(self, "_{}".format('locale_policy'), kwargs.get('locale_policy', None)) setattr(self, "_{}".format('locked'), kwargs.get('locked', None)) setattr(self, "_{}".format('locked_metadata'), kwargs.get('locked_metadata', None)) setattr(self, "_{}".format('max_length'), kwargs.get('max_length', None)) setattr(self, "_{}".format('max_length_metadata'), kwargs.get('max_length_metadata', None)) setattr(self, "_{}".format('merge_field'), kwargs.get('merge_field', None)) setattr(self, "_{}".format('merge_field_xml'), kwargs.get('merge_field_xml', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('original_value'), kwargs.get('original_value', None)) setattr(self, "_{}".format('original_value_metadata'), kwargs.get('original_value_metadata', None)) setattr(self, "_{}".format('page_number'), kwargs.get('page_number', None)) setattr(self, "_{}".format('page_number_metadata'), kwargs.get('page_number_metadata', None)) setattr(self, "_{}".format('recipient_id'), kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_id_guid_metadata'), kwargs.get('recipient_id_guid_metadata', None)) setattr(self, "_{}".format('recipient_id_metadata'), kwargs.get('recipient_id_metadata', None)) setattr(self, 
"_{}".format('required'), kwargs.get('required', None)) setattr(self, "_{}".format('required_metadata'), kwargs.get('required_metadata', None)) setattr(self, "_{}".format('smart_contract_information'), kwargs.get('smart_contract_information', None)) setattr(self, "_{}".format('source'), kwargs.get('source', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_metadata'), kwargs.get('status_metadata', None)) setattr(self, "_{}".format('tab_group_labels'), kwargs.get('tab_group_labels', None)) setattr(self, "_{}".format('tab_group_labels_metadata'), kwargs.get('tab_group_labels_metadata', None)) setattr(self, "_{}".format('tab_id'), kwargs.get('tab_id', None)) setattr(self, "_{}".format('tab_id_metadata'), kwargs.get('tab_id_metadata', None)) setattr(self, "_{}".format('tab_label'), kwargs.get('tab_label', None)) setattr(self, "_{}".format('tab_label_metadata'), kwargs.get('tab_label_metadata', None)) setattr(self, "_{}".format('tab_order'), kwargs.get('tab_order', None)) setattr(self, "_{}".format('tab_order_metadata'), kwargs.get('tab_order_metadata', None)) setattr(self, "_{}".format('tab_type'), kwargs.get('tab_type', None)) setattr(self, "_{}".format('tab_type_metadata'), kwargs.get('tab_type_metadata', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_locked_metadata'), kwargs.get('template_locked_metadata', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('template_required_metadata'), kwargs.get('template_required_metadata', None)) setattr(self, "_{}".format('tooltip'), kwargs.get('tooltip', None)) setattr(self, "_{}".format('tool_tip_metadata'), kwargs.get('tool_tip_metadata', None)) setattr(self, "_{}".format('underline'), kwargs.get('underline', None)) setattr(self, "_{}".format('underline_metadata'), kwargs.get('underline_metadata', None)) setattr(self, "_{}".format('value'), kwargs.get('value', None)) setattr(self, "_{}".format('value_metadata'), kwargs.get('value_metadata', None)) setattr(self, "_{}".format('width'), kwargs.get('width', None)) setattr(self, "_{}".format('width_metadata'), kwargs.get('width_metadata', None)) setattr(self, "_{}".format('x_position'), kwargs.get('x_position', None)) setattr(self, "_{}".format('x_position_metadata'), kwargs.get('x_position_metadata', None)) setattr(self, "_{}".format('y_position'), kwargs.get('y_position', None)) setattr(self, "_{}".format('y_position_metadata'), kwargs.get('y_position_metadata', None)) @property def anchor_allow_white_space_in_characters(self): return self._anchor_allow_white_space_in_characters @anchor_allow_white_space_in_characters.setter def anchor_allow_white_space_in_characters(self, anchor_allow_white_space_in_characters): self._anchor_allow_white_space_in_characters = anchor_allow_white_space_in_characters @property def anchor_allow_white_space_in_characters_metadata(self): return self._anchor_allow_white_space_in_characters_metadata @anchor_allow_white_space_in_characters_metadata.setter def anchor_allow_white_space_in_characters_metadata(self, anchor_allow_white_space_in_characters_metadata): self._anchor_allow_white_space_in_characters_metadata = anchor_allow_white_space_in_characters_metadata @property def anchor_case_sensitive(self): return self._anchor_case_sensitive @anchor_case_sensitive.setter def anchor_case_sensitive(self, anchor_case_sensitive): self._anchor_case_sensitive = 
anchor_case_sensitive @property def anchor_case_sensitive_metadata(self): return self._anchor_case_sensitive_metadata @anchor_case_sensitive_metadata.setter def anchor_case_sensitive_metadata(self, anchor_case_sensitive_metadata): self._anchor_case_sensitive_metadata = anchor_case_sensitive_metadata @property def anchor_horizontal_alignment(self): return self._anchor_horizontal_alignment @anchor_horizontal_alignment.setter def anchor_horizontal_alignment(self, anchor_horizontal_alignment): self._anchor_horizontal_alignment = anchor_horizontal_alignment @property def anchor_horizontal_alignment_metadata(self): return self._anchor_horizontal_alignment_metadata @anchor_horizontal_alignment_metadata.setter def anchor_horizontal_alignment_metadata(self, anchor_horizontal_alignment_metadata): self._anchor_horizontal_alignment_metadata = anchor_horizontal_alignment_metadata @property def anchor_ignore_if_not_present(self): return self._anchor_ignore_if_not_present @anchor_ignore_if_not_present.setter def anchor_ignore_if_not_present(self, anchor_ignore_if_not_present): self._anchor_ignore_if_not_present = anchor_ignore_if_not_present @property def anchor_ignore_if_not_present_metadata(self): return self._anchor_ignore_if_not_present_metadata @anchor_ignore_if_not_present_metadata.setter def anchor_ignore_if_not_present_metadata(self, anchor_ignore_if_not_present_metadata): self._anchor_ignore_if_not_present_metadata = anchor_ignore_if_not_present_metadata @property def anchor_match_whole_word(self): return self._anchor_match_whole_word @anchor_match_whole_word.setter def anchor_match_whole_word(self, anchor_match_whole_word): self._anchor_match_whole_word = anchor_match_whole_word @property def anchor_match_whole_word_metadata(self): return self._anchor_match_whole_word_metadata @anchor_match_whole_word_metadata.setter def anchor_match_whole_word_metadata(self, anchor_match_whole_word_metadata): self._anchor_match_whole_word_metadata = anchor_match_whole_word_metadata @property def anchor_string(self): return self._anchor_string @anchor_string.setter def anchor_string(self, anchor_string): self._anchor_string = anchor_string @property def anchor_string_metadata(self): return self._anchor_string_metadata @anchor_string_metadata.setter def anchor_string_metadata(self, anchor_string_metadata): self._anchor_string_metadata = anchor_string_metadata @property def anchor_tab_processor_version(self): return self._anchor_tab_processor_version @anchor_tab_processor_version.setter def anchor_tab_processor_version(self, anchor_tab_processor_version): self._anchor_tab_processor_version = anchor_tab_processor_version @property def anchor_tab_processor_version_metadata(self): return self._anchor_tab_processor_version_metadata @anchor_tab_processor_version_metadata.setter def anchor_tab_processor_version_metadata(self, anchor_tab_processor_version_metadata): self._anchor_tab_processor_version_metadata = anchor_tab_processor_version_metadata @property def anchor_units(self): return self._anchor_units @anchor_units.setter def anchor_units(self, anchor_units): self._anchor_units = anchor_units @property def anchor_units_metadata(self): return self._anchor_units_metadata @anchor_units_metadata.setter def anchor_units_metadata(self, anchor_units_metadata): self._anchor_units_metadata = anchor_units_metadata @property def anchor_x_offset(self): return self._anchor_x_offset @anchor_x_offset.setter def anchor_x_offset(self, anchor_x_offset): self._anchor_x_offset = anchor_x_offset @property def 
anchor_x_offset_metadata(self): return self._anchor_x_offset_metadata @anchor_x_offset_metadata.setter def anchor_x_offset_metadata(self, anchor_x_offset_metadata): self._anchor_x_offset_metadata = anchor_x_offset_metadata @property def anchor_y_offset(self): return self._anchor_y_offset @anchor_y_offset.setter def anchor_y_offset(self, anchor_y_offset): self._anchor_y_offset = anchor_y_offset @property def anchor_y_offset_metadata(self): return self._anchor_y_offset_metadata @anchor_y_offset_metadata.setter def anchor_y_offset_metadata(self, anchor_y_offset_metadata): self._anchor_y_offset_metadata = anchor_y_offset_metadata @property def bold(self): return self._bold @bold.setter def bold(self, bold): self._bold = bold @property def bold_metadata(self): return self._bold_metadata @bold_metadata.setter
MIT License
identitypython/pyjwkest
src/jwkest/extra.py
pkcs5trim
python
def pkcs5trim(x):
    n = unpack('B', x[-1:])[0]
    if n > 16:
        raise Exception("Mal-formed PKCS#5 padding")
    return x[:-n]
Trim PKCS#5 padding from an octet string :type x: bytes :rtype: bytes
https://github.com/identitypython/pyjwkest/blob/880fe789ff74a6b927151ddba6e9796fe88a7ce1/src/jwkest/extra.py#L46-L58
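
A minimal round-trip sketch for pkcs5trim, pairing it with the pkcs5pad helper defined in the same module; the plaintext value is invented.

    from jwkest.extra import pkcs5pad, pkcs5trim

    padded = pkcs5pad(b'secret')             # pad up to the next 16-byte boundary
    assert len(padded) % 16 == 0
    assert pkcs5trim(padded) == b'secret'    # trimming recovers the original octets
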
from __future__ import division from jwkest.jws import JWSException try: from builtins import bytes except ImportError: pass from math import ceil from struct import pack, unpack from Cryptodome.Cipher import AES from Cryptodome.Hash import SHA256 from Cryptodome.Hash import SHA384 from Cryptodome.Hash import SHA512 from Cryptodome.Hash import HMAC LENMET = { 32: (16, SHA256), 48: (24, SHA384), 64: (32, SHA512) } class VerificationFailure(JWSException): pass def pkcs5pad(x): n = 16 - len(x) % 16 if n == 0: n = 16 ns = pack('B', n) return x + (ns * n)
Apache License 2.0
gussand/anubis
api/anubis/utils/github/parse.py
parse_github_org_name
python
def parse_github_org_name(org_url: str) -> Optional[str]:
    r = parse("https://github.com/{}", org_url)
    if r is None:
        return ''
    # the pattern captures a single positional field, so the org name is at index 0
    return r[0].strip().rstrip('/')
Get org name from a github url "https://github.com/os3224" -> "os3224"
https://github.com/gussand/anubis/blob/5ff4e293b84049af92b53b3bcc264c7782ffb9e6/api/anubis/utils/github/parse.py#L6-L18
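
Illustrative calls for parse_github_org_name; the org name and the non-GitHub URL are made-up inputs, the import path is inferred from the file layout, and the parse package must be installed.

    from anubis.utils.github.parse import parse_github_org_name   # assumed import path

    print(parse_github_org_name('https://github.com/os3224'))       # 'os3224'
    print(parse_github_org_name('https://example.com/not-github'))  # '' (pattern does not match)
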
from typing import Optional from parse import parse
MIT License
botfront/rasa-for-botfront
rasa/core/channels/socketio.py
SocketIOOutput.send_image_url
python
async def send_image_url(
    self, recipient_id: Text, image: Text, **kwargs: Any
) -> None:
    message = {"attachment": {"type": "image", "payload": {"src": image}}}
    await self._send_message(recipient_id, message)
Sends an image to the output
https://github.com/botfront/rasa-for-botfront/blob/6e0e48d0059e197b5f686df1e27935769c3641b7/rasa/core/channels/socketio.py#L48-L54
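
A hedged usage sketch for send_image_url; the event name, session id, and image URL are placeholders, and sio stands for an already-attached socketio.AsyncServer.

    from socketio import AsyncServer
    from rasa.core.channels.socketio import SocketIOOutput

    async def send_diagram(sio: AsyncServer) -> None:
        output = SocketIOOutput(sio, bot_message_evt='bot_uttered')
        # emits {"attachment": {"type": "image", "payload": {"src": ...}}} on the bot event
        await output.send_image_url('session-123', 'https://example.com/diagram.png')
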
import logging import uuid from typing import Any, Awaitable, Callable, Dict, Iterable, List, Optional, Text from rasa.core.channels.channel import InputChannel, OutputChannel, UserMessage import rasa.shared.utils.io from sanic import Blueprint, response from sanic.request import Request from sanic.response import HTTPResponse from socketio import AsyncServer logger = logging.getLogger(__name__) class SocketBlueprint(Blueprint): def __init__(self, sio: AsyncServer, socketio_path, *args, **kwargs): self.sio = sio self.socketio_path = socketio_path super().__init__(*args, **kwargs) def register(self, app, options) -> None: self.sio.attach(app, self.socketio_path) super().register(app, options) class SocketIOOutput(OutputChannel): @classmethod def name(cls) -> Text: return "socketio" def __init__(self, sio: AsyncServer, bot_message_evt: Text) -> None: self.sio = sio self.bot_message_evt = bot_message_evt async def _send_message(self, socket_id: Text, response: Any) -> None: await self.sio.emit(self.bot_message_evt, response, room=socket_id) async def send_text_message( self, recipient_id: Text, text: Text, **kwargs: Any ) -> None: for message_part in text.strip().split("\n\n"): await self._send_message(recipient_id, {"text": message_part})
Apache License 2.0
sfanous/pyecobee
pyecobee/objects/thermostat.py
Thermostat.house_details
python
def house_details(self, house_details): self._house_details = house_details
Sets the house_details attribute of this Thermostat instance. :param house_details: The house_details value to set for the house_details attribute of this Thermostat instance. :type: HouseDetails
https://github.com/sfanous/pyecobee/blob/3d6b4aec3c6bc9b796aa3d3fd6626909ffdbac13/pyecobee/objects/thermostat.py#L627-L636
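
A minimal sketch of using the house_details setter; the import paths and the argument-free HouseDetails() call are assumptions about the pyecobee API rather than verified usage.

    from pyecobee.objects.house_details import HouseDetails   # assumed module path
    from pyecobee.objects.thermostat import Thermostat

    thermostat = Thermostat(identifier='123456789012')
    thermostat.house_details = HouseDetails()   # routed through the property setter shown above
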
from pyecobee.ecobee_object import EcobeeObject class Thermostat(EcobeeObject): __slots__ = [ '_identifier', '_name', '_thermostat_rev', '_is_registered', '_model_number', '_brand', '_features', '_last_modified', '_thermostat_time', '_utc_time', '_audio', '_alerts', '_reminders', '_settings', '_runtime', '_extended_runtime', '_electricity', '_devices', '_location', '_energy', '_technician', '_utility', '_management', '_weather', '_events', '_program', '_house_details', '_oem_cfg', '_equipment_status', '_notification_settings', '_privacy', '_version', '_security_settings', '_filter_subscription', '_remote_sensors', ] attribute_name_map = { 'identifier': 'identifier', 'name': 'name', 'thermostat_rev': 'thermostatRev', 'thermostatRev': 'thermostat_rev', 'is_registered': 'isRegistered', 'isRegistered': 'is_registered', 'model_number': 'modelNumber', 'modelNumber': 'model_number', 'brand': 'brand', 'features': 'features', 'last_modified': 'lastModified', 'lastModified': 'last_modified', 'thermostat_time': 'thermostatTime', 'thermostatTime': 'thermostat_time', 'utc_time': 'utcTime', 'utcTime': 'utc_time', 'audio': 'audio', 'alerts': 'alerts', 'reminders': 'reminders', 'settings': 'settings', 'runtime': 'runtime', 'extended_runtime': 'extendedRuntime', 'extendedRuntime': 'extended_runtime', 'electricity': 'electricity', 'devices': 'devices', 'location': 'location', 'energy': 'energy', 'technician': 'technician', 'utility': 'utility', 'management': 'management', 'weather': 'weather', 'events': 'events', 'program': 'program', 'house_details': 'houseDetails', 'houseDetails': 'house_details', 'oem_cfg': 'oemCfg', 'oemCfg': 'oem_cfg', 'equipment_status': 'equipmentStatus', 'equipmentStatus': 'equipment_status', 'notification_settings': 'notificationSettings', 'notificationSettings': 'notification_settings', 'privacy': 'privacy', 'version': 'version', 'security_settings': 'securitySettings', 'securitySettings': 'security_settings', 'filter_subscription': 'filterSubscription', 'filterSubscription': 'filter_subscription', 'remote_sensors': 'remoteSensors', 'remoteSensors': 'remote_sensors', } attribute_type_map = { 'identifier': 'six.text_type', 'name': 'six.text_type', 'thermostat_rev': 'six.text_type', 'is_registered': 'bool', 'model_number': 'six.text_type', 'brand': 'six.text_type', 'features': 'six.text_type', 'last_modified': 'six.text_type', 'thermostat_time': 'six.text_type', 'utc_time': 'six.text_type', 'audio': 'Audio', 'alerts': 'List[Alert]', 'reminders': 'List[ThermostatReminder2]', 'settings': 'Settings', 'runtime': 'Runtime', 'extended_runtime': 'ExtendedRuntime', 'electricity': 'Electricity', 'devices': 'List[Device]', 'location': 'Location', 'energy': 'Energy', 'technician': 'Technician', 'utility': 'Utility', 'management': 'Management', 'weather': 'Weather', 'events': 'List[Event]', 'program': 'Program', 'house_details': 'HouseDetails', 'oem_cfg': 'ThermostatOemCfg', 'equipment_status': 'six.text_type', 'notification_settings': 'NotificationSettings', 'privacy': 'ThermostatPrivacy', 'version': 'Version', 'security_settings': 'SecuritySettings', 'filter_subscription': 'ApiFilterSubscription', 'remote_sensors': 'List[RemoteSensor]', } def __init__( self, identifier, name=None, thermostat_rev=None, is_registered=None, model_number=None, brand=None, features=None, last_modified=None, thermostat_time=None, utc_time=None, audio=None, alerts=None, reminders=None, settings=None, runtime=None, extended_runtime=None, electricity=None, devices=None, location=None, energy=None, technician=None, 
utility=None, management=None, weather=None, events=None, program=None, house_details=None, oem_cfg=None, equipment_status=None, notification_settings=None, privacy=None, version=None, security_settings=None, filter_subscription=None, remote_sensors=None, ): self._identifier = identifier self._name = name self._thermostat_rev = thermostat_rev self._is_registered = is_registered self._model_number = model_number self._brand = brand self._features = features self._last_modified = last_modified self._thermostat_time = thermostat_time self._utc_time = utc_time self._audio = audio self._alerts = alerts self._reminders = reminders self._settings = settings self._runtime = runtime self._extended_runtime = extended_runtime self._electricity = electricity self._devices = devices self._location = location self._energy = energy self._technician = technician self._utility = utility self._management = management self._weather = weather self._events = events self._program = program self._house_details = house_details self._oem_cfg = oem_cfg self._equipment_status = equipment_status self._notification_settings = notification_settings self._privacy = privacy self._version = version self._security_settings = security_settings self._filter_subscription = filter_subscription self._remote_sensors = remote_sensors @property def identifier(self): return self._identifier @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def thermostat_rev(self): return self._thermostat_rev @property def is_registered(self): return self._is_registered @property def model_number(self): return self._model_number @property def brand(self): return self._brand @property def features(self): return self._features @property def last_modified(self): return self._last_modified @property def thermostat_time(self): return self._thermostat_time @property def utc_time(self): return self._utc_time @property def audio(self): return self._audio @audio.setter def audio(self, audio): self._audio = audio @property def alerts(self): return self._alerts @property def reminders(self): return self._reminders @property def settings(self): return self._settings @settings.setter def settings(self, settings): self._settings = settings @property def runtime(self): return self._runtime @property def extended_runtime(self): return self._extended_runtime @property def electricity(self): return self._electricity @property def devices(self): return self._devices @property def location(self): return self._location @location.setter def location(self, location): self._location = location @property def energy(self): return self._energy @energy.setter def energy(self, energy): self._energy = energy @property def technician(self): return self._technician @property def utility(self): return self._utility @property def management(self): return self._management @property def weather(self): return self._weather @property def events(self): return self._events @property def program(self): return self._program @program.setter def program(self, program): self._program = program @property def house_details(self): return self._house_details @house_details.setter
MIT License
sbg/mitty
mitty/lib/vcfio.py
prepare_variant_file
python
def prepare_variant_file(fname_in, sample, bed_fname, fname_out, write_mode='w'): logger.debug('Starting filtering ...') t0 = time.time() mode = 'rb' if fname_in.endswith('bcf') else 'r' vcf_in = pysam.VariantFile(fname_in, mode) vcf_in.subset_samples([sample]) vcf_out = pysam.VariantFile(fname_out, mode=write_mode, header=vcf_in.header) processed_cnt, exclude_cnt, include_cnt = 0, 0, 0 contig_dict = set() for region in read_bed(bed_fname): logger.debug('Filtering {}'.format(region)) n, this_include_cnt = -1, 0 empty_gt = None if region[0] not in contig_dict: empty_gt = fetch_first_variant_in_contig_as_empty(vcf_in, region[0]) contig_dict.add(region[0]) v_check = UnusableVariantFilter(sniff_ploidy(vcf_in, contig=region[0])) for n, v in enumerate(vcf_in.fetch(contig=region[0], start=region[1], stop=region[2])): if not any(v.samples.values()[0]['GT']): continue if v_check.unusable(v): exclude_cnt += 1 continue vcf_out.write(v) this_include_cnt += 1 if this_include_cnt == 0 and empty_gt is not None: vcf_out.write(empty_gt) include_cnt += this_include_cnt processed_cnt += (n + 1) logger.debug('Processed {} variants'.format(processed_cnt)) logger.debug('Sample had {} variants'.format(exclude_cnt + include_cnt)) logger.debug('Discarded {} variants'.format(exclude_cnt)) t1 = time.time() logger.debug('Took {} s'.format(t1 - t0))
Prepare a variant file containing only the given sample, with complex and illegal variant calls filtered out, restricted to the regions in the given BED file :param fname_in: input VCF/BCF file name :param sample: name of the sample to retain :param bed_fname: BED file of regions to process :param fname_out: output VCF file name :return: None - output is written to fname_out
https://github.com/sbg/mitty/blob/e299649f71b78da036b25a96cec3440764095c87/mitty/lib/vcfio.py#L207-L255
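
A hypothetical invocation of prepare_variant_file; the file names and sample id are placeholders, and the import path follows the entry above.

    import logging
    from mitty.lib.vcfio import prepare_variant_file

    logging.basicConfig(level=logging.DEBUG)   # progress is reported via logger.debug
    prepare_variant_file(
        fname_in='cohort.vcf.gz',
        sample='NA12878',
        bed_fname='capture-regions.bed',
        fname_out='NA12878-filtered.vcf')
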
import logging import time import pysam from mitty.lib.bedfile import read_bed logger = logging.getLogger(__name__) class Variant(object): __slots__ = ('pos', 'ref', 'alt', 'cigarop', 'oplen') def __init__(self, pos, ref, alt, cigarop, oplen): self.pos = pos self.ref = ref self.alt = alt self.cigarop = cigarop self.oplen = oplen def tuple(self): return self.pos, self.ref, self.alt, self.cigarop, self.oplen def __repr__(self): return self.tuple().__repr__() def load_variant_file(fname, sample, bed_fname): mode = 'rb' if fname.endswith('bcf') else 'r' vcf_fp = pysam.VariantFile(fname, mode) vcf_fp.subset_samples([sample]) return [ split_copies(region, [v for v in vcf_fp.fetch(contig=region[0], start=region[1], stop=region[2])], sniff_ploidy(vcf_fp, region[0])) for region in read_bed(bed_fname) ] def sniff_ploidy(vcf_fp, contig): v = next(vcf_fp.fetch(contig=contig), None) ploidy = 2 if v is not None: ploidy = len(v.samples[0]['GT']) logger.debug( 'Contig: {}, ploidy: {} {}'.format(contig, ploidy, '(Assumed. Contig was empty)' if v is None else '')) return ploidy def fetch_first_variant_in_contig_as_empty(vcf_fp, contig): v = next(vcf_fp.fetch(contig=contig), None) if v is not None: v.samples[0]['GT'] = (0,) * len(v.samples[0]['GT']) return v def split_copies(region, vl, ploidy): return { 'region': region, 'v': [ parse_vl(vl, cpy=cpy, ploidy=ploidy) for cpy in range(ploidy) ] } def parse_vl(vl, cpy, ploidy): v_check = UnusableVariantFilter(ploidy) return list(filter(None, (parse(v, cpy=cpy, v_check=v_check) for v in vl))) def parse(v, cpy, v_check): if v.samples[0]['GT'][cpy] == 0: return None if v_check.unusable(v): logger.error("Unusable variants present in VCF. Please filter or refactor these.") raise ValueError("Unusable variants present in VCF. Please filter or refactor these.") alt = v.samples[0].alleles[cpy] l_r, l_a = len(v.ref), len(alt) if l_r == 1: if l_a == 1: op, op_len = 'X', 0 else: op, op_len = 'I', l_a - l_r elif l_a == 1: op, op_len = 'D', l_r - l_a else: raise ValueError("Complex variants present in VCF. 
Please filter or refactor these.") return Variant(v.pos, v.ref, v.samples[0].alleles[cpy], op, op_len) class UnusableVariantFilter: def __init__(self, ploidy): self.p_overlap = [0] * ploidy self.last_variant = [(0, '', '') for _ in range(ploidy)] def unusable(self, v): var = v.samples.values()[0] is_unusable = self._complex_variant(v) or self._angle_bracketed_id(v, var) or self._breakend_replacement(v) or self._illegal_overlap(v, var, self.p_overlap) or self._duplicate_variant(v, var, self.last_variant) if not is_unusable: for n, (g, alt) in enumerate(zip(var['GT'], var.alleles)): if g: self.p_overlap[n] = v.stop - 1 self.last_variant[n] = (v.pos, v.ref, alt) return is_unusable @staticmethod def _complex_variant(_v): for alt in _v.alts: if _v.rlen > 1 and len(alt) > 1: logger.debug('Complex variant {}:{} {} -> {}'.format(_v.contig, _v.pos, _v.ref, _v.alts)) return True return False @staticmethod def _angle_bracketed_id(_v, var): for alt in var.alleles: if alt[0] == '<': logger.debug('Angle bracketed variant entry {}:{} {} -> {}'.format(_v.contig, _v.pos, _v.ref, var.alleles)) return True return False @staticmethod def _breakend_replacement(_v): if _v.info.get('SVTYPE', None) == 'BND': logger.debug('Breakend entry {}:{} {} -> {}'.format(_v.contig, _v.pos, _v.ref, _v.alts)) return True return False @staticmethod def _illegal_overlap(_v, var, _p_overlap): is_illegal = False for n, (g, alt, po) in enumerate(zip(var['GT'], var.alleles, _p_overlap)): if g: if len(alt) == len(_v.ref) == 1: start = _v.start else: start = _v.start + 1 if start <= po: is_illegal = True logger.debug('Illegal overlap {}:{} {} -> {} (previous variant ends at {})'.format(_v.contig, _v.pos, _v.ref, _v.alts, po + 1)) break return is_illegal @staticmethod def _duplicate_variant(_v, var, _last_variant): is_duplicate = False for n, (g, alt, lv) in enumerate(zip(var['GT'], var.alleles, _last_variant)): if g: if (lv[0] == _v.pos) & (lv[1] == _v.ref) & (lv[2] == alt): is_duplicate = True logger.debug( 'Duplicate line {}:{} {} -> {}'.format(_v.contig, _v.pos, _v.ref, _v.alts)) break return is_duplicate
Apache License 2.0
ratschlab/dpsom
som_vae/somvae_model.py
SOMVAE.loss
python
def loss(self):
    loss = (self.loss_reconstruction + self.alpha*self.loss_commit + self.beta*self.loss_som +
            self.gamma*self.loss_probabilities + self.tau*self.loss_z_prob)
    tf.summary.scalar("loss", loss)
    return loss
Aggregates the loss terms into the total loss.
https://github.com/ratschlab/dpsom/blob/eb41e7d0aae3f213b9ad19f727ea4483543af99a/som_vae/somvae_model.py#L399-L404
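
A back-of-the-envelope illustration of how the weighting in the loss property combines the individual terms; all numbers are invented.

    alpha, beta, gamma, tau = 1.0, 0.9, 1.8, 1.4             # hyperparameters passed to SOMVAE(...)
    rec, commit, som, prob, z_prob = 0.25, 0.10, 0.05, 0.30, 0.02
    total = rec + alpha * commit + beta * som + gamma * prob + tau * z_prob
    print(round(total, 3))                                    # 0.963
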
import functools import ipdb import numpy as np try: import tensorflow.compat.v1 as tf tf.disable_v2_behavior() except: import tensorflow as tf def weight_variable(shape, name): initial = tf.truncated_normal(shape, stddev=0.1) return tf.Variable(initial,name=name) def bias_variable(shape, name): initial = tf.constant(0.1, shape=shape) return tf.Variable(initial, name=name) def conv2d(x, shape, name, strides=[1,1,1,1]): weight = weight_variable(shape, "{}_W".format(name)) bias = bias_variable([shape[-1]], "{}_b".format(name)) return tf.nn.conv2d(x, weight, strides=strides, padding='SAME', name=name) + bias def conv2d_transposed(x, shape, outshape, name, strides=[1,1,1,1]): weight = weight_variable(shape, "{}_W".format(name)) bias = bias_variable([shape[-2]], "{}_b".format(name)) return tf.nn.conv2d_transpose(x, weight, output_shape=outshape, strides=strides, padding='SAME', name=name) + bias def max_pool_2x2(x): return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME') def conv1d(x, shape, name, stride=1): weight = weight_variable(shape, "{}_W".format(name)) bias = bias_variable([shape[-1]], "{}_b".format(name)) return tf.nn.conv1d(x, weight, stride=stride, padding='SAME', name=name) + bias def max_pool_2x1(x): return tf.layers.max_pooling1d(x, pool_size=2, strides=2, padding='SAME') def lazy_scope(function): attribute = "_cache_" + function.__name__ @property @functools.wraps(function) def decorator(self): if not hasattr(self, attribute): with tf.variable_scope(function.__name__): setattr(self, attribute, function(self)) return getattr(self, attribute) return decorator class SOMVAE: def __init__(self, latent_dim=64, som_dim=[8,8], learning_rate=1e-4, decay_factor=0.95, decay_steps=1000, input_length=28, input_channels=28, alpha=1., beta=1., gamma=1., tau=1., mnist=True): self.latent_dim = latent_dim self.som_dim = som_dim self.learning_rate = learning_rate self.decay_factor = decay_factor self.decay_steps = decay_steps self.input_length = input_length self.input_channels = input_channels self.alpha = alpha self.beta = beta self.gamma = gamma self.tau = tau self.mnist = mnist self.inputs self.is_training self.prediction_input self.batch_size self.embeddings self.transition_probabilities self.global_step self.z_e self.z_e_old self.z_dist_flat self.k self.z_q self.z_q_neighbors self.reconstruction_q self.reconstruction_e self.loss_reconstruction self.loss_commit self.loss_som self.loss_probabilities self.loss_z_prob self.loss self.optimize @lazy_scope def inputs(self): x = tf.placeholder(tf.float32, shape=[None, None, 98], name="x") return x @lazy_scope def is_training(self): is_train = tf.placeholder(tf.bool,shape=[],name="is_train") return is_train @lazy_scope def prediction_input(self): pred_input = tf.placeholder(tf.int64, shape=[None], name="pred_input") return pred_input @lazy_scope def x(self): x = tf.reshape(self.inputs, [-1, 98]) return x @lazy_scope def embeddings(self): embeddings = tf.get_variable("embeddings", self.som_dim+[self.latent_dim], initializer=tf.truncated_normal_initializer(stddev=0.05)) tf.summary.tensor_summary("embeddings", embeddings) return embeddings @lazy_scope def transition_probabilities(self): with tf.variable_scope("probabilities"): probabilities_raw = tf.Variable(tf.zeros(self.som_dim+self.som_dim), name="probabilities_raw") probabilities_positive = tf.exp(probabilities_raw) probabilities_summed = tf.reduce_sum(probabilities_positive, axis=[-1,-2], keepdims=True) probabilities_normalized = probabilities_positive / 
probabilities_summed return probabilities_normalized @lazy_scope def global_step(self): global_step = tf.Variable(0, trainable=False, name="global_step") return global_step @lazy_scope def batch_size(self): batch_size = tf.shape(self.x)[0] return batch_size @lazy_scope def z_e(self): if not self.mnist: with tf.variable_scope("encoder"): h_1 = tf.keras.layers.Dense(256, activation="relu")(self.x) h_2 = tf.keras.layers.Dense(128, activation="relu")(h_1) z_e = tf.keras.layers.Dense(self.latent_dim, activation="relu")(h_2) return z_e @lazy_scope def z_e_old(self): z_e_old = tf.concat([self.z_e[0:1], self.z_e[:-1]], axis=0) return z_e_old @lazy_scope def z_dist_flat(self): z_dist = tf.squared_difference(tf.expand_dims(tf.expand_dims(self.z_e, 1), 1), tf.expand_dims(self.embeddings, 0)) z_dist_red = tf.reduce_sum(z_dist, axis=-1) z_dist_flat = tf.reshape(z_dist_red, [self.batch_size, -1]) return z_dist_flat @lazy_scope def k(self): k_train = tf.argmin(self.z_dist_flat, axis=-1) k_test = self.prediction_input k=tf.cond(self.is_training, lambda: k_train, lambda: k_test) tf.summary.histogram("clusters", k) return k @lazy_scope def z_q(self): k_1 = self.k // self.som_dim[1] k_2 = self.k % self.som_dim[1] k_stacked = tf.stack([k_1, k_2], axis=1) z_q = tf.gather_nd(self.embeddings, k_stacked) return z_q @lazy_scope def z_q_neighbors(self): k_1 = self.k // self.som_dim[1] k_2 = self.k % self.som_dim[1] k_stacked = tf.stack([k_1, k_2], axis=1) k1_not_top = tf.less(k_1, tf.constant(self.som_dim[0]-1, dtype=tf.int64)) k1_not_bottom = tf.greater(k_1, tf.constant(0, dtype=tf.int64)) k2_not_right = tf.less(k_2, tf.constant(self.som_dim[1]-1, dtype=tf.int64)) k2_not_left = tf.greater(k_2, tf.constant(0, dtype=tf.int64)) k1_up = tf.where(k1_not_top, tf.add(k_1, 1), k_1) k1_down = tf.where(k1_not_bottom, tf.subtract(k_1, 1), k_1) k2_right = tf.where(k2_not_right, tf.add(k_2, 1), k_2) k2_left = tf.where(k2_not_left, tf.subtract(k_2, 1), k_2) z_q_up = tf.where(k1_not_top, tf.gather_nd(self.embeddings, tf.stack([k1_up, k_2], axis=1)), tf.zeros([self.batch_size, self.latent_dim])) z_q_down = tf.where(k1_not_bottom, tf.gather_nd(self.embeddings, tf.stack([k1_down, k_2], axis=1)), tf.zeros([self.batch_size, self.latent_dim])) z_q_right = tf.where(k2_not_right, tf.gather_nd(self.embeddings, tf.stack([k_1, k2_right], axis=1)), tf.zeros([self.batch_size, self.latent_dim])) z_q_left = tf.where(k2_not_left, tf.gather_nd(self.embeddings, tf.stack([k_1, k2_left], axis=1)), tf.zeros([self.batch_size, self.latent_dim])) z_q_neighbors = tf.stack([self.z_q, z_q_up, z_q_down, z_q_right, z_q_left], axis=1) return z_q_neighbors @lazy_scope def reconstruction_q(self): if not self.mnist: with tf.variable_scope("decoder", reuse=tf.AUTO_REUSE): h_3 = tf.keras.layers.Dense(128, activation="relu")(self.z_q) h_4 = tf.keras.layers.Dense(256, activation="relu")(h_3) x_hat = tf.keras.layers.Dense(self.input_channels, activation="sigmoid")(h_4) else: with tf.variable_scope("decoder", reuse=tf.AUTO_REUSE): flat_size = 7*7*256 h_flat_dec = tf.keras.layers.Dense(flat_size)(self.z_q) h_reshaped = tf.reshape(h_flat_dec, [-1, 7, 7, 256]) h_unpool1 = tf.keras.layers.UpSampling2D((2,2))(h_reshaped) h_deconv1 = tf.nn.relu(conv2d(h_unpool1, [4,4,256,256], "deconv1")) h_unpool2 = tf.keras.layers.UpSampling2D((2,2))(h_deconv1) h_deconv2 = tf.nn.sigmoid(conv2d(h_unpool2, [4,4,256,1], "deconv2")) x_hat = h_deconv2 return x_hat @lazy_scope def reconstruction_e(self): if not self.mnist: with tf.variable_scope("decoder", reuse=tf.AUTO_REUSE): h_3 = 
tf.keras.layers.Dense(128, activation="relu")(self.z_e) h_4 = tf.keras.layers.Dense(256, activation="relu")(h_3) x_hat = tf.keras.layers.Dense(self.input_channels, activation="sigmoid")(h_4) else: with tf.variable_scope("decoder", reuse=tf.AUTO_REUSE): flat_size = 7*7*256 h_flat_dec = tf.keras.layers.Dense(flat_size)(self.z_e) h_reshaped = tf.reshape(h_flat_dec, [-1, 7, 7, 256]) h_unpool1 = tf.keras.layers.UpSampling2D((2,2))(h_reshaped) h_deconv1 = tf.nn.relu(conv2d(h_unpool1, [4,4,256,256], "deconv1")) h_unpool2 = tf.keras.layers.UpSampling2D((2,2))(h_deconv1) h_deconv2 = tf.nn.sigmoid(conv2d(h_unpool2, [4,4,256,1], "deconv2")) x_hat = h_deconv2 return x_hat @lazy_scope def loss_reconstruction(self): loss_rec_mse_zq = tf.losses.mean_squared_error(self.x, self.reconstruction_q) loss_rec_mse_ze = tf.losses.mean_squared_error(self.x, self.reconstruction_e) loss_rec_mse = loss_rec_mse_zq + loss_rec_mse_ze tf.summary.scalar("loss_reconstruction", loss_rec_mse) return loss_rec_mse @lazy_scope def loss_commit(self): loss_commit = tf.reduce_mean(tf.squared_difference(self.z_e, self.z_q)) tf.summary.scalar("loss_commit", loss_commit) return loss_commit @lazy_scope def loss_som(self): loss_som = tf.reduce_mean(tf.squared_difference(tf.expand_dims(tf.stop_gradient(self.z_e), axis=1), self.z_q_neighbors)) tf.summary.scalar("loss_som", loss_som) return loss_som @lazy_scope def loss_probabilities(self): k_1 = self.k // self.som_dim[1] k_2 = self.k % self.som_dim[1] k_1_old = tf.concat([k_1[0:1], k_1[:-1]], axis=0) k_2_old = tf.concat([k_2[0:1], k_2[:-1]], axis=0) k_stacked = tf.stack([k_1_old, k_2_old, k_1, k_2], axis=1) transitions_all = tf.gather_nd(self.transition_probabilities, k_stacked) loss_probabilities = -self.gamma * tf.reduce_mean(tf.log(transitions_all)) return loss_probabilities @lazy_scope def loss_z_prob(self): k_1 = self.k // self.som_dim[1] k_2 = self.k % self.som_dim[1] k_1_old = tf.concat([k_1[0:1], k_1[:-1]], axis=0) k_2_old = tf.concat([k_2[0:1], k_2[:-1]], axis=0) k_stacked_old = tf.stack([k_1_old, k_2_old], axis=1) out_probabilities_old = tf.gather_nd(self.transition_probabilities, k_stacked_old) out_probabilities_flat = tf.reshape(out_probabilities_old, [self.batch_size, -1]) weighted_z_dist_prob = tf.multiply(self.z_dist_flat, out_probabilities_flat) loss_z_prob = tf.reduce_mean(weighted_z_dist_prob) return loss_z_prob @lazy_scope
MIT License
wikimedia/pywikibot
scripts/archive/casechecker.py
CaseChecker.PickTarget
python
def PickTarget(self, title, original, candidates): if not candidates: return None if len(candidates) == 1: return candidates[0] pagesDontExist = [] pagesRedir = {} pagesExist = [] for newTitle in candidates: dst = self.Page(newTitle) if not dst.exists(): pagesDontExist.append(newTitle) elif dst.isRedirectPage(): pagesRedir[newTitle] = dst.getRedirectTarget().title() else: pagesExist.append(newTitle) if len(pagesExist) == 1: return pagesExist[0] if not pagesExist and pagesRedir: if len(pagesRedir) == 1: return list(pagesRedir.keys())[0] t = None for v in pagesRedir.values(): if not t: t = v elif t != v: break else: return list(pagesRedir.keys())[0] if not self.autonomous: pywikibot.output('Could not auto-decide for page {}. Which link ' 'should be chosen?' .format(self.MakeLink(title, False))) pywikibot.output('Original title: ', newline=False) self.ColorCodeWord(original + '\n', True) for count, t in enumerate(candidates, 1): if t in pagesDontExist: msg = 'missing' elif t in pagesRedir: msg = 'Redirect to ' + pagesRedir[t] else: msg = 'page exists' self.ColorCodeWord(' {}: {} ({})\n'.format(count, t, msg), True) answers = [('skip', 's')] + [(str(i), i) for i in range(1, count)] choice = pywikibot.input_choice('Which link to choose?', answers) if choice != 's': return candidates[int(choice) - 1]
Pick target from candidates.
https://github.com/wikimedia/pywikibot/blob/5097f5b9a7ef9d39f35f17edd11faf3086a01d1d/scripts/archive/casechecker.py#L575-L632
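
A standalone sketch of the selection priority PickTarget applies, with no pywikibot dependency; the callables stand in for page-existence and redirect lookups and are invented for illustration only.

    def pick_target_sketch(candidates, exists, redirect_target):
        # a single candidate wins outright
        if len(candidates) == 1:
            return candidates[0]
        pages_exist = [c for c in candidates if exists(c) and redirect_target(c) is None]
        # exactly one real (non-redirect) page: use it
        if len(pages_exist) == 1:
            return pages_exist[0]
        pages_redir = {c: redirect_target(c) for c in candidates if exists(c) and redirect_target(c)}
        # no real pages, but every redirect points at the same target: take the first redirect
        if not pages_exist and pages_redir and len(set(pages_redir.values())) == 1:
            return next(iter(pages_redir))
        return None   # the real method falls back to an interactive prompt here
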
import codecs import os import re import sys from itertools import chain, combinations from string import ascii_letters import pywikibot from pywikibot import i18n from pywikibot.data import api from pywikibot.exceptions import LockedPageError, PageSaveRelatedError from pywikibot.tools import first_lower, first_upper, formatter from scripts.category import CategoryMoveRobot as CategoryMoveBot class CaseChecker: alwaysInLocal = ['СССР', 'Как', 'как'] alwaysInLatin = ['II', 'III'] localUpperLtr = 'ЁІЇЎАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯҐ' localLowerLtr = 'ёіїўабвгдежзийклмнопрстуфхцчшщъыьэюяґ' localLtr = localUpperLtr + localLowerLtr localSuspects = 'АВЕКМНОРСТХІЁЇаеорсухіёї' latinSuspects = 'ABEKMHOPCTXIËÏaeopcyxiëï' localKeyboard = 'йцукенгшщзфывапролдячсмить' latinKeyboard = 'qwertyuiopasdfghjklzxcvbnm' romanNumChars = 'IVXLCDM' romannumSuffixes = localLowerLtr romanNumSfxPtrn = re.compile( '[{}]+[{}]+$'.format(romanNumChars, localLowerLtr)) whitelists = { 'ru': 'ВП:КЛ/Проверенные', } lclClrFnt = '<font color=green>' latClrFnt = '<font color=brown>' suffixClr = '</font>' colorFormatLocalColor = '{green}' colorFormatLatinColor = '{red}' colorFormatSuffix = '{default}' wordBreaker = re.compile(r'[ _\-/\|#[\]():]') stripChars = ' \t,' titles = True links = False aplimit = None apfrom = '' title = None replace = False stopAfter = -1 wikilog = None wikilogfile = 'wikilog.txt' failedTitles = 'failedTitles.txt' nosuggestions = 'nosuggestions.txt' doFailed = False titleList = None autonomous = False namespaces = [] filterredir = 'nonredirects' def handle_args(self): for arg in pywikibot.handle_args(): arg, _, value = arg.partition(':') if arg == '-from': self.apfrom = value or pywikibot.input( 'Which page to start from: ') elif arg == '-reqsize': self.aplimit = int(value) elif arg == '-links': self.links = True elif arg == '-linksonly': self.links = True self.titles = False elif arg == '-replace': self.replace = True elif arg == '-redir': self.filterredir = 'all' elif arg == '-redironly': self.filterredir = 'redirects' elif arg == '-limit': self.stopAfter = int(value) elif arg in ('-autonomous', '-a'): self.autonomous = True elif arg == '-ns': self.namespaces.append(int(value)) elif arg == '-wikilog': self.wikilogfile = value elif arg == '-failedlog': self.failedTitles = value elif arg == '-failed': self.doFailed = True else: pywikibot.output('Unknown argument {}.'.format(arg)) pywikibot.show_help() sys.exit() def __init__(self): self.handle_args() if not self.namespaces and not self.doFailed: if not self.apfrom: self.namespaces = [14, 10, 12, 0] else: self.namespaces = [0] if not self.aplimit: self.aplimit = 200 if self.links else 'max' if not self.doFailed: self.queryParams = {'action': 'query', 'generator': 'allpages', 'gaplimit': self.aplimit, 'gapfilterredir': self.filterredir} else: self.queryParams = {'action': 'query'} if self.apfrom: pywikibot.output('Argument "-from" is ignored with "-failed"') propParam = 'info' if self.links: propParam += '|links|categories' self.queryParams['pllimit'] = 'max' self.queryParams['cllimit'] = 'max' self.queryParams['prop'] = propParam self.site = pywikibot.Site() if len(self.localSuspects) != len(self.latinSuspects): raise ValueError('Suspects must be the same size') if len(self.localKeyboard) != len(self.latinKeyboard): raise ValueError('Keyboard info must be the same size') if not os.path.isabs(self.wikilogfile): self.wikilogfile = pywikibot.config.datafilepath(self.wikilogfile) self.wikilog = self.OpenLogFile(self.wikilogfile) if not 
os.path.isabs(self.failedTitles): self.failedTitles = pywikibot.config.datafilepath( self.failedTitles) if self.doFailed: with codecs.open(self.failedTitles, 'r', 'utf-8') as f: self.titleList = [self.Page(t) for t in f] self.failedTitles += '.failed' iterzip = zip(self.localSuspects, self.latinSuspects) self.lclToLatDict = {ord(local): latin for local, latin in iterzip} self.latToLclDict = {ord(latin): local for local, latin in iterzip} if self.localKeyboard is not None: iterzip = zip(self.localKeyboard, self.latinKeyboard) self.lclToLatKeybDict = {ord(local): latin for local, latin in iterzip} self.latToLclKeybDict = {ord(latin): local for local, latin in iterzip} else: self.lclToLatKeybDict = {} self.latToLclKeybDict = {} badPtrnStr = '([{ascii}][{local}]|[{local}][{ascii}])'.format( ascii=ascii_letters, local=self.localLtr) self.badWordPtrn = re.compile('[{ascii}{local}]*{bad}[{ascii}{local}]*' .format(ascii=ascii_letters, local=self.localLtr, bad=badPtrnStr)) self.get_whitelist() def get_whitelist(self): self.knownWords = set() self.seenUnresolvedLinks = set() if self.site.code in self.whitelists: wlpage = self.whitelists[self.site.code] pywikibot.output('Loading whitelist from {}'.format(wlpage)) wlparams = { 'action': 'query', 'prop': 'links', 'titles': wlpage, 'redirects': '', 'indexpageids': '', 'pllimit': 'max', } req = api.Request(site=self.site, parameters=wlparams) data = req.submit() if len(data['query']['pageids']) == 1: pageid = data['query']['pageids'][0] links = data['query']['pages'][pageid]['links'] self.knownWords = {nn for n in links for nn in self.FindBadWords(n['title'])} else: raise ValueError('The number of pageids is not 1') pywikibot.output('Loaded whitelist with {} items' .format(len(self.knownWords))) if self.knownWords: pywikibot.log('Whitelist: ' + ', '.join(self.MakeLink(i, False) for i in self.knownWords)) else: pywikibot.output( 'Whitelist is not known for language ' + self.site.code) def RunQuery(self, params): while True: req = api.Request(**params) data = req.submit() yield data if 'clcontinue' in params: del params['clcontinue'] if 'plcontinue' in params: del params['plcontinue'] if 'query-continue' not in data: if 'gapcontinue' in params: del params['gapcontinue'] break qc = data['query-continue'] if 'categories' in qc or 'links' in qc: if 'categories' in qc: params.update(qc['categories']) if 'links' in qc: params.update(qc['links']) elif 'allpages' in qc: params.update(qc['allpages']) else: raise ValueError('Unexpected query-continue values: {}' .format(qc)) def Run(self): try: self.lastLetter = '' if not self.doFailed: for namespace in self.namespaces: self.currentTitle = None self.queryParams['gapnamespace'] = namespace self.queryParams['gapfrom'] = self.apfrom for data in self.RunQuery(self.queryParams): self.ProcessDataBlock(data) else: self.currentTitle = None batchSize = 10 for batchStart in range(0, len(self.titleList), batchSize): self.queryParams['titles'] = self.titleList[ batchStart:batchStart + batchSize] for data in self.RunQuery(self.queryParams): self.ProcessDataBlock(data) except Exception: pywikibot.output('Exception at Title = {}, Next = {}' .format(self.currentTitle, self.apfrom)) pywikibot.exception() raise def ProcessDataBlock(self, data): if 'query' not in data or 'pages' not in data['query']: return firstItem = True for page in data['query']['pages'].values(): printed = False title = page['title'] self.currentTitle = title if 'missing' in page: continue if firstItem: if self.lastLetter != title[0]: 
pywikibot.output('Processing {}\n'.format(title)) self.lastLetter = title[0] firstItem = False if self.titles: err = self.ProcessTitle(title) if err: changed = False if self.replace: if len(err[1]) == 1: newTitle = err[1][0] editSummary = i18n.twtranslate( self.site, 'casechecker-rename') dst = self.Page(newTitle) if 'redirect' in page: src = self.Page(title) redir = src.getRedirectTarget() redirTitle = redir.title(as_link=True, textlink=True) if not dst.exists(): src.move(newTitle, editSummary, movesubpages=True) changed = True replErrors = False for p in src.getReferences( follow_redirects=False): if p.namespace() == 2: continue oldText = p.text newText = self.ReplaceLink(oldText, title, newTitle) if not self.PutNewPage( p, newText, [self.MakeMoveSummary(title, newTitle)]): replErrors = True if not replErrors: editSummary = i18n.twtranslate( self.site, 'casechecker-delete-summary') newText = i18n.twtranslate( self.site, 'casechecker-delete-reason', redirTitle) if newText: src.text = '{{delete}}\n\n' + newText src.save(editSummary, minor=False) changed = True elif not dst.exists(): src = self.Page(title) if page['ns'] == 14: dst = self.Page(newTitle) bot = CategoryMoveBot( src.title(with_ns=False), dst.title(with_ns=False), self.autonomous, editSummary + ' ' + self.MakeMoveSummary(title, newTitle), True) bot.run() else: src.move(newTitle, editSummary, movesubpages=True) changed = True if not changed: if err[1]: self.AppendLineToLog(self.failedTitles, title) else: self.AddNoSuggestionTitle(title) self.WikiLog('* ' + err[0]) printed = True if self.links: allLinks = [] if 'links' in page: allLinks += page['links'] if 'categories' in page: allLinks += page['categories'] if allLinks: pageObj = None pageTxt = None msg = [] foundSuggestions = False for link in allLinks: ltxt = link['title'] err = self.ProcessTitle(ltxt) if err: if err[1]: foundSuggestions = True elif self.AddNoSuggestionTitle(ltxt): continue newTitle = None if self.replace: newTitle = self.PickTarget(title, ltxt, err[1]) if newTitle: if pageObj is None: pageObj = self.Page(title) pageTxt = pageObj.get() msg.append(self.MakeMoveSummary(ltxt, newTitle)) pageTxt = self.ReplaceLink(pageTxt, ltxt, newTitle) if not newTitle: if not printed: self.WikiLog('* {}: link to {}' .format(self.MakeLink(title, False), err[0])) printed = True else: self.WikiLog('** link to {}' .format(err[0])) if pageObj is not None: if self.PutNewPage(pageObj, pageTxt, msg): foundSuggestions = False if foundSuggestions: self.AppendLineToLog(self.failedTitles, title) if self.stopAfter: self.stopAfter -= 1 if self.stopAfter == 0: raise ValueError('Stopping because we are done') def WikiLog(self, text): pywikibot.output(text) self.wikilog.write(text + '\n') self.wikilog.flush() def FindBadWords(self, title): for m in self.badWordPtrn.finditer(title): yield title[m.span()[0]:m.span()[1]] def ProcessTitle(self, title): badWords = list(self.FindBadWords(title)) if badWords: badWords = {i for i in badWords if i not in self.knownWords and self.romanNumSfxPtrn.match(i) is not None} if not badWords or self.Page(title).is_filepage(): return None count = 0 ambigBadWords = set() ambigBadWordsCount = 0 mapLcl = {} mapLat = {} for badWord in badWords: mightBeLat = mightBeLcl = True for letter in badWord: if letter in self.localLtr: if mightBeLat and letter not in self.localSuspects: mightBeLat = False else: if mightBeLcl and letter not in self.latinSuspects: mightBeLcl = False if letter not in ascii_letters: raise ValueError('Assert failed') if mightBeLcl and mightBeLat: if 
badWord in self.alwaysInLocal: mightBeLat = False elif badWord in self.alwaysInLatin: mightBeLcl = False if mightBeLcl: mapLcl[badWord] = badWord.translate(self.latToLclDict) if mightBeLat: mapLat[badWord] = badWord.translate(self.lclToLatDict) if mightBeLcl and mightBeLat: ambigBadWords.add(badWord) ambigBadWordsCount += 1 if not mightBeLcl and not mightBeLat: bwLen = len(badWord) kw = [w for w in self.knownWords if len(w) == bwLen] for p in range(bwLen): if not kw: break c = badWord[p] co = ord(c) if co in self.latToLclDict: c2 = self.latToLclDict[co] elif co in self.lclToLatDict: c2 = self.lclToLatDict[co] else: c2 = None kw = [w for w in kw if p < len(w) and (w[p] == c or (c2 is not None and w[p] == c2))] if len(kw) > 1: pywikibot.output("Word '{}' could be treated as more than " 'one known words'.format(badWord)) elif len(kw) == 1: mapLcl[badWord] = kw[0] count += 1 infoText = self.MakeLink(title) possibleAlternatives = [] if len(mapLcl) + len(mapLat) - ambigBadWordsCount < count: suggestions = list(mapLcl.values()) + list(mapLat.values()) if suggestions: infoText += ', word suggestions: ' + ', '.join( self.ColorCodeWord(t) for t in suggestions) else: infoText += ', no suggestions' else: for k, v in dict(chain(mapLat.items(), mapLcl.items())).items(): if k not in ambigBadWords: title = title.replace(k, v) if not ambigBadWords: possibleAlternatives.append(title) infoText += ', convert to ' + self.MakeLink(title) else: for itemCntToPick in range(len(ambigBadWords) + 1): title2 = title for uc in combinations(list(ambigBadWords), itemCntToPick): wordsToLat = ambigBadWords.copy() for bw in uc: title2 = title2.replace(bw, mapLcl[bw]) wordsToLat.remove(bw) for bw in wordsToLat: title2 = title2.replace(bw, mapLat[bw]) possibleAlternatives.append(title2) if possibleAlternatives: infoText += ', can be converted to ' + ', '.join( self.MakeLink(t) for t in possibleAlternatives) else: infoText += ', no suggestions' return (infoText, possibleAlternatives)
MIT License
microsoft/sara
interactions/input.py
parse_input_event
python
def parse_input_event(msg_type, target, logs, nested_events, log_begin_idx): log_info = [util.extract_info(log) for log in logs] plid, package = log_info[0]['plid'], log_info[0]['package'] perform_editor_action = None string_info = list() text_view_id = None text_address = infer_input(log_info) print('Inferred Text Address: ', text_address) last_editable_input_connection_log = 0 for idx, lf in enumerate(log_info): if lf['tag'] == util.SPANNER_STRING_BUILDER_TAG and 'address' in lf['content'] and lf['content']['address'] == text_address: string_info.append((idx, lf, last_editable_input_connection_log)) elif lf['tag'] == util.EDITABLE_INPUT_CONNECTION_TAG: last_editable_input_connection_log = idx if lf['content']['event'] == 'performEditorAction': perform_editor_action = (idx, lf,) if text_view_id is None: text_view_id = lf['content']['TextViewId'] assert text_view_id == lf['content']['TextViewId'] text_view_info = { 'classname': log_info[0]['content']['TextViewClassname'], 'x': log_info[0]['content']['TextViewX'], 'y': log_info[0]['content']['TextViewY'], 'id': log_info[0]['content']['TextViewId'], 'width': log_info[0]['content']['TextViewWidth'], 'height': log_info[0]['content']['TextViewHeight'], 'position_in_screen': log_info[0]['content']['TextViewPositionInScreen'], 'TextView': log_info[0]['content']['TextView'] } if perform_editor_action is not None: editor_action_idx = perform_editor_action[0] text_view_info['position_in_screen'] = log_info[editor_action_idx]['content']['TextViewPositionInScreen'] editor_action_event = event.InputEvent( plid=plid, package=package, log_begin_idx=editor_action_idx + log_begin_idx, msg_type=msg_type, target=target, begin_ts=log_info[editor_action_idx]['ts'], end_ts=log_info[editor_action_idx]['ts'], text_view_info=text_view_info, text=None, action=perform_editor_action[1]['content']['actionCode'] ) idx = 0 for ne in nested_events: if editor_action_event.log_begin_idx < ne.log_begin_idx: nested_events.insert(idx, editor_action_event) break idx += 1 else: nested_events.append(editor_action_event) if len(string_info) == 0: return nested_events pprint(string_info) begin_ts = log_info[0]['ts'] event_list = list() previous_idx = 0 for ne_idx, ne in enumerate(nested_events): if ne.msg_type == event.Event.KEY_EVENT: if not ne.is_back(): continue anchor = ne.log_begin_idx last_idx = len(string_info) for idx, si in enumerate(string_info): if si[0] + log_begin_idx > anchor: last_idx = idx break sub_list = string_info[previous_idx:last_idx] previous_idx = last_idx if len(sub_list) > 0: string_input = '' string_log_begin_idx = sub_list[-1][0] + log_begin_idx for si in reversed(sub_list): text = si[1]['content']['text'] if len(text) != 0: string_input = text text_view_info['position_in_screen'] = log_info[si[2]]['content']['TextViewPositionInScreen'] end_ts = si[1]['ts'] string_log_begin_idx = si[0] + log_begin_idx break else: end_ts = string_info[last_idx - 1][1]['ts'] event_list.append( event.InputEvent( plid=plid, package=package, log_begin_idx=string_log_begin_idx, msg_type=msg_type, target=target, begin_ts=begin_ts, end_ts=end_ts, text_view_info=text_view_info, text=string_input, action=None ) ) begin_ts = ne.end_ts event_list.append(ne) if ne.msg_type == event.Event.TYPE_EVENT and ne.action_type == event.InputEvent.ENTER_ACTION: for _ne in nested_events[ne_idx+1:]: if _ne.msg_type == event.Event.KEY_EVENT: if not _ne.is_back(): continue event_list.append(_ne) break print(event_list) if len(nested_events) == 0: last_text = string_info[previous_idx:][-1] begin_ts 
= end_ts = last_text[1]['ts'] event_list.append( event.InputEvent( plid=plid, package=package, log_begin_idx=last_text[0] + log_begin_idx, msg_type=msg_type, target=target, begin_ts=begin_ts, end_ts=end_ts, text_view_info=text_view_info, text=last_text[1]['content']['text'], action=None ) ) return event_list
Parse input event logs :param msg_type: :param target: :param logs: [Pair of (Down, Up)] :return: list of event.Event
https://github.com/microsoft/sara/blob/4d4636a93fb0356686ca143722ec29a87205cd97/interactions/input.py#L80-L248
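
A sketch of the per-log dictionary shape parse_input_event consumes, using only keys that the code above reads; every concrete value is invented and the field list is not exhaustive.

    log_entry = {
        'plid': 4242,
        'package': 'com.example.app',
        'ts': 1600000000.0,
        'tag': util.EDITABLE_INPUT_CONNECTION_TAG,
        'content': {
            'event': 'performEditorAction',
            'actionCode': 6,                           # e.g. IME_ACTION_DONE
            'TextViewId': 'com.example.app:id/query',
            'mBatchEditNesting': 0,
        },
    }
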
from . import util from . import event from pprint import pprint def infer_input(log_info): editable_address = dict() input_connection_count = 0 m_begin_batch_edit_0 = list() for idx, lf in enumerate(log_info): if lf['tag'] == util.SPANNER_STRING_BUILDER_TAG: if 'address' in lf['content'] and lf['content']['address'] is not None: address = lf['content']['address'] if address not in editable_address: editable_address[address] = { 'length': 0, 'count': 0, 'index': [], 'match': 0 } editable_address[address]['count'] += 1 editable_address[address]['length'] = max(editable_address[address]['length'], len(lf['content']['text'])) editable_address[address]['index'].append(idx) elif lf['tag'] == util.EDITABLE_INPUT_CONNECTION_TAG: input_connection_count += 1 if lf['content']['mBatchEditNesting'] == 0: m_begin_batch_edit_0.append(idx) print(editable_address) pre_batch_edit_idx = 0 for curr_batch_edit_idx in m_begin_batch_edit_0: for address, value in editable_address.items(): for aidx in value['index']: if pre_batch_edit_idx < aidx < curr_batch_edit_idx: value['match'] += 1 break pre_batch_edit_idx = curr_batch_edit_idx max_value = 0 max_address_list = list() for address, value in editable_address.items(): if value['match'] > max_value: max_value = value['match'] max_address_list = [address] elif value['match'] == max_value: max_address_list.append(address) if len(max_address_list) == 0: return '' if len(max_address_list) == 1: return max_address_list[0] threshold = input_connection_count for address in max_address_list: editable_address[address]['count'] = abs(threshold - value['count']) min_count = len(log_info) text_address = None text_length = 0 for address in max_address_list: if editable_address[address]['count'] < min_count: min_count = editable_address[address]['count'] text_address = address text_length = editable_address[address]['length'] elif editable_address[address]['count'] == min_count: if editable_address[address]['length'] > text_length: text_address = address text_length = editable_address[address]['length'] return text_address
MIT License
tdryer/hangups
hangups/ui/__main__.py
ConversationEventListWalker.prev_position
python
def prev_position(self, position): return self._get_position(position, prev=True)
Return the position above position or raise IndexError.
https://github.com/tdryer/hangups/blob/c789b474672ceb301fa0dd2005a04aa4e838ec7a/hangups/ui/__main__.py#L846-L848
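
A small sketch of how urwid's list-walker protocol exercises prev_position; walker stands for a ConversationEventListWalker instance and pos for a position key it previously handed out, both assumptions here.

    try:
        above = walker.prev_position(pos)   # position of the event rendered directly above
    except IndexError:
        above = None                        # pos was already at the top of the history
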
import appdirs import asyncio import configargparse import contextlib import logging import os import sys import urwid import readlike from bisect import bisect import hangups from hangups.ui.emoticon import replace_emoticons from hangups.ui import notifier from hangups.ui.utils import get_conv_name, add_color_to_scheme if urwid.__version__ == '1.2.2-dev': sys.exit('error: hangups-urwid package is installed\n\n' 'Please uninstall hangups-urwid and urwid, and reinstall ' 'hangups.') LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' COL_SCHEMES = { 'default': { ('active_tab', '', ''), ('inactive_tab', 'standout', ''), ('msg_date', '', ''), ('msg_sender', '', ''), ('msg_self', '', ''), ('msg_text', '', ''), ('msg_text_self', '', ''), ('msg_watermark', '', ''), ('msg_selected', 'standout', ''), ('status_line', 'standout', ''), ('tab_background', 'standout', ''), }, 'solarized-dark': { ('active_tab', 'light gray', 'light blue'), ('inactive_tab', 'underline', 'light green'), ('msg_date', 'dark cyan', ''), ('msg_sender', 'dark blue', ''), ('msg_text_self', '', ''), ('msg_self', 'dark green', ''), ('msg_text', '', ''), ('msg_watermark', 'light gray', ''), ('msg_selected', 'standout', ''), ('status_line', 'standout', ''), ('tab_background', 'black,standout,underline', 'light green'), }, } COL_SCHEME_NAMES = ( 'active_tab', 'inactive_tab', 'msg_date', 'msg_sender', 'msg_self', 'msg_text', 'msg_text_self', 'status_line', 'tab_background' ) DISCREET_NOTIFICATION = notifier.Notification( 'hangups', 'Conversation', 'New message' ) class HangupsDisconnected(Exception): class ChatUI: def __init__(self, refresh_token_path, keybindings, palette, palette_colors, datetimefmt, notifier_, discreet_notifications, manual_login, keep_emoticons): self._keys = keybindings self._datetimefmt = datetimefmt self._notifier = notifier_ self._discreet_notifications = discreet_notifications self._keep_emoticons = keep_emoticons set_terminal_title('hangups') self._conv_widgets = {} self._tabbed_window = None self._conv_list = None self._user_list = None self._coroutine_queue = CoroutineQueue() self._exception = None try: cookies = hangups.auth.get_auth_stdin( refresh_token_path, manual_login ) except hangups.GoogleAuthError as e: sys.exit('Login failed ({})'.format(e)) self._client = hangups.Client(cookies) self._client.on_connect.add_observer(self._on_connect) loop = asyncio.get_event_loop() loop.set_exception_handler(self._exception_handler) try: self._urwid_loop = urwid.MainLoop( LoadingWidget(), palette, handle_mouse=False, input_filter=self._input_filter, event_loop=urwid.AsyncioEventLoop(loop=loop) ) except urwid.AttrSpecError as e: sys.exit(e) self._urwid_loop.screen.set_terminal_properties(colors=palette_colors) self._urwid_loop.start() coros = [self._connect(), self._coroutine_queue.consume()] with bracketed_paste_mode(): try: loop.run_until_complete(asyncio.gather(*coros)) except HangupsDisconnected: pass finally: self._urwid_loop.stop() task = asyncio.gather(*coros, return_exceptions=True) task.cancel() try: loop.run_until_complete(task) except asyncio.CancelledError: pass loop.close() if self._exception: raise self._exception async def _connect(self): await self._client.connect() raise HangupsDisconnected() def _exception_handler(self, _loop, context): self._coroutine_queue.put(self._client.disconnect()) default_exception = Exception(context.get('message')) self._exception = context.get('exception', default_exception) def _input_filter(self, keys, _): if keys == [self._keys['menu']]: if 
self._urwid_loop.widget == self._tabbed_window: self._show_menu() else: self._hide_menu() elif keys == [self._keys['quit']]: self._coroutine_queue.put(self._client.disconnect()) else: return keys def _show_menu(self): current_widget = self._tabbed_window.get_current_widget() if hasattr(current_widget, 'get_menu_widget'): menu_widget = current_widget.get_menu_widget(self._hide_menu) overlay = urwid.Overlay(menu_widget, self._tabbed_window, align='center', width=('relative', 80), valign='middle', height=('relative', 80)) self._urwid_loop.widget = overlay def _hide_menu(self): self._urwid_loop.widget = self._tabbed_window def get_conv_widget(self, conv_id): if conv_id not in self._conv_widgets: set_title_cb = (lambda widget, title: self._tabbed_window.set_tab(widget, title=title)) widget = ConversationWidget( self._client, self._coroutine_queue, self._conv_list.get(conv_id), set_title_cb, self._keys, self._datetimefmt, self._keep_emoticons ) self._conv_widgets[conv_id] = widget return self._conv_widgets[conv_id] def add_conversation_tab(self, conv_id, switch=False): conv_widget = self.get_conv_widget(conv_id) self._tabbed_window.set_tab(conv_widget, switch=switch, title=conv_widget.title) def on_select_conversation(self, conv_id): self.add_conversation_tab(conv_id, switch=True) async def _on_connect(self): self._user_list, self._conv_list = ( await hangups.build_user_conversation_list(self._client) ) self._conv_list.on_event.add_observer(self._on_event) conv_picker = ConversationPickerWidget(self._conv_list, self.on_select_conversation, self._keys) self._tabbed_window = TabbedWindowWidget(self._keys) self._tabbed_window.set_tab(conv_picker, switch=True, title='Conversations') self._urwid_loop.widget = self._tabbed_window def _on_event(self, conv_event): conv = self._conv_list.get(conv_event.conversation_id) user = conv.get_user(conv_event.user_id) show_notification = all(( isinstance(conv_event, hangups.ChatMessageEvent), not user.is_self, not conv.is_quiet, )) if show_notification: self.add_conversation_tab(conv_event.conversation_id) if self._discreet_notifications: notification = DISCREET_NOTIFICATION else: notification = notifier.Notification( user.full_name, get_conv_name(conv), conv_event.text ) self._notifier.send(notification) class CoroutineQueue: def __init__(self): self._queue = asyncio.Queue() def put(self, coro): assert asyncio.iscoroutine(coro) self._queue.put_nowait(coro) async def consume(self): while True: coro = await self._queue.get() assert asyncio.iscoroutine(coro) await coro class WidgetBase(urwid.WidgetWrap): def keypress(self, size, key): return super().keypress(size, key) class LoadingWidget(WidgetBase): def __init__(self): super().__init__(urwid.Filler( urwid.Text('Connecting...', align='center') )) class RenameConversationDialog(WidgetBase): def __init__(self, coroutine_queue, conversation, on_cancel, on_save, keybindings): self._coroutine_queue = coroutine_queue self._conversation = conversation edit = urwid.Edit(edit_text=get_conv_name(conversation)) items = [ urwid.Text('Rename conversation:'), edit, urwid.Button( 'Save', on_press=lambda _: self._rename(edit.edit_text, on_save) ), urwid.Button('Cancel', on_press=lambda _: on_cancel()), ] list_walker = urwid.SimpleFocusListWalker(items) list_box = ListBox(keybindings, list_walker) super().__init__(list_box) def _rename(self, name, callback): self._coroutine_queue.put(self._conversation.rename(name)) callback() class ConversationMenu(WidgetBase): def __init__(self, coroutine_queue, conversation, close_callback, 
keybindings): rename_dialog = RenameConversationDialog( coroutine_queue, conversation, lambda: frame.contents.__setitem__('body', (list_box, None)), close_callback, keybindings ) items = [ urwid.Text( 'Conversation name: {}'.format(get_conv_name(conversation)) ), urwid.Button( 'Change Conversation Name', on_press=lambda _: frame.contents.__setitem__( 'body', (rename_dialog, None) ) ), urwid.Divider('-'), urwid.Button('Back', on_press=lambda _: close_callback()), ] list_walker = urwid.SimpleFocusListWalker(items) list_box = ListBox(keybindings, list_walker) frame = urwid.Frame(list_box) padding = urwid.Padding(frame, left=1, right=1) line_box = urwid.LineBox(padding, title='Conversation Menu') super().__init__(line_box) class ConversationButton(WidgetBase): def __init__(self, conversation, on_press): conversation.on_event.add_observer(self._on_event) conversation.on_watermark_notification.add_observer(self._on_event) self._conversation = conversation self._button = urwid.Button(self._get_label(), on_press=on_press, user_data=conversation.id_) super().__init__(self._button) def _get_label(self): return get_conv_name(self._conversation, show_unread=True) def _on_event(self, _): self._button.set_label(self._get_label()) @property def last_modified(self): return self._conversation.last_modified class ConversationListWalker(urwid.SimpleFocusListWalker): def __init__(self, conversation_list, on_select): self._conversation_list = conversation_list self._conversation_list.on_event.add_observer(self._on_event) self._on_press = lambda button, conv_id: on_select(conv_id) convs = sorted(conversation_list.get_all(), reverse=True, key=lambda c: c.last_modified) buttons = [ConversationButton(conv, on_press=self._on_press) for conv in convs] super().__init__(buttons) def _on_event(self, _): self.sort(key=lambda conv_button: conv_button.last_modified, reverse=True) class ListBox(WidgetBase): def __init__(self, keybindings, list_walker): self._keybindings = keybindings super().__init__(urwid.ListBox(list_walker)) def keypress(self, size, key): key = super().keypress(size, key) if key == self._keybindings['down']: super().keypress(size, 'down') elif key == self._keybindings['up']: super().keypress(size, 'up') elif key == self._keybindings['page_up']: super().keypress(size, 'page up') elif key == self._keybindings['page_down']: super().keypress(size, 'page down') else: return key class ConversationPickerWidget(WidgetBase): def __init__(self, conversation_list, on_select, keybindings): list_walker = ConversationListWalker(conversation_list, on_select) list_box = ListBox(keybindings, list_walker) widget = urwid.Padding(list_box, left=2, right=2) super().__init__(widget) class ReturnableEdit(urwid.Edit): def __init__(self, on_return, keybindings, caption=None): super().__init__(caption=caption, multiline=True) self._on_return = on_return self._keys = keybindings self._paste_mode = False def keypress(self, size, key): if key == 'begin paste': self._paste_mode = True elif key == 'end paste': self._paste_mode = False elif key == 'enter' and not self._paste_mode: self._on_return(self.get_edit_text()) self.set_edit_text('') elif key not in self._keys.values() and key in readlike.keys(): text, pos = readlike.edit(self.edit_text, self.edit_pos, key) self.set_edit_text(text) self.set_edit_pos(pos) else: return super().keypress(size, key) class StatusLineWidget(WidgetBase): _MESSAGE_DELAY_SECS = 10 def __init__(self, client, conversation): self._typing_statuses = {} self._conversation = conversation 
self._conversation.on_event.add_observer(self._on_event) self._conversation.on_typing.add_observer(self._on_typing) self._widget = urwid.Text('', align='center') self._is_connected = True self._message = None self._message_handle = None client.on_disconnect.add_observer(self._on_disconnect) client.on_reconnect.add_observer(self._on_reconnect) super().__init__(urwid.AttrMap(self._widget, 'status_line')) def show_message(self, message_str): if self._message_handle is not None: self._message_handle.cancel() self._message_handle = asyncio.get_event_loop().call_later( self._MESSAGE_DELAY_SECS, self._clear_message ) self._message = message_str self._update() def _clear_message(self): self._message = None self._message_handle = None self._update() def _on_disconnect(self): self._is_connected = False self._update() def _on_reconnect(self): self._is_connected = True self._update() def _on_event(self, conv_event): if isinstance(conv_event, hangups.ChatMessageEvent): self._typing_statuses[conv_event.user_id] = ( hangups.TYPING_TYPE_STOPPED ) self._update() def _on_typing(self, typing_message): self._typing_statuses[typing_message.user_id] = typing_message.status self._update() def _update(self): typing_users = [self._conversation.get_user(user_id) for user_id, status in self._typing_statuses.items() if status == hangups.TYPING_TYPE_STARTED] displayed_names = [user.first_name for user in typing_users if not user.is_self] if displayed_names: typing_message = '{} {} typing...'.format( ', '.join(sorted(displayed_names)), 'is' if len(displayed_names) == 1 else 'are' ) else: typing_message = '' if not self._is_connected: self._widget.set_text("RECONNECTING...") elif self._message is not None: self._widget.set_text(self._message) else: self._widget.set_text(typing_message) class MessageWidget(WidgetBase): def __init__(self, timestamp, text, datetimefmt, user=None, show_date=False, watermark_users=None): self.timestamp = timestamp text = [ ('msg_date', self._get_date_str(timestamp, datetimefmt, show_date=show_date) + ' '), ('msg_text_self' if user is not None and user.is_self else 'msg_text', text) ] if user is not None: text.insert(1, ('msg_self' if user.is_self else 'msg_sender', user.first_name + ': ')) if watermark_users is not None and bool(watermark_users): sorted_users = sorted([x.first_name for x in watermark_users]) watermark = "\n[ Seen by {}. 
]".format(', '.join(sorted_users)) text.append(('msg_watermark', watermark)) self._widget = urwid.SelectableIcon(text, cursor_position=0) super().__init__(urwid.AttrMap( self._widget, '', { None: 'msg_selected', 'msg_date': 'msg_selected', 'msg_text_self': 'msg_selected', 'msg_text': 'msg_selected', 'msg_self': 'msg_selected', 'msg_sender': 'msg_selected', 'msg_watermark': 'msg_selected', } )) @staticmethod def _get_date_str(timestamp, datetimefmt, show_date=False): fmt = '' if show_date: fmt += '\n'+datetimefmt.get('date', '')+'\n' fmt += datetimefmt.get('time', '') return timestamp.astimezone(tz=None).strftime(fmt) def __lt__(self, other): return self.timestamp < other.timestamp @staticmethod def from_conversation_event(conversation, conv_event, prev_conv_event, datetimefmt, watermark_users=None): user = conversation.get_user(conv_event.user_id) if prev_conv_event is not None: is_new_day = (conv_event.timestamp.astimezone(tz=None).date() != prev_conv_event.timestamp.astimezone(tz=None).date()) else: is_new_day = False if isinstance(conv_event, hangups.ChatMessageEvent): return MessageWidget(conv_event.timestamp, conv_event.text, datetimefmt, user, show_date=is_new_day, watermark_users=watermark_users) elif isinstance(conv_event, hangups.RenameEvent): if conv_event.new_name == '': text = ('{} cleared the conversation name' .format(user.first_name)) else: text = ('{} renamed the conversation to {}' .format(user.first_name, conv_event.new_name)) return MessageWidget(conv_event.timestamp, text, datetimefmt, show_date=is_new_day, watermark_users=watermark_users) elif isinstance(conv_event, hangups.MembershipChangeEvent): event_users = [conversation.get_user(user_id) for user_id in conv_event.participant_ids] names = ', '.join([user.full_name for user in event_users]) if conv_event.type_ == hangups.MEMBERSHIP_CHANGE_TYPE_JOIN: text = ('{} added {} to the conversation' .format(user.first_name, names)) else: text = ('{} left the conversation'.format(names)) return MessageWidget(conv_event.timestamp, text, datetimefmt, show_date=is_new_day, watermark_users=watermark_users) elif isinstance(conv_event, hangups.HangoutEvent): text = { hangups.HANGOUT_EVENT_TYPE_START: ( 'A Hangout call is starting.' ), hangups.HANGOUT_EVENT_TYPE_END: ( 'A Hangout call ended.' ), hangups.HANGOUT_EVENT_TYPE_ONGOING: ( 'A Hangout call is ongoing.' 
), }.get(conv_event.event_type, 'Unknown Hangout call event.') return MessageWidget(conv_event.timestamp, text, datetimefmt, show_date=is_new_day, watermark_users=watermark_users) elif isinstance(conv_event, hangups.GroupLinkSharingModificationEvent): status_on = hangups.GROUP_LINK_SHARING_STATUS_ON status_text = ('on' if conv_event.new_status == status_on else 'off') text = '{} turned {} joining by link.'.format(user.first_name, status_text) return MessageWidget(conv_event.timestamp, text, datetimefmt, show_date=is_new_day, watermark_users=watermark_users) else: text = 'Unknown conversation event' return MessageWidget(conv_event.timestamp, text, datetimefmt, show_date=is_new_day, watermark_users=watermark_users) class ConversationEventListWalker(urwid.ListWalker): POSITION_LOADING = 'loading' WATERMARK_FAST_SEARCH_ITEMS = 10 def __init__(self, coroutine_queue, conversation, datetimefmt): self._coroutine_queue = coroutine_queue self._conversation = conversation self._is_scrolling = False self._is_loading = False self._first_loaded = False self._datetimefmt = datetimefmt self._watermarked_events = {} self._focus_position = (conversation.events[-1].id_ if conversation.events else self.POSITION_LOADING) self._conversation.on_event.add_observer(self._handle_event) self._conversation.on_watermark_notification.add_observer( self._on_watermark_notification ) super().__init__() def _handle_event(self, conv_event): if not self._is_scrolling: self.set_focus(conv_event.id_) else: self._modified() async def _load(self): try: conv_events = await self._conversation.get_events( self._conversation.events[0].id_ ) except (IndexError, hangups.NetworkError): conv_events = [] if not conv_events: self._first_loaded = True if self._focus_position == self.POSITION_LOADING and conv_events: self.set_focus(conv_events[-1].id_) else: self._modified() self._refresh_watermarked_events() self._is_loading = False def __getitem__(self, position): if position == self.POSITION_LOADING: if self._first_loaded: return urwid.Text('No more messages', align='center') else: if not self._is_loading and not self._first_loaded: self._is_loading = True self._coroutine_queue.put(self._load()) return urwid.Text('Loading...', align='center') try: prev_position = self._get_position(position, prev=True) if prev_position == self.POSITION_LOADING: prev_event = None else: prev_event = self._conversation.get_event(prev_position) return MessageWidget.from_conversation_event( self._conversation, self._conversation.get_event(position), prev_event, self._datetimefmt, watermark_users=self._watermarked_events.get(position, None) ) except KeyError: raise IndexError('Invalid position: {}'.format(position)) @staticmethod def _find_watermark_event(timestamps, timestamp): back_idx = ConversationEventListWalker.WATERMARK_FAST_SEARCH_ITEMS for i, t in list(enumerate(reversed(timestamps[-back_idx:]))): if t <= timestamp: return len(timestamps) - i - 1 return bisect(timestamps[:-back_idx], timestamp) - 1 def _refresh_watermarked_events(self): self._watermarked_events.clear() timestamps = [x.timestamp for x in self._conversation.events] for user_id in self._conversation.watermarks: user = self._conversation.get_user(user_id) if user.is_self: continue timestamp = self._conversation.watermarks[user_id] if timestamp < timestamps[0]: continue event_idx = ConversationEventListWalker._find_watermark_event( timestamps, timestamp ) if event_idx >= 0: event_pos = self._conversation.events[event_idx].id_ if event_pos not in self._watermarked_events: 
self._watermarked_events[event_pos] = set() self._watermarked_events[event_pos].add(user) def _on_watermark_notification(self, _): self._refresh_watermarked_events() self._modified() def _get_position(self, position, prev=False): if position == self.POSITION_LOADING: if prev: raise IndexError('Reached last position') else: return self._conversation.events[0].id_ else: ev = self._conversation.next_event(position, prev=prev) if ev is None: if prev: return self.POSITION_LOADING else: raise IndexError('Reached first position') else: return ev.id_ def next_position(self, position): return self._get_position(position)
MIT License
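The CoroutineQueue class in the hangups context above serializes coroutines so that they run one at a time on the event loop. A minimal standalone sketch of the same pattern, independent of hangups (all names here are illustrative):

import asyncio


class SerialCoroutineQueue:
    """Run queued coroutines one at a time, in the order they were queued."""

    def __init__(self):
        self._queue = asyncio.Queue()

    def put(self, coro):
        # put_nowait never blocks because the queue is unbounded.
        self._queue.put_nowait(coro)

    async def consume(self):
        while True:
            coro = await self._queue.get()
            await coro


async def say(text):
    print(text)


async def main():
    queue = SerialCoroutineQueue()
    consumer = asyncio.create_task(queue.consume())
    queue.put(say("first"))
    queue.put(say("second"))
    await asyncio.sleep(0.1)  # let the consumer drain the queue
    consumer.cancel()


asyncio.run(main())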
ambron60/l-system-drawing
venv/Lib/site-packages/pip-9.0.1-py3.5.egg/pip/download.py
url_to_path
python
def url_to_path(url): assert url.startswith('file:'), ( "You can only turn file: urls into filenames (not %r)" % url) _, netloc, path, _, _ = urllib_parse.urlsplit(url) if netloc: netloc = '\\\\' + netloc path = urllib_request.url2pathname(netloc + path) return path
Convert a file: URL to a path.
https://github.com/ambron60/l-system-drawing/blob/3a4ecface1d862b87acd58ff2d5303cd4475370b/venv/Lib/site-packages/pip-9.0.1-py3.5.egg/pip/download.py#L442-L456
from __future__ import absolute_import import cgi import email.utils import getpass import json import logging import mimetypes import os import platform import re import shutil import sys import tempfile try: import ssl HAS_TLS = True except ImportError: HAS_TLS = False from pip._vendor.six.moves.urllib import parse as urllib_parse from pip._vendor.six.moves.urllib import request as urllib_request import pip from pip.exceptions import InstallationError, HashMismatch from pip.models import PyPI from pip.utils import (splitext, rmtree, format_size, display_path, backup_dir, ask_path_exists, unpack_file, ARCHIVE_EXTENSIONS, consume, call_subprocess) from pip.utils.encoding import auto_decode from pip.utils.filesystem import check_path_owner from pip.utils.logging import indent_log from pip.utils.setuptools_build import SETUPTOOLS_SHIM from pip.utils.glibc import libc_ver from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner from pip.locations import write_delete_marker_file from pip.vcs import vcs from pip._vendor import requests, six from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response from pip._vendor.requests.utils import get_netrc_auth from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.requests.packages import urllib3 from pip._vendor.cachecontrol import CacheControlAdapter from pip._vendor.cachecontrol.caches import FileCache from pip._vendor.lockfile import LockError from pip._vendor.six.moves import xmlrpc_client __all__ = ['get_file_content', 'is_url', 'url_to_path', 'path_to_url', 'is_archive_file', 'unpack_vcs_link', 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url', 'unpack_url'] logger = logging.getLogger(__name__) def user_agent(): data = { "installer": {"name": "pip", "version": pip.__version__}, "python": platform.python_version(), "implementation": { "name": platform.python_implementation(), }, } if data["implementation"]["name"] == 'CPython': data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'PyPy': if sys.pypy_version_info.releaselevel == 'final': pypy_version_info = sys.pypy_version_info[:3] else: pypy_version_info = sys.pypy_version_info data["implementation"]["version"] = ".".join( [str(x) for x in pypy_version_info] ) elif data["implementation"]["name"] == 'Jython': data["implementation"]["version"] = platform.python_version() elif data["implementation"]["name"] == 'IronPython': data["implementation"]["version"] = platform.python_version() if sys.platform.startswith("linux"): from pip._vendor import distro distro_infos = dict(filter( lambda x: x[1], zip(["name", "version", "id"], distro.linux_distribution()), )) libc = dict(filter( lambda x: x[1], zip(["lib", "version"], libc_ver()), )) if libc: distro_infos["libc"] = libc if distro_infos: data["distro"] = distro_infos if sys.platform.startswith("darwin") and platform.mac_ver()[0]: data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} if platform.system(): data.setdefault("system", {})["name"] = platform.system() if platform.release(): data.setdefault("system", {})["release"] = platform.release() if platform.machine(): data["cpu"] = platform.machine() if HAS_TLS and sys.version_info[:2] > (2, 6): data["openssl_version"] = ssl.OPENSSL_VERSION return "{data[installer][name]}/{data[installer][version]} {json}".format( data=data, json=json.dumps(data, 
separators=(",", ":"), sort_keys=True), ) class MultiDomainBasicAuth(AuthBase): def __init__(self, prompting=True): self.prompting = prompting self.passwords = {} def __call__(self, req): parsed = urllib_parse.urlparse(req.url) netloc = parsed.netloc.rsplit("@", 1)[-1] req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) username, password = self.passwords.get(netloc, (None, None)) if username is None: username, password = self.parse_credentials(parsed.netloc) if username is None and password is None: netrc_auth = get_netrc_auth(req.url) username, password = netrc_auth if netrc_auth else (None, None) if username or password: self.passwords[netloc] = (username, password) req = HTTPBasicAuth(username or "", password or "")(req) req.register_hook("response", self.handle_401) return req def handle_401(self, resp, **kwargs): if resp.status_code != 401: return resp if not self.prompting: return resp parsed = urllib_parse.urlparse(resp.url) username = six.moves.input("User for %s: " % parsed.netloc) password = getpass.getpass("Password: ") if username or password: self.passwords[parsed.netloc] = (username, password) resp.content resp.raw.release_conn() req = HTTPBasicAuth(username or "", password or "")(resp.request) new_resp = resp.connection.send(req, **kwargs) new_resp.history.append(resp) return new_resp def parse_credentials(self, netloc): if "@" in netloc: userinfo = netloc.rsplit("@", 1)[0] if ":" in userinfo: return userinfo.split(":", 1) return userinfo, None return None, None class LocalFSAdapter(BaseAdapter): def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): pathname = url_to_path(request.url) resp = Response() resp.status_code = 200 resp.url = request.url try: stats = os.stat(pathname) except OSError as exc: resp.status_code = 404 resp.raw = exc else: modified = email.utils.formatdate(stats.st_mtime, usegmt=True) content_type = mimetypes.guess_type(pathname)[0] or "text/plain" resp.headers = CaseInsensitiveDict({ "Content-Type": content_type, "Content-Length": stats.st_size, "Last-Modified": modified, }) resp.raw = open(pathname, "rb") resp.close = resp.raw.close return resp def close(self): pass class SafeFileCache(FileCache): def __init__(self, *args, **kwargs): super(SafeFileCache, self).__init__(*args, **kwargs) if not check_path_owner(self.directory): logger.warning( "The directory '%s' or its parent directory is not owned by " "the current user and the cache has been disabled. Please " "check the permissions and owner of that directory. 
If " "executing pip with sudo, you may want sudo's -H flag.", self.directory, ) self.directory = None def get(self, *args, **kwargs): if self.directory is None: return try: return super(SafeFileCache, self).get(*args, **kwargs) except (LockError, OSError, IOError): pass def set(self, *args, **kwargs): if self.directory is None: return try: return super(SafeFileCache, self).set(*args, **kwargs) except (LockError, OSError, IOError): pass def delete(self, *args, **kwargs): if self.directory is None: return try: return super(SafeFileCache, self).delete(*args, **kwargs) except (LockError, OSError, IOError): pass class InsecureHTTPAdapter(HTTPAdapter): def cert_verify(self, conn, url, verify, cert): conn.cert_reqs = 'CERT_NONE' conn.ca_certs = None class PipSession(requests.Session): timeout = None def __init__(self, *args, **kwargs): retries = kwargs.pop("retries", 0) cache = kwargs.pop("cache", None) insecure_hosts = kwargs.pop("insecure_hosts", []) super(PipSession, self).__init__(*args, **kwargs) self.headers["User-Agent"] = user_agent() self.auth = MultiDomainBasicAuth() retries = urllib3.Retry( total=retries, status_forcelist=[503], backoff_factor=0.25, ) if cache: secure_adapter = CacheControlAdapter( cache=SafeFileCache(cache, use_dir_lock=True), max_retries=retries, ) else: secure_adapter = HTTPAdapter(max_retries=retries) insecure_adapter = InsecureHTTPAdapter(max_retries=retries) self.mount("https://", secure_adapter) self.mount("http://", insecure_adapter) self.mount("file://", LocalFSAdapter()) for host in insecure_hosts: self.mount("https://{0}/".format(host), insecure_adapter) def request(self, method, url, *args, **kwargs): kwargs.setdefault("timeout", self.timeout) return super(PipSession, self).request(method, url, *args, **kwargs) def get_file_content(url, comes_from=None, session=None): if session is None: raise TypeError( "get_file_content() missing 1 required keyword argument: 'session'" ) match = _scheme_re.search(url) if match: scheme = match.group(1).lower() if (scheme == 'file' and comes_from and comes_from.startswith('http')): raise InstallationError( 'Requirements file %s references URL %s, which is local' % (comes_from, url)) if scheme == 'file': path = url.split(':', 1)[1] path = path.replace('\\', '/') match = _url_slash_drive_re.match(path) if match: path = match.group(1) + ':' + path.split('|', 1)[1] path = urllib_parse.unquote(path) if path.startswith('/'): path = '/' + path.lstrip('/') url = path else: resp = session.get(url) resp.raise_for_status() return resp.url, resp.text try: with open(url, 'rb') as f: content = auto_decode(f.read()) except IOError as exc: raise InstallationError( 'Could not open requirements file: %s' % str(exc) ) return url, content _scheme_re = re.compile(r'^(http|https|file):', re.I) _url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) def is_url(name): if ':' not in name: return False scheme = name.split(':', 1)[0].lower() return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
MIT License
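The url_to_path helper above uses pip's vendored urllib modules; the same conversion can be sketched with the standard library alone (a minimal illustration, not pip's exact code path):

from urllib.parse import urlsplit
from urllib.request import url2pathname


def file_url_to_path(url):
    """Convert a file: URL to a local filesystem path."""
    assert url.startswith('file:'), "only file: URLs are supported"
    _, netloc, path, _, _ = urlsplit(url)
    if netloc:
        # A host part such as file://server/share/x becomes a UNC path on Windows.
        netloc = '\\\\' + netloc
    return url2pathname(netloc + path)


print(file_url_to_path('file:///tmp/example/requirements.txt'))
# prints /tmp/example/requirements.txt on POSIX systems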
semiunsupervisedlearning/dgms_for_semi-unsupervised_learning
src/models/template.py
m2.build_model
python
def build_model(self): self.create_placeholders() self.initialize_networks() """ TODO: Define model components and variables """ self.predictions = self.predict(self.x)
TODO: Define model components and variables
https://github.com/semiunsupervisedlearning/dgms_for_semi-unsupervised_learning/blob/a89c7be92403c3582f3bce534f982382f9733055/src/models/template.py#L28-L33
from __future__ import absolute_import from __future__ import division from __future__ import print_function from models.model import model import sys, os, pdb import numpy as np import utils.dgm as dgm import tensorflow as tf from tensorflow.contrib.tensorboard.plugins import projector class m2(model): def __init__(self, n_x, n_y, n_z=2, n_hid=[4], alpha=0.1, x_dist='Gaussian', nonlinearity=tf.nn.relu, batchnorm=False, l2_reg=0.3, mc_samples=1,ckpt=None): super(m2, self).__init__(n_x, n_y, n_z, n_hid, x_dist, nonlinearity, batchnorm, mc_samples, alpha, l2_reg, ckpt)
MIT License
ericgibert/supersid
supersid/noaa_flares.py
NOAA_flares.http_ngdc
python
def http_ngdc(self): file_name = "goes-xrs-report_{}.txt".format(self.day[:4]) if self.day[:4] != "2015" else "goes-xrs-report_2015_modifiedreplacedmissingrows.txt" file_path = path.join("..", "Private", file_name) if not path.isfile(file_path): try: url = path.join(self.ngdc_URL, file_name) txt = urllib.request.urlopen(url).read().decode() except (urllib.error.HTTPError, urllib.error.URLError) as err: print("Cannot retrieve the file", file_name) print("from URL:", url) print(err, "\n") else: with open(file_path, "wt") as fout: fout.write(txt) return file_path
Get the file for a past year from the NOAA NGDC HTTP archive if it has not already been saved locally. Return the full path of the data file.
https://github.com/ericgibert/supersid/blob/0f44a2c8286d2d5c60b8abc2b7f103357de02f78/supersid/noaa_flares.py#L66-L85
import urllib.request, urllib.error from os import path from datetime import datetime, date class NOAA_flares(object): ngdc_URL = "https://www.ngdc.noaa.gov/stp/space-weather/solar-data/solar-features/solar-flares/x-rays/goes/xrs/" def __init__(self, day): if isinstance(day, str): self.day = day[:8] elif isinstance(day, datetime) or isinstance(day, date): self.day = day.strftime('%Y%m%d') else: raise TypeError("Unknown date format - expecting str 'YYYYMMDD' or datetime/date") self.Tstamp = lambda HHMM: datetime.strptime(self.day + HHMM, "%Y%m%d%H%M") today = date.today() self.XRAlist = [] if today.year==int(self.day[:4]): self.ftp_NOAA() else: file_path = self.http_ngdc() with open(file_path, "rt") as fin: for line in fin: fields = line.split() if fields and fields[0][5:11]==self.day[2:]: if len(fields)==11: self.XRAlist.append(( fields[4], self.Tstamp(fields[1]), self.Tstamp(fields[2]), self.Tstamp(fields[3]), fields[5]+fields[6][0]+'.'+fields[6][1])) elif len(fields)==8: self.XRAlist.append(( "None", self.Tstamp(fields[1]), self.Tstamp(fields[2]), self.Tstamp(fields[3]), fields[4]+fields[5][0]+'.'+fields[5][1])) else: print("Please check this line format:") print(line)
MIT License
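The NOAA_flares constructor shown in the context accepts either a 'YYYYMMDD' string or a date/datetime and then fetches the year's report, so constructing it performs network or file I/O. A small standalone sketch of just the date normalization it applies; normalize_day is an illustrative name, not part of supersid:

from datetime import date, datetime


def normalize_day(day):
    # Mirrors the constructor's handling: strings are truncated to 'YYYYMMDD',
    # date/datetime objects are formatted, anything else is rejected.
    if isinstance(day, str):
        return day[:8]
    if isinstance(day, (datetime, date)):
        return day.strftime('%Y%m%d')
    raise TypeError("Unknown date format - expecting str 'YYYYMMDD' or datetime/date")


print(normalize_day('20140102'))          # '20140102'
print(normalize_day(date(2014, 1, 2)))    # '20140102'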
bread-and-pepper/django-userena
userena/views.py
activate
python
def activate(request, activation_key, template_name='userena/activate_fail.html', retry_template_name='userena/activate_retry.html', success_url=None, extra_context=None): try: if (not UserenaSignup.objects.check_expired_activation(activation_key) or not userena_settings.USERENA_ACTIVATION_RETRY): user = UserenaSignup.objects.activate_user(activation_key) if user: auth_user = authenticate(identification=user.email, check_password=False) login(request, auth_user) if userena_settings.USERENA_USE_MESSAGES: messages.success(request, _('Your account has been activated and you have been signed in.'), fail_silently=True) if success_url: redirect_to = success_url % {'username': user.username } else: redirect_to = reverse('userena_profile_detail', kwargs={'username': user.username}) return redirect(redirect_to) else: if not extra_context: extra_context = dict() return ExtraContextTemplateView.as_view(template_name=template_name, extra_context=extra_context)( request) else: if not extra_context: extra_context = dict() extra_context['activation_key'] = activation_key return ExtraContextTemplateView.as_view(template_name=retry_template_name, extra_context=extra_context)(request) except UserenaSignup.DoesNotExist: if not extra_context: extra_context = dict() return ExtraContextTemplateView.as_view(template_name=template_name, extra_context=extra_context)(request)
Activate a user with an activation key. The key is a SHA1 string. When the SHA1 is found with an :class:`UserenaSignup`, the :class:`User` of that account will be activated. After a successful activation the view will redirect to ``success_url``. If the SHA1 is not found, the user will be shown the ``template_name`` template displaying a failure message. If the SHA1 is found but expired, ``retry_template_name`` is used instead, so the user can proceed to :func:`activate_retry` to get a new activation key. :param activation_key: A 40-character SHA1 hex string. A SHA1 digest is 160 bits long; at 4 bits per hexadecimal character, that makes 160/4 = 40 characters. :param template_name: String containing the template name that is used when the ``activation_key`` is invalid and the activation fails. Defaults to ``userena/activate_fail.html``. :param retry_template_name: String containing the template name that is used when the ``activation_key`` is expired. Defaults to ``userena/activate_retry.html``. :param success_url: String containing the URL where the user should be redirected to after a successful activation. ``%(username)s`` will be replaced by string formatting if supplied. If ``success_url`` is left empty, the view redirects to ``userena_profile_detail``. :param extra_context: Dictionary containing variables which could be added to the template context. Defaults to an empty dictionary.
https://github.com/bread-and-pepper/django-userena/blob/7dfb3d5d148127e32f217a62096d507266a3a83c/userena/views.py#L149-L221
from django.core.urlresolvers import reverse from django.shortcuts import redirect, get_object_or_404 from django.contrib.auth import authenticate, login, logout, REDIRECT_FIELD_NAME from django.contrib.auth import get_user_model from django.contrib.auth.forms import PasswordChangeForm from django.contrib.auth.views import logout as Signout from django.views.generic import TemplateView from django.views.generic.list import ListView from django.contrib import messages from django.core.exceptions import PermissionDenied from django.utils.translation import ugettext as _ from django.http import Http404, HttpResponseRedirect from userena.forms import (SignupForm, SignupFormOnlyEmail, AuthenticationForm, ChangeEmailForm, EditProfileForm) from userena.models import UserenaSignup from userena.decorators import secure_required from userena.utils import signin_redirect, get_profile_model, get_user_profile from userena import signals as userena_signals from userena import settings as userena_settings from guardian.decorators import permission_required_or_403 import warnings class ExtraContextTemplateView(TemplateView): extra_context = None def get_context_data(self, *args, **kwargs): context = super(ExtraContextTemplateView, self).get_context_data(*args, **kwargs) if self.extra_context: context.update(self.extra_context) return context post = TemplateView.get class ProfileListView(ListView): context_object_name='profile_list' page=1 paginate_by=50 template_name=userena_settings.USERENA_PROFILE_LIST_TEMPLATE extra_context=None def get_context_data(self, **kwargs): context = super(ProfileListView, self).get_context_data(**kwargs) try: page = int(self.request.GET.get('page', None)) except (TypeError, ValueError): page = self.page if userena_settings.USERENA_DISABLE_PROFILE_LIST and not self.request.user.is_staff: raise Http404 if not self.extra_context: self.extra_context = dict() context['page'] = page context['paginate_by'] = self.paginate_by context['extra_context'] = self.extra_context return context def get_queryset(self): profile_model = get_profile_model() queryset = profile_model.objects.get_visible_profiles(self.request.user).select_related() return queryset @secure_required def signup(request, signup_form=SignupForm, template_name='userena/signup_form.html', success_url=None, extra_context=None): if userena_settings.USERENA_DISABLE_SIGNUP: raise PermissionDenied if userena_settings.USERENA_WITHOUT_USERNAMES and (signup_form == SignupForm): signup_form = SignupFormOnlyEmail form = signup_form() if request.method == 'POST': form = signup_form(request.POST, request.FILES) if form.is_valid(): user = form.save() userena_signals.signup_complete.send(sender=None, user=user) if success_url: redirect_to = success_url else: redirect_to = reverse('userena_signup_complete', kwargs={'username': user.username}) if request.user.is_authenticated(): logout(request) if (userena_settings.USERENA_SIGNIN_AFTER_SIGNUP and not userena_settings.USERENA_ACTIVATION_REQUIRED): user = authenticate(identification=user.email, check_password=False) login(request, user) return redirect(redirect_to) if not extra_context: extra_context = dict() extra_context['form'] = form return ExtraContextTemplateView.as_view(template_name=template_name, extra_context=extra_context)(request) @secure_required
BSD 3-Clause New or Revised License
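The activate view above is normally reached through a URL pattern that captures the activation key. A hypothetical URLconf entry for an old-style Django project (the regex and route name are illustrative, not copied from userena's shipped urls.py):

from django.conf.urls import url

from userena import views as userena_views

urlpatterns = [
    # Captures the 40-character SHA1 activation key described in the docstring above.
    url(r'^activate/(?P<activation_key>\w+)/$',
        userena_views.activate,
        name='userena_activate'),
]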
swissdatasciencecenter/renku-python
renku/cli/dataset.py
update
python
def update(names, creators, include, exclude, ref, delete, external): from renku.core.commands.dataset import update_datasets communicator = ClickCallback() update_datasets().with_communicator(communicator).build().execute( names=list(names), creators=creators, include=include, exclude=exclude, ref=ref, delete=delete, external=external, ) click.secho("OK", fg="green")
Updates files in a dataset from a remote Git repo.
https://github.com/swissdatasciencecenter/renku-python/blob/5e43e2eff67cdf20fc2805799fe2822e23bc503d/renku/cli/dataset.py#L832-L846
import json from pathlib import Path import click import requests from rich.console import Console from rich.markdown import Markdown from renku.cli.utils.callback import ClickCallback from renku.core import errors from renku.core.commands.format.dataset_files import DATASET_FILES_COLUMNS, DATASET_FILES_FORMATS from renku.core.commands.format.dataset_tags import DATASET_TAGS_FORMATS from renku.core.commands.format.datasets import DATASETS_COLUMNS, DATASETS_FORMATS @click.group() def dataset(): @dataset.command("ls") @click.option("--format", type=click.Choice(DATASETS_FORMATS), default="tabular", help="Choose an output format.") @click.option( "-c", "--columns", type=click.STRING, default="id,name,title,version", metavar="<columns>", help="Comma-separated list of column to display: {}.".format(", ".join(DATASETS_COLUMNS.keys())), show_default=True, ) def list_dataset(format, columns): from renku.core.commands.dataset import list_datasets result = list_datasets().lock_dataset().build().execute(format=format, columns=columns) click.echo(result.output) @dataset.command() @click.argument("name") @click.option("-t", "--title", default=None, type=click.STRING, help="Title of the dataset.") @click.option("-d", "--description", default=None, type=click.STRING, help="Dataset's description.") @click.option( "-c", "--creator", "creators", default=None, multiple=True, help="Creator's name, email, and affiliation. Accepted format is 'Forename Surname <email> [affiliation]'.", ) @click.option( "-m", "--metadata", default=None, type=click.Path(exists=True, dir_okay=False), help="Custom metadata to be associated with the dataset.", ) @click.option("-k", "--keyword", default=None, multiple=True, type=click.STRING, help="List of keywords or tags.") def create(name, title, description, creators, metadata, keyword): from renku.core.commands.dataset import create_dataset communicator = ClickCallback() creators = creators or () custom_metadata = None if metadata: custom_metadata = json.loads(Path(metadata).read_text()) result = ( create_dataset() .with_communicator(communicator) .build() .execute( name=name, title=title, description=description, creators=creators, keywords=keyword, custom_metadata=custom_metadata, ) ) new_dataset = result.output click.echo(f'Use the name "{new_dataset.name}" to refer to this dataset.') click.secho("OK", fg="green") @dataset.command() @click.argument("name") @click.option("-t", "--title", default=None, type=click.STRING, help="Title of the dataset.") @click.option("-d", "--description", default=None, type=click.STRING, help="Dataset's description.") @click.option( "-c", "--creator", "creators", default=None, multiple=True, help="Creator's name, email, and affiliation. 
" "Accepted format is 'Forename Surname <email> [affiliation]'.", ) @click.option( "-m", "--metadata", default=None, type=click.Path(exists=True, dir_okay=False), help="Custom metadata to be associated with the dataset.", ) @click.option("-k", "--keyword", default=None, multiple=True, type=click.STRING, help="List of keywords or tags.") def edit(name, title, description, creators, metadata, keyword): from renku.core.commands.dataset import edit_dataset creators = creators or () keywords = keyword or () custom_metadata = None if metadata: custom_metadata = json.loads(Path(metadata).read_text()) result = ( edit_dataset() .build() .execute( name=name, title=title, description=description, creators=creators, keywords=keywords, skip_image_update=True, custom_metadata=custom_metadata, ) ) updated, no_email_warnings = result.output if not updated: click.echo( ( "Nothing to update. " "Check available fields with `renku dataset edit --help`\n\n" 'Hint: `renku dataset edit --title "new title"`' ) ) else: click.echo("Successfully updated: {}.".format(", ".join(updated.keys()))) if no_email_warnings: click.echo(ClickCallback.WARNING + "No email or wrong format for: " + ", ".join(no_email_warnings)) @dataset.command("show") @click.argument("name") def show(name): from renku.core.commands.dataset import show_dataset result = show_dataset().build().execute(name=name) ds = result.output click.echo(click.style("Name: ", bold=True, fg="magenta") + click.style(ds["name"], bold=True)) click.echo(click.style("Created: ", bold=True, fg="magenta") + (ds.get("created_at", "") or "")) creators = [] for creator in ds.get("creators", []): if creator["affiliation"]: creators.append(f"{creator['name']} <{creator['email']}> [{creator['affiliation']}]") else: creators.append(f"{creator['name']} <{creator['email']}>") click.echo(click.style("Creator(s): ", bold=True, fg="magenta") + ", ".join(creators)) if ds["keywords"]: click.echo(click.style("Keywords: ", bold=True, fg="magenta") + ", ".join(ds.get("keywords", ""))) if ds["version"]: click.echo(click.style("Version: ", bold=True, fg="magenta") + ds.get("version", "")) if ds["annotations"]: click.echo(click.style("Annotations: ", bold=True, fg="magenta")) click.echo(json.dumps(ds.get("annotations", ""), indent=2)) click.echo(click.style("Title: ", bold=True, fg="magenta") + click.style(ds.get("title", ""), bold=True)) click.echo(click.style("Description: ", bold=True, fg="magenta")) Console().print(Markdown(ds.get("description", "") or "")) @dataset.command() @click.argument("name") @click.argument("urls", nargs=-1) @click.option("-e", "--external", is_flag=True, help="Creates a link to external data.") @click.option("--force", is_flag=True, help="Allow adding otherwise ignored files.") @click.option("-o", "--overwrite", is_flag=True, help="Overwrite existing files.") @click.option("-c", "--create", is_flag=True, help="Create dataset if it does not exist.") @click.option( "-s", "--src", "--source", "sources", default=None, multiple=True, help="Path(s) within remote git repo to be added" ) @click.option( "-d", "--dst", "--destination", "destination", default="", help="Destination directory within the dataset path" ) @click.option("--ref", default=None, help="Add files from a specific commit/tag/branch.") def add(name, urls, external, force, overwrite, create, sources, destination, ref): from renku.core.commands.dataset import add_to_dataset communicator = ClickCallback() add_to_dataset().with_communicator(communicator).build().execute( urls=urls, name=name, 
external=external, force=force, overwrite=overwrite, create=create, sources=sources, destination=destination, ref=ref, ) click.secho("OK", fg="green") @dataset.command("ls-files") @click.argument("names", nargs=-1) @click.option( "--creators", help="Filter files which where authored by specific creators. Multiple creators are specified by comma.", ) @click.option("-I", "--include", default=None, multiple=True, help="Include files matching given pattern.") @click.option("-X", "--exclude", default=None, multiple=True, help="Exclude files matching given pattern.") @click.option("--format", type=click.Choice(DATASET_FILES_FORMATS), default="tabular", help="Choose an output format.") @click.option( "-c", "--columns", type=click.STRING, default="dataset_name,added,size,path,lfs", metavar="<columns>", help="Comma-separated list of column to display: {}.".format(", ".join(DATASET_FILES_COLUMNS.keys())), show_default=True, ) def ls_files(names, creators, include, exclude, format, columns): from renku.core.commands.dataset import list_files result = ( list_files() .lock_dataset() .build() .execute(datasets=names, creators=creators, include=include, exclude=exclude, format=format, columns=columns) ) click.echo(result.output) @dataset.command() @click.argument("name") @click.option("-I", "--include", multiple=True, help="Include files matching given pattern.") @click.option("-X", "--exclude", multiple=True, help="Exclude files matching given pattern.") @click.option("-y", "--yes", is_flag=True, help="Confirm unlinking of all files.") def unlink(name, include, exclude, yes): from renku.core.commands.dataset import file_unlink communicator = ClickCallback() file_unlink().with_communicator(communicator).build().execute(name=name, include=include, exclude=exclude, yes=yes) click.secho("OK", fg="green") @dataset.command("rm") @click.argument("name") def remove(name): from renku.core.commands.dataset import remove_dataset remove_dataset().build().execute(name) click.secho("OK", fg="green") @dataset.command("tag") @click.argument("name") @click.argument("tag") @click.option("-d", "--description", default="", help="A description for this tag") @click.option("--force", is_flag=True, help="Allow overwriting existing tags.") def tag(name, tag, description, force): from renku.core.commands.dataset import add_dataset_tag_command add_dataset_tag_command().build().execute(name=name, tag=tag, description=description, force=force) click.secho("OK", fg="green") @dataset.command("rm-tags") @click.argument("name") @click.argument("tags", nargs=-1) def remove_tags(name, tags): from renku.core.commands.dataset import remove_dataset_tags_command remove_dataset_tags_command().build().execute(name=name, tags=tags) click.secho("OK", fg="green") @dataset.command("ls-tags") @click.argument("name") @click.option("--format", type=click.Choice(DATASET_TAGS_FORMATS), default="tabular", help="Choose an output format.") def ls_tags(name, format): from renku.core.commands.dataset import list_tags_command result = list_tags_command().lock_dataset().build().execute(name=name, format=format) click.echo(result.output) def export_provider_argument(*param_decls, **attrs): def wrapper(f): from click import argument from renku.core.commands.providers import ProviderFactory providers = [k.lower() for k, p in ProviderFactory.providers().items() if p.supports_export] f = argument("provider", type=click.Choice(providers))(f) return f return wrapper def export_provider_options(*param_decls, **attrs): def wrapper(f): from click_option_group import 
optgroup from renku.core.commands.providers import ProviderFactory providers = [ (k, v) for k, v in ProviderFactory.providers().items() if v.supports_export and v.export_parameters() ] for i, (name, provider) in enumerate(providers): params = provider.export_parameters() for j, (param_name, (param_description, param_type)) in enumerate(params.items()): if j == 0: param_description = f"\b\n{param_description}\n " f = optgroup.option(f"--{param_name}", type=param_type, help=param_description)(f) name = f"{name} configuration" if i == len(providers) - 1: name = "\n " + name f = optgroup.group(name=name)(f) return f return wrapper @dataset.command("export") @click.argument("name") @export_provider_argument() @click.option("-p", "--publish", is_flag=True, help="Automatically publish exported dataset.") @click.option("-t", "--tag", help="Dataset tag to export") @export_provider_options() def export_(name, provider, publish, tag, **kwargs): from renku.core.commands.dataset import export_dataset try: communicator = ClickCallback() export_dataset().lock_dataset().with_communicator(communicator).build().execute( name=name, provider_name=provider, publish=publish, tag=tag, **kwargs ) except (ValueError, errors.InvalidAccessToken, errors.DatasetNotFound, requests.HTTPError) as e: raise click.BadParameter(e) click.secho("OK", fg="green") @dataset.command("import") @click.argument("uri") @click.option("--short-name", "--name", "name", default=None, help="A convenient name for dataset.") @click.option("-x", "--extract", is_flag=True, help="Extract files before importing to dataset.") @click.option("-y", "--yes", is_flag=True, help="Bypass download confirmation.") def import_(uri, name, extract, yes): from renku.core.commands.dataset import import_dataset communicator = ClickCallback() import_dataset().with_communicator(communicator).build().execute(uri=uri, name=name, extract=extract, yes=yes) click.secho(" " * 79 + "\r", nl=False) click.secho("OK", fg="green") @dataset.command("update") @click.argument("names", nargs=-1) @click.option( "--creators", help="Filter files which where authored by specific creators. Multiple creators are specified by comma.", ) @click.option("-I", "--include", default=None, multiple=True, help="Include files matching given pattern.") @click.option("-X", "--exclude", default=None, multiple=True, help="Exclude files matching given pattern.") @click.option("--ref", default=None, help="Update to a specific commit/tag/branch.") @click.option("--delete", is_flag=True, help="Delete local files that are deleted from remote.") @click.option("-e", "--external", is_flag=True, help="Update external data.")
Apache License 2.0
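The update command above is a Click command, so it can be exercised directly with Click's test runner; an illustrative invocation (the dataset name and include pattern are made up, and a real renku project is required for it to succeed):

from click.testing import CliRunner

from renku.cli.dataset import update

runner = CliRunner()
result = runner.invoke(update, ["my-dataset", "--include", "*.csv", "--delete"])
print(result.exit_code)
print(result.output)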
pinterest/slackminion
slackminion/plugins/core/user.py
UserManager.load_user_info
python
def load_user_info(self, user): pass
Loads additional user information and stores it in the user object.
https://github.com/pinterest/slackminion/blob/5845e7a3e09c72305c06e8f0aa58db1882858aaa/slackminion/plugins/core/user.py#L58-L62
from slackminion.plugin.base import BasePlugin from . import version try: from . import commit except ImportError: commit = 'HEAD' class UserManager(BasePlugin): def on_load(self): self._dont_save = True self.users = {} self.admins = {} if 'bot_admins' in self._bot.config: self.admins = self._bot.config['bot_admins'] setattr(self._bot, 'user_manager', self) return super(UserManager, self).on_load() def get(self, userid): if userid in self.users: return self.users[userid] return None def get_by_username(self, username): res = [x for x in list(self.users.values()) if x.username == username] if len(res) > 0: return res[0] return None def set(self, user): self.log.debug("Loading user information for %s/%s", user.id, user.username) self.load_user_info(user) self.log.debug("Loading user rights for %s/%s", user.id, user.username) self.load_user_rights(user) self._add_user_to_cache(user) return user def _add_user_to_cache(self, user): if user.id not in list(self.users.keys()): self.users[user.id] = user self.log.debug("Added user: %s/%s", user.id, user.username)
MIT License
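load_user_info above is a no-op hook that UserManager.set() calls before caching a user, so subclasses appear intended to override it. A hypothetical override (the attributes set on the user are illustrative):

from slackminion.plugins.core.user import UserManager


class DirectoryBackedUserManager(UserManager):
    def load_user_info(self, user):
        # Illustrative only: look the user up in some external directory
        # and attach extra details before the user object is cached.
        profile = {"team": "infra", "timezone": "UTC"}  # stand-in for a real lookup
        user.team = profile["team"]
        user.timezone = profile["timezone"]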
lightly-ai/lightly
lightly/models/modules/nn_memory_bank.py
NNMemoryBankModule.forward
python
def forward(self, output: torch.Tensor, update: bool = False): output, bank = super(NNMemoryBankModule, self).forward(output, update=update) bank = bank.to(output.device).t() output_normed = torch.nn.functional.normalize(output, dim=1) bank_normed = torch.nn.functional.normalize(bank, dim=1) similarity_matrix = torch.einsum("nd,md->nm", output_normed, bank_normed) index_nearest_neighbours = torch.argmax(similarity_matrix, dim=1) nearest_neighbours = torch.index_select(bank, dim=0, index=index_nearest_neighbours) return nearest_neighbours
Returns the nearest neighbour of the output tensor from the memory bank. Args: output: The torch tensor for which you want the nearest neighbour. update: If `True`, updates the memory bank by adding `output` to it.
https://github.com/lightly-ai/lightly/blob/00820e5a60522effb3685a8d792f15e99770ea50/lightly/models/modules/nn_memory_bank.py#L41-L65
import torch from lightly.loss.memory_bank import MemoryBankModule class NNMemoryBankModule(MemoryBankModule): def __init__(self, size: int = 2 ** 16): super(NNMemoryBankModule, self).__init__(size)
MIT License
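A minimal usage sketch for the module above, assuming NNMemoryBankModule behaves like a standard torch.nn.Module so that calling it dispatches to forward (batch and embedding sizes are illustrative):

import torch

from lightly.models.modules.nn_memory_bank import NNMemoryBankModule

memory_bank = NNMemoryBankModule(size=4096)

# A batch of 32 embeddings with 128 dimensions each.
projections = torch.randn(32, 128)

# Look up the nearest neighbour of each embedding in the bank and,
# because update=True, also add this batch to the bank.
neighbours = memory_bank(projections, update=True)
print(neighbours.shape)  # expected: torch.Size([32, 128])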
dagbldr/dagbldr
dagbldr/nodes/nodes.py
fixed_projection
python
def fixed_projection(list_of_inputs, list_of_input_dims, transform, name=None, pre=None, post=None, strict=True): assert len(list_of_input_dims) == len(list_of_inputs) conc_input_dim = sum(list_of_input_dims) conc_input = concatenate(list_of_inputs, axis=list_of_inputs[0].ndim - 1) np_W = transform.astype(_type) W = as_shared(np_W) if pre is None: np_pre = np.zeros((conc_input_dim,)).astype(_type) else: np_pre = pre t_pre = as_shared(np_pre) if post is None: np_post = np.zeros_like(np_W[0]).astype(_type) else: np_post = post t_post = as_shared(np_post) logger.info((conc_input_dim, np_W[0].shape)) return tensor.dot(conc_input + t_pre, W) + t_post
Concatenates the inputs and applies the fixed (non-learned) linear transform ``transform``, adding the optional ``pre`` offset before and the optional ``post`` offset after the projection.
https://github.com/dagbldr/dagbldr/blob/3bfad37f4e425cfcc1fded91dbce733cf44c6cfd/dagbldr/nodes/nodes.py#L550-L582
import numpy as np from scipy import linalg from scipy.misc import factorial import theano from theano import tensor from theano.tensor.signal.downsample import max_pool_2d from theano.sandbox.rng_mrg import MRG_RandomStreams from ..utils import concatenate, as_shared from ..core import get_name, set_shared, get_shared from ..core import get_logger, get_type logger = get_logger() _type = get_type() def np_zeros(shape): return np.zeros(shape).astype(_type) def np_ones(shape): return np.ones(shape).astype(_type) def np_unit_uniform(shape, random_state): return np_uniform(shape, random_state, scale=1.) def np_uniform(shape, random_state, scale=0.08): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] else: shp = shape return random_state.uniform(low=-scale, high=scale, size=shp).astype(_type) def np_normal(shape, random_state, scale=0.01): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] else: shp = shape return (scale * random_state.randn(*shp)).astype(_type) def np_tanh_fan_uniform(shape, random_state, scale=1.): if type(shape[0]) is tuple: kern_sum = np.prod(shape[0]) + np.prod(shape[1]) shp = (shape[1][0], shape[0][0]) + shape[1][1:] else: kern_sum = np.sum(shape) shp = shape bound = scale * np.sqrt(6. / kern_sum) return random_state.uniform(low=-bound, high=bound, size=shp).astype(_type) def np_tanh_fan_normal(shape, random_state, scale=1.): if type(shape[0]) is tuple: kern_sum = np.prod(shape[0]) + np.prod(shape[1]) shp = (shape[1][0], shape[0][0]) + shape[1][1:] else: kern_sum = np.sum(shape) shp = shape var = scale * np.sqrt(2. / kern_sum) return var * random_state.randn(*shp).astype(_type) def np_sigmoid_fan_uniform(shape, random_state, scale=4.): return scale * np_tanh_fan_uniform(shape, random_state) def np_sigmoid_fan_normal(shape, random_state, scale=4.): return scale * np_tanh_fan_normal(shape, random_state) def np_variance_scaled_uniform(shape, random_state, scale=1.): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] kern_sum = np.prod(shape[0]) else: shp = shape kern_sum = shape[0] bound = scale * np.sqrt(3. / kern_sum) return random_state.uniform(low=-bound, high=bound, size=shp).astype(_type) def np_variance_scaled_randn(shape, random_state, scale=1.): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] kern_sum = np.prod(shape[0]) else: shp = shape kern_sum = shape[0] std = scale * np.sqrt(1. / kern_sum) return std * random_state.randn(*shp).astype(_type) def np_deep_scaled_uniform(shape, random_state, scale=1.): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] kern_sum = np.prod(shape[0]) else: shp = shape kern_sum = shape[0] bound = scale * np.sqrt(6. / kern_sum) return random_state.uniform(low=-bound, high=bound, size=shp).astype(_type) def np_deep_scaled_normal(shape, random_state, scale=1.): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] kern_sum = np.prod(shape[0]) else: shp = shape kern_sum = shape[0] std = scale * np.sqrt(2. 
/ kern_sum) return std * random_state.randn(*shp).astype(_type) def np_ortho(shape, random_state, scale=1.): if type(shape[0]) is tuple: shp = (shape[1][0], shape[0][0]) + shape[1][1:] flat_shp = (shp[0], np.prd(shp[1:])) else: shp = shape flat_shp = shape g = random_state.randn(*flat_shp) U, S, VT = linalg.svd(g, full_matrices=False) res = U if U.shape == flat_shp else VT res = res.reshape(shp) return (scale * res).astype(_type) def np_identity(shape, random_state, scale=0.98): assert shape[0] == shape[1] res = np.eye(shape[0]) return (scale * res).astype(_type) def softplus_activation(X, eps=1E-4): return tensor.nnet.softplus(X) + eps def relu_activation(X): return X * (X > 0) def linear_activation(X): return X def softmax_activation(X): e_X = tensor.exp(X - X.max(axis=-1, keepdims=True)) out = e_X / e_X.sum(axis=-1, keepdims=True) return out def _dropout(X, random_state, on_off_switch, p=0.): if p > 0: theano_seed = random_state.randint(-2147462579, 2147462579) if theano_seed == 0: print("WARNING: prior layer got 0 seed. Reseeding...") theano_seed = random_state.randint(-2**32, 2**32) theano_rng = MRG_RandomStreams(seed=theano_seed) retain_prob = 1 - p if X.ndim == 2: X *= theano_rng.binomial( X.shape, p=retain_prob, dtype=_type) ** on_off_switch X /= retain_prob elif X.ndim == 3: X *= theano_rng.binomial(( X.shape[1], X.shape[2]), p=retain_prob, dtype=_type) ** on_off_switch X /= retain_prob else: raise ValueError("Unsupported tensor with ndim %s" % str(X.ndim)) return X def dropout(list_of_inputs, graph, name, on_off_switch, dropout_prob=0.5, random_state=None): theano_seed = random_state.randint(-2147462579, 2147462579) if theano_seed == 0: print("WARNING: prior layer got 0 seed. Reseeding...") theano_seed = random_state.randint(-2**32, 2**32) conc_input = concatenate(list_of_inputs, axis=list_of_inputs[0].ndim - 1) dropped = _dropout(conc_input, random_state, on_off_switch, p=dropout_prob) return dropped
BSD 3-Clause New or Revised License
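A usage sketch for fixed_projection above, using an identity matrix as the fixed transform so the projection passes the input straight through; this assumes fixed_projection and theano are importable as shown and that dagbldr's as_shared helper accepts a bare array (dimensions and names are illustrative):

import numpy as np
from theano import tensor

from dagbldr.nodes import fixed_projection

proj_dim = 5
X_sym = tensor.fmatrix()
transform = np.eye(proj_dim)  # fixed, non-learned projection matrix

# Symbolic output: dot(X + pre, transform) + post, with pre/post defaulting to zeros.
out = fixed_projection([X_sym], [proj_dim], transform, name="fixed_proj")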
allegro/ralph
contrib/dhcp_agent/dhcp_agent.py
DHCPConfigManager._set_new_configuration
python
def _set_new_configuration(self, config, path_to_config=None): try: with open_file_or_stdout_to_writing(path_to_config) as f: f.write(str(config)) self.logger.info( 'Configuration written to {}'.format( path_to_config or 'stdout' ) ) return True except IOError as e: self.logger.error( 'Could not write new DHCP configuration. Error message: %s', e ) return False
Writes (or prints) the config file. Args: config (string): Raw (i.e., plain-text) configuration for the DHCP server. path_to_config (string): The path to the config file. Returns: bool: True if the config is saved successfully, otherwise False.
https://github.com/allegro/ralph/blob/04abcc9e78136ac1fb1ac119e2c660ee3baa396d/contrib/dhcp_agent/dhcp_agent.py#L406-L431
import atexit import contextlib import errno import fcntl import hashlib import logging import os import subprocess import sys import tempfile from optparse import OptionParser from logging import handlers as logging_handlers IS_PY3 = sys.version_info[0] == 3 if IS_PY3: from dbm import gnu as cache_db from urllib.request import urlopen, Request from urllib.parse import urlencode from urllib.error import HTTPError string_types = (str,) else: import dbm as cache_db from urllib import urlencode from urllib2 import urlopen, Request, HTTPError string_types = (basestring,) APP_DIR = os.path.expanduser('~/.ralph-dhcp-agent') PROTOS = ('http', 'https') PROTO_HTTP, PROTO_HTTPS = PROTOS CACHE_LAST_MODIFIED_PREFIX = 'http-last-modified' DEFAULT_DHCP_SERVICE_NAME = 'isc-dhcp-server' @contextlib.contextmanager def open_file_or_stdout_to_writing(filename=None): if filename in ['-', None]: handler = sys.stdout else: handler = open(filename, 'w') try: yield handler finally: if handler is not sys.stdout: handler.close() class Cache(object): def __init__(self, cache_path): if not os.path.exists(cache_path): os.makedirs(cache_path) self._cache = cache_db.open(os.path.join(cache_path, 'cache'), 'c') def get(self, key, prefix=''): url_hash = get_url_hash(key) try: last = self._cache[prefix + url_hash] except KeyError: last = None return last def set(self, key, value, prefix=''): url_hash = get_url_hash(key) self._cache[prefix + url_hash] = value def close(self): self._cache.close() def __enter__(self): return self def __exit__(self, *args, **kwargs): self.close() def get_url_hash(url): hash_url = hashlib.md5() hash_url.update(url.encode()) return hash_url.hexdigest() def convert_to_request_params(params): request_params = [] for key, value in params.items(): if isinstance(value, (list, tuple)): request_params.extend([(key, param) for param in value]) if isinstance(value, string_types): request_params.extend([(key, value)]) return request_params def _get_cmd_parser(): parser = OptionParser( description='Update configuration in DHCP server.', ) parser.add_option('-H', '--host', help='Ralph instance host.') parser.add_option('-k', '--key', help='Ralph API key.') parser.add_option( '-m', '--sections', type='choice', choices=DHCPConfigManager.DHCP_SECTIONS, action='append', help='Choose what part of config you want to upgrade. ' '[Default: all; Options: {}]'.format(', '.join(DHCPConfigManager.DHCP_SECTIONS)), ) parser.add_option( '-l', '--log-path', help='Path to log file. 
[Default: STDOUT]', default='STDOUT', ) parser.add_option( '-c', '--dhcp-config-entries', help='Path to the DHCP entries configuration file.', ) parser.add_option( '-n', '--dhcp-config-networks', help='Path to the DHCP networks configuration file.', ) parser.add_option( '-p', '--proto', type='choice', choices=PROTOS, default=PROTO_HTTPS ) parser.add_option( '-e', '--net-env', help='Only get config for the specified network environment.', ) parser.add_option( '-d', '--dc', help='Only get config for the specified data center.', ) parser.add_option( '-r', '--restart', help='Restart service after fetching config?', action='store_true', ) parser.add_option( '-v', '--verbose', help='Increase verbosity.', action='store_true', default=False, ) parser.add_option( '-s', '--dhcp-service-name', help='Name of the service to restart.', default=DEFAULT_DHCP_SERVICE_NAME ) return parser def _setup_logging(filename, verbose=False): log_size = os.getenv('DHCP_AGENT_LOG_SIZE', 20) logger = logging.getLogger(__file__) if verbose: logger.setLevel(logging.INFO) else: logger.setLevel(logging.WARNING) if not filename or filename in ('-', 'STDOUT'): handler = logging.StreamHandler() else: handler = logging_handlers.RotatingFileHandler( filename, maxBytes=(log_size * (1 << 20)), backupCount=5 ) fmt = logging.Formatter("[%(asctime)-12s.%(msecs)03d] " "%(levelname)-8s %(filename)s:%(lineno)d " "%(message)s", "%Y-%m-%d %H:%M:%S") handler.setFormatter(fmt) logger.addHandler(handler) return logger def _remove_application_lock(lockfile, logger): logger.info('Removing lock') os.unlink(lockfile) def _set_script_lock(logger): lockfile = '{}.lock'.format( os.path.join(tempfile.gettempdir(), os.path.split(sys.argv[0])[1]) ) f = os.open(lockfile, os.O_TRUNC | os.O_CREAT | os.O_RDWR) try: fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB) os.write(f, '{}'.format(os.getpid()).encode()) atexit.register(_remove_application_lock, lockfile, logger) except IOError as e: if e.errno == errno.EAGAIN: logger.critical('Script already running.') sys.exit(2) raise def _get_cmd_params_from_parser(parser): return vars(parser.parse_args()[0]) def _check_params(params, error_callback): required_params = {'host', 'key'} diff = required_params - {k for k, v in params.items() if v} if diff: error_callback('ERROR: {} are required.'.format( ', '.join(['--{}'.format(d) for d in diff])) ) return False dc = params.get('dc') net_env = params.get('net_env') if (dc and net_env) or (not dc and not net_env): error_callback( 'ERROR: Only DC or ENV mode available.', ) return False sections = params.get('sections') if not sections: error_callback( 'ERROR: option `sections` are required.', ) return False return True class DHCPConfigManager(object): DHCP_SECTIONS = ('networks', 'entries') def __init__( self, logger, cache, host, key, sections, dc=None, net_env=None, verbose=False, restart=False, proto=PROTO_HTTPS, dhcp_config_entries=None, dhcp_config_networks=None, dhcp_service_name=DEFAULT_DHCP_SERVICE_NAME, **kwargs ): self.cache = cache self.logger = logger self.host = host self.key = key self.proto = proto self.can_restart_dhcp_server = restart self.dhcp_service_name = dhcp_service_name self.envs = net_env.split(',') if net_env else [] self.dcs = dc.split(',') if dc else [] self.sections = sections self.section_config_path_mapper = { 'entries': dhcp_config_entries, 'networks': dhcp_config_networks, } def download_and_apply_configuration(self): should_restart_dhcp_server = False successful_list = [] for section in self.sections: is_saved = False dhcp_config = 
self._get_configuration(section) if dhcp_config: is_saved = self._set_new_configuration( dhcp_config, self.section_config_path_mapper[section] ) successful_list.append(is_saved) should_restart_dhcp_server = any(successful_list) if self.can_restart_dhcp_server and should_restart_dhcp_server: self._restart_dhcp_server() self._send_sync_confirmation() def make_authorized_request(self, url): headers = {} last = self.cache.get(prefix=CACHE_LAST_MODIFIED_PREFIX, key=url) if last: self.logger.info( 'Using If-Modified-Since with value {} for url {}'.format( last, url ) ) headers['If-Modified-Since'] = last else: self.logger.info( 'Last modified not found in cache for url {}'.format(url) ) headers.update({'Authorization': 'Token {}'.format(self.key)}) return urlopen(Request(url, headers=headers)) def _get_configuration(self, mode): params = convert_to_request_params({'dc': self.dcs, 'env': self.envs}) url = '{}://{}/dhcp/{}/'.format( self.proto, self.host, mode, ) if params: params = urlencode(params) url += '?' + params configuration = None self.logger.info('Sending request to {}'.format(url)) try: response = self.make_authorized_request(url) except HTTPError as e: if e.code != 304: self.logger.error( 'Server returned %s status code with message "%s"', e.code, e.fp.read().decode() ) else: self.logger.info( 'Server return status 304 NOT MODIFIED. Nothing to do.' ) return False else: configuration = response.read() last_modified = response.headers.get('Last-Modified') self.logger.info( 'Storing Last-Modified for url {} with value {}'.format( url, last_modified ) ) self.cache.set( prefix=CACHE_LAST_MODIFIED_PREFIX, key=url, value=last_modified ) return configuration def _send_sync_confirmation(self): url = '{}://{}/dhcp/sync/'.format( self.proto, self.host, ) try: self.make_authorized_request(url) except HTTPError as e: self.logger.error( 'Could not send confirmation to Ralph. ' 'Server returned %s status code with message: %s', e.code, e.fp.read().decode() ) return False self.logger.info('Confirmation sent to {}.'.format(self.host)) return True
Apache License 2.0
google/nerfies
nerfies/utils.py
ValueMeter.reduce
python
def reduce(self, reduction='mean'):
    if reduction == 'mean':
        return np.mean(self._values)
    elif reduction == 'std':
        return np.std(self._values)
    elif reduction == 'last':
        return self._values[-1]
    else:
        raise ValueError(f'Unknown reduction {reduction}')
Reduces the tracked values.
https://github.com/google/nerfies/blob/04623e4474bde3459e2f7f2b5c9174d9e0faa7b1/nerfies/utils.py#L406-L415
import collections from concurrent import futures import contextlib import functools import time from typing import List, Union import jax from jax import tree_util import jax.numpy as jnp import numpy as np from scipy import interpolate from scipy.spatial import transform as scipy_transform import tqdm @functools.partial(jax.custom_jvp, nondiff_argnums=(1, 2, 3)) def safe_norm(x, axis=-1, keepdims=False, tol=1e-9): return jnp.linalg.norm(x, axis=axis, keepdims=keepdims) @safe_norm.defjvp def _safe_norm_jvp(axis, keepdims, tol, primals, tangents): x, = primals x_dot, = tangents safe_tol = max(tol, 1e-30) y = safe_norm(x, tol=safe_tol, axis=axis, keepdims=True) y_safe = jnp.maximum(y, tol) y_dot = jnp.where(y > safe_tol, x_dot * x / y_safe, jnp.zeros_like(x)) y_dot = jnp.sum(y_dot, axis=axis, keepdims=True) if not keepdims: y = jnp.squeeze(y, axis=axis) y_dot = jnp.squeeze(y_dot, axis=axis) return y, y_dot def jacobian_to_curl(jacobian): dfx_dy = jacobian[..., 0, 1] dfx_dz = jacobian[..., 0, 2] dfy_dx = jacobian[..., 1, 0] dfy_dz = jacobian[..., 1, 2] dfz_dx = jacobian[..., 2, 0] dfz_dy = jacobian[..., 2, 1] return jnp.stack([ dfz_dy - dfy_dz, dfx_dz - dfz_dx, dfy_dx - dfx_dy, ], axis=-1) def jacobian_to_div(jacobian): return jnp.trace(jacobian, axis1=-2, axis2=-1) - 3.0 def compute_psnr(mse): return -10. * jnp.log(mse) / jnp.log(10.) @jax.jit def robust_whiten(x): median = jnp.nanmedian(x) mad = jnp.nanmean(jnp.abs(x - median)) return (x - median) / mad def interpolate_codes( codes: Union[np.ndarray, List[np.ndarray]], num_samples: int, method='spline'): if isinstance(codes, list): codes = np.array(codes) t = np.arange(len(codes)) xs = np.linspace(0, len(codes) - 1, num_samples) if method == 'spline': cs = interpolate.CubicSpline(t, codes, bc_type='natural') return cs(xs).astype(np.float32) elif method == 'linear': interp = interpolate.interp1d(t, codes, axis=0) return interp(xs).astype(np.float32) raise ValueError(f'Unknown method {method!r}') def interpolate_cameras(cameras, num_samples: int): rotations = [] positions = [] for camera in cameras: rotations.append(camera.orientation) positions.append(camera.position) in_times = np.linspace(0, 1, len(rotations)) slerp = scipy_transform.Slerp( in_times, scipy_transform.Rotation.from_dcm(rotations)) spline = interpolate.CubicSpline(in_times, positions) out_times = np.linspace(0, 1, num_samples) out_rots = slerp(out_times).as_dcm() out_positions = spline(out_times) ref_camera = cameras[0] out_cameras = [] for out_rot, out_pos in zip(out_rots, out_positions): out_camera = ref_camera.copy() out_camera.orientation = out_rot out_camera.position = out_pos out_cameras.append(out_camera) return out_cameras def logit(y): return -jnp.log(1. / y - 1.) 
def affine_sigmoid(real, lo=0, hi=1): if not lo < hi: raise ValueError('`lo` (%g) must be < `hi` (%g)' % (lo, hi)) alpha = jax.nn.sigmoid(real) * (hi - lo) + lo return alpha def inv_affine_sigmoid(alpha, lo=0, hi=1): if not lo < hi: raise ValueError('`lo` (%g) must be < `hi` (%g)' % (lo, hi)) real = logit((alpha - lo) / (hi - lo)) return real def inv_softplus(y): return jnp.where(y > 87.5, y, jnp.log(jnp.expm1(y))) def affine_softplus(real, lo=0, ref=1): if not lo < ref: raise ValueError('`lo` (%g) must be < `ref` (%g)' % (lo, ref)) shift = inv_softplus(1.0) scale = (ref - lo) * jax.nn.softplus(real + shift) + lo return scale def inv_affine_softplus(scale, lo=0, ref=1): if not lo < ref: raise ValueError('`lo` (%g) must be < `ref` (%g)' % (lo, ref)) shift = inv_softplus(1.0) real = inv_softplus((scale - lo) / (ref - lo)) - shift return real def learning_rate_decay(step, init_lr=5e-4, decay_steps=100000, decay_rate=0.1): power = step / decay_steps return init_lr * (decay_rate**power) def log1p_safe(x): return jnp.log1p(jnp.minimum(x, 3e37)) def exp_safe(x): return jnp.exp(jnp.minimum(x, 87.5)) def expm1_safe(x): return jnp.expm1(jnp.minimum(x, 87.5)) def safe_sqrt(x, eps=1e-7): safe_x = jnp.where(x == 0, jnp.ones_like(x) * eps, x) return jnp.sqrt(safe_x) @jax.jit def general_loss_with_squared_residual(squared_x, alpha, scale): eps = jnp.finfo(jnp.float32).eps squared_scaled_x = squared_x / (scale ** 2) loss_two = 0.5 * squared_scaled_x loss_zero = log1p_safe(0.5 * squared_scaled_x) loss_neginf = -jnp.expm1(-0.5 * squared_scaled_x) loss_posinf = expm1_safe(0.5 * squared_scaled_x) beta_safe = jnp.maximum(eps, jnp.abs(alpha - 2.)) alpha_safe = jnp.where( jnp.greater_equal(alpha, 0.), jnp.ones_like(alpha), -jnp.ones_like(alpha)) * jnp.maximum(eps, jnp.abs(alpha)) loss_otherwise = (beta_safe / alpha_safe) * ( jnp.power(squared_scaled_x / beta_safe + 1., 0.5 * alpha) - 1.) loss = jnp.where( alpha == -jnp.inf, loss_neginf, jnp.where( alpha == 0, loss_zero, jnp.where( alpha == 2, loss_two, jnp.where(alpha == jnp.inf, loss_posinf, loss_otherwise)))) return scale * loss def shard(xs, device_count=None): if device_count is None: jax.local_device_count() return jax.tree_map(lambda x: x.reshape((device_count, -1) + x.shape[1:]), xs) def to_device(xs): return jax.tree_map(jnp.array, xs) def unshard(x, padding=0): if padding > 0: return x.reshape([x.shape[0] * x.shape[1]] + list(x.shape[2:]))[:-padding] else: return x.reshape([x.shape[0] * x.shape[1]] + list(x.shape[2:])) def normalize(x): return x / np.linalg.norm(x) def parallel_map(f, iterable, max_threads=None, show_pbar=False, **kwargs): with futures.ThreadPoolExecutor(max_threads) as executor: if show_pbar: results = tqdm.tqdm( executor.map(f, iterable, **kwargs), total=len(iterable)) else: results = executor.map(f, iterable, **kwargs) return list(results) def strided_subset(sequence, count): if count: stride = max(1, len(sequence) // count) return sequence[::stride] return sequence def tree_collate(list_of_pytrees): return tree_util.tree_multimap(lambda *x: np.stack(x), *list_of_pytrees) @contextlib.contextmanager def print_time(name): start = time.time() yield elapsed = time.time() - start print(f'[{name}] time elapsed: {elapsed:.04f}') class ValueMeter: def __init__(self): self._values = [] def reset(self): self._values.clear() def update(self, value): self._values.append(value)
Apache License 2.0
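The ValueMeter.reduce record above is small enough to exercise end to end. The sketch below re-declares the class from the context shown (nerfies/utils.py) so it runs standalone without the nerfies package; the driver loop and the loss values at the bottom are illustrative only.

import numpy as np

class ValueMeter:
    # Minimal re-declaration of the tracker shown above.
    def __init__(self):
        self._values = []

    def update(self, value):
        self._values.append(value)

    def reduce(self, reduction='mean'):
        if reduction == 'mean':
            return np.mean(self._values)
        elif reduction == 'std':
            return np.std(self._values)
        elif reduction == 'last':
            return self._values[-1]
        else:
            raise ValueError(f'Unknown reduction {reduction}')

meter = ValueMeter()
for loss in (0.9, 0.7, 0.4):
    meter.update(loss)
print(meter.reduce('mean'))  # 0.666...
print(meter.reduce('last'))  # 0.4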
pettarin/yael
yael/opfmanifest.py
OPFManifest.mathml_items
python
def mathml_items(self):
    return list(e for e in self.items if e.has_property(OPFItem.V_MATHML))
The list of items with MathML elements. :rtype: list of :class:`yael.opfitem.OPFItem` objects
https://github.com/pettarin/yael/blob/e5611238eceeea3acda1a5ba757172650f8fe136/yael/opfmanifest.py#L233-L239
from yael.element import Element from yael.jsonable import JSONAble from yael.mediatype import MediaType from yael.namespace import Namespace from yael.opfitem import OPFItem import yael.util __author__ = "Alberto Pettarin" __copyright__ = "Copyright 2015, Alberto Pettarin (www.albertopettarin.it)" __license__ = "MIT" __version__ = "0.0.9" __email__ = "alberto@albertopettarin.it" __status__ = "Development" class OPFManifest(Element): A_ID = "id" E_ITEM = "item" def __init__(self, internal_path=None, obj=None, string=None): self.v_id = None self.items = [] Element.__init__( self, internal_path=internal_path, obj=obj, string=string) def parse_object(self, obj): self.v_id = obj.get(OPFManifest.A_ID) item_arr = yael.util.query_xpath( obj=obj, query="{0}:{1}", args=["o", OPFManifest.E_ITEM], nsp={"o": Namespace.OPF, "x": Namespace.XML}, required=None) for item in item_arr: try: item_parsed = OPFItem(obj=item) if ( (self.internal_path != None) and (item_parsed.v_href != None)): item_parsed.internal_path = yael.util.norm_join_parent( self.internal_path, item_parsed.v_href) self.add_item(item_parsed) except: pass def json_object(self, recursive=True): obj = { "id": self.v_id, "items": len(self.items), } if recursive: obj["items"] = JSONAble.safe(self.items) return obj def __len__(self): return len(self.items) def add_item(self, item): self.items.append(item) def item_by_id(self, v_id): lis = list(e for e in self.items if e.v_id == v_id) return yael.util.safe_first(lis) def items_by_media_type(self, v_media_type): return list(e for e in self.items if e.v_media_type == v_media_type) def item_by_internal_path(self, internal_path): lis = list(e for e in self.items if e.internal_path == internal_path) return yael.util.safe_first(lis) @property def v_id(self): return self.__v_id @v_id.setter def v_id(self, v_id): self.__v_id = v_id @property def items(self): return self.__items @items.setter def items(self, items): self.__items = items @property def cover_image_item(self): for item in self.items: if ( (item.v_properties != None) and (OPFItem.V_COVER_IMAGE in item.v_properties.split(" "))): return item return None @property def nav_document_item(self): for item in self.items: if ( (item.v_properties != None) and (OPFItem.V_NAV in item.v_properties.split(" "))): return item return None @property def audio_items(self): return list(e for e in self.items if MediaType.is_audio(e.v_media_type)) @property def content_document_items(self): return list(e for e in self.items if MediaType.is_content_document( e.v_media_type)) @property def font_items(self): return list(e for e in self.items if MediaType.is_font(e.media_type)) @property def image_items(self): return list(e for e in self.items if MediaType.is_image(e.v_media_type)) @property def video_items(self): return list(e for e in self.items if MediaType.is_video(e.v_media_type)) @property def scripted_items(self): return list(e for e in self.items if e.has_property(OPFItem.V_SCRIPTED)) @property
MIT License
milvlg/mmnas
train_itm.py
parse_args
python
def parse_args():
    parser = argparse.ArgumentParser(description='MmNas Args')
    parser.add_argument('--RUN', dest='RUN_MODE', default='train',
                        choices=['train', 'val', 'test'], help='{train, val, test}', type=str)
    parser.add_argument('--DATASET', dest='DATASET', default='flickr',
                        choices=['coco', 'flickr'], help='{coco, flickr}', type=str)
    parser.add_argument('--SPLIT', dest='TRAIN_SPLIT', default='train',
                        choices=['train'], help="set training split", type=str)
    parser.add_argument('--BS', dest='BATCH_SIZE', default=64,
                        help='batch size during training', type=int)
    parser.add_argument('--NW', dest='NUM_WORKERS', default=4,
                        help='fix random seed', type=int)
    parser.add_argument('--ARCH_PATH', dest='ARCH_PATH', default='./arch/train_itm.json',
                        help='version control', type=str)
    parser.add_argument('--GENO_EPOCH', dest='GENO_EPOCH', default=0,
                        help='version control', type=int)
    parser.add_argument('--GPU', dest='GPU', default='0, 1, 2, 3',
                        help="gpu select, eg.'0, 1, 2'", type=str)
    parser.add_argument('--SEED', dest='SEED', default=None,
                        help='fix random seed', type=int)
    parser.add_argument('--VERSION', dest='VERSION', default='train_itm',
                        help='version control', type=str)
    parser.add_argument('--RESUME', dest='RESUME', default=False,
                        help='resume training', action='store_true')
    parser.add_argument('--CKPT_PATH', dest='CKPT_FILE_PATH',
                        help='load checkpoint path', type=str)
    args = parser.parse_args()
    return args
Parse input arguments
https://github.com/milvlg/mmnas/blob/552e29e666625819799ca22de324df2be50626cc/train_itm.py#L22-L80
import math, os, json, torch, datetime, random, copy, shutil, torchvision, tqdm import argparse, yaml import torch.nn as nn import torch.optim as Optim import torch.nn.functional as F import torch.utils.data as Data import numpy as np from collections import namedtuple from tkinter import _flatten import torch.distributed as dist import torch.multiprocessing as mp from torch.nn.parallel import DistributedDataParallel as DDP from mmnas.loader.load_data_itm import DataSet, DataSet_Neg from mmnas.loader.filepath_itm import Path from mmnas.model.full_itm import Net_Full from mmnas.utils.optimizer import WarmupOptimizer from mmnas.utils.sampler import SubsetDistributedSampler from mmnas.utils.itm_loss import BCE_Loss, Margin_Loss
Apache License 2.0
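A hedged illustration of how an argparse-based parse_args like the one above is typically invoked: the parser is trimmed down to two of the flags from train_itm.py and parsed against a hand-written argv, so nothing here is the full mmnas command line.

import argparse

# Trimmed parser: only --RUN and --BS are kept from the record above.
parser = argparse.ArgumentParser(description='MmNas Args')
parser.add_argument('--RUN', dest='RUN_MODE', default='train',
                    choices=['train', 'val', 'test'], type=str)
parser.add_argument('--BS', dest='BATCH_SIZE', default=64, type=int)

# Passing an explicit argv list instead of reading sys.argv.
args = parser.parse_args(['--RUN', 'val', '--BS', '32'])
print(args.RUN_MODE, args.BATCH_SIZE)  # val 32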
penetrate2hack/itwsv
My_script/Grabber/spider.py
htmlencode
python
def htmlencode(s):
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    s = s.replace("\"", "&quot;")
    s = s.replace("'", "&apos;")
    return s
Escaping the HTML special characters
https://github.com/penetrate2hack/itwsv/blob/a8c3944aa0b44b6c4b520afef15f4a1a2ebe98ce/My_script/Grabber/spider.py#L80-L89
import urllib import time import re,sys,os,string from BeautifulSoup import BeautifulSoup,SoupStrainer from urllib2 import URLError, HTTPError COOKIEFILE = 'cookies.lwp' import os.path cj = None ClientCookie = None cookielib = None import cookielib import urllib2 urlopen = urllib2.urlopen cj = cookielib.LWPCookieJar() Request = urllib2.Request txdata = None refererUrl = "http://google.com/?q=you!" txheaders = {'User-agent' : 'Grabber/0.1 (X11; U; Linux i686; en-US; rv:1.7)', 'Referer' : refererUrl} allowed=['php','html','htm','xml','xhtml','xht','xhtm', 'asp','aspx','msp','mspx','php3','php4','php5','txt','shtm', 'shtml','phtm','phtml','jhtml','pl','jsp','cfm','cfml','do','py', 'js', 'css'] database = {} database_url = [] database_css = [] database_js = [] database_ext = [] local_url = [] dumb_params = [] root = "http://localhost" outSpiderFile = None _urlEncode = {} for i in range(256): _urlEncode[chr(i)] = '%%%02x' % i for c in string.letters + string.digits + '_,.-/': _urlEncode[c] = c _urlEncode[' '] = '+' def urlEncode(s): return string.join(map(lambda c: _urlEncode[c], list(s)), '') def urlDecode(s): mychr = chr atoi = string.atoi parts = string.split(string.replace(s, '+', ' '), '%') for i in range(1, len(parts)): part = parts[i] parts[i] = mychr(atoi(part[:2], 16)) + part[2:] return string.join(parts, '')
MIT License
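A self-contained check of the escaping order used by htmlencode above (ampersands first, so already-inserted entities are not double-escaped); the function is copied verbatim so the snippet runs on its own, and the sample URL is made up.

def htmlencode(s):
    # '&' must be replaced first, otherwise the entities below get re-escaped.
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    s = s.replace("\"", "&quot;")
    s = s.replace("'", "&apos;")
    return s

print(htmlencode('<a href="x.php?a=1&b=2">it\'s</a>'))
# &lt;a href=&quot;x.php?a=1&amp;b=2&quot;&gt;it&apos;s&lt;/a&gt;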
clusterhq/flocker
benchmark/cluster.py
BenchmarkCluster.control_node_address
python
def control_node_address(self):
    return self._control_node_address
Return the control node IP address.
https://github.com/clusterhq/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/benchmark/cluster.py#L183-L187
from functools import partial from ipaddr import IPAddress import json from jsonschema import FormatChecker, Draft4Validator import yaml from twisted.python.filepath import FilePath from flocker.apiclient import FlockerClient def validate_host_mapping(host_mapping): schema = { "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "additionalProperties": "true", } v = Draft4Validator(schema, format_checker=FormatChecker()) v.validate(host_mapping) def validate_cluster_configuration(cluster_config): schema = { "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "required": ["control_node", "agent_nodes"], "properties": { "control_node": { "type": "string", }, "agent_nodes": { "type": "array", "items": { "type": "object", "required": ["public", "private"], "properties": { "public": { "type": "string" }, "private": { "type": "string" }, }, }, }, }, "additionalProperties": "true", } v = Draft4Validator(schema, format_checker=FormatChecker()) v.validate(cluster_config) class BenchmarkCluster(object): def __init__( self, control_node_address, control_service_factory, public_addresses, default_volume_size, ): self._control_node_address = control_node_address self._control_service_factory = control_service_factory self._public_addresses = public_addresses self._default_volume_size = default_volume_size self._control_service = None @classmethod def from_acceptance_test_env(cls, env): control_node_address = env['FLOCKER_ACCEPTANCE_CONTROL_NODE'] certs = FilePath(env['FLOCKER_ACCEPTANCE_API_CERTIFICATES_PATH']) try: host_to_public = json.loads( env['FLOCKER_ACCEPTANCE_HOSTNAME_TO_PUBLIC_ADDRESS'] ) validate_host_mapping(host_to_public) public_addresses = { IPAddress(k): IPAddress(v) for k, v in host_to_public.items() } except ValueError as e: raise type(e)( ': '.join( ('FLOCKER_ACCEPTANCE_HOSTNAME_TO_PUBLIC_ADDRESS',) + e.args ) ) control_service = partial( FlockerClient, host=control_node_address, port=4523, ca_cluster_path=certs.child('cluster.crt'), cert_path=certs.child('user.crt'), key_path=certs.child('user.key') ) try: control_node_ip = IPAddress(control_node_address) except ValueError as e: raise type(e)( ': '.join(('FLOCKER_ACCEPTANCE_CONTROL_NODE',) + e.args) ) try: default_volume_size = int( env['FLOCKER_ACCEPTANCE_DEFAULT_VOLUME_SIZE'] ) except ValueError as e: raise type(e)( ': '.join(('FLOCKER_ACCEPTANCE_DEFAULT_VOLUME_SIZE',) + e.args) ) return cls( control_node_ip, control_service, public_addresses, default_volume_size, ) @classmethod def from_cluster_yaml(cls, path): with path.child('cluster.yml').open() as f: cluster = yaml.safe_load(f) validate_cluster_configuration(cluster) control_node_address = cluster['control_node'] public_addresses = { IPAddress(node['private']): IPAddress(node['public']) for node in cluster['agent_nodes'] } control_service = partial( FlockerClient, host=control_node_address, port=4523, ca_cluster_path=path.child('cluster.crt'), cert_path=path.child('user.crt'), key_path=path.child('user.key') ) return cls( IPAddress(control_node_address), control_service, public_addresses, None, )
Apache License 2.0
uber/bayesmark
bayesmark/experiment_aggregate.py
concat_experiments
python
def concat_experiments(all_experiments, ravel=False):
    all_perf = {}
    all_time = {}
    all_suggest = {}
    all_sigs = {}
    trial_counter = Counter()
    for (test_case, optimizer, uuid), (perf_ds, time_ds, suggest_ds, sig) in all_experiments:
        if ravel:
            raise NotImplementedError("ravel is deprecated. Just reshape in analysis steps instead.")
        case_key = (test_case, optimizer, trial_counter[(test_case, optimizer)])
        trial_counter[(test_case, optimizer)] += 1
        assert all(perf_ds[kk].dims == (ITER, SUGGEST) for kk in perf_ds)
        all_perf[case_key] = perf_ds
        all_time[case_key] = summarize_time(time_ds)
        all_suggest_curr = all_suggest.setdefault(test_case, {})
        all_suggest_curr[case_key] = suggest_ds
        all_sigs.setdefault(test_case, []).append(sig)
    assert min(trial_counter.values()) == max(trial_counter.values()), "Uneven number of trials per test case"
    all_perf = xru.ds_concat(all_perf, dims=(TEST_CASE, METHOD, TRIAL))
    assert all(all_perf[kk].dims == (ITER, SUGGEST, TEST_CASE, METHOD, TRIAL) for kk in all_perf)
    assert not any(
        np.any(np.isnan(all_perf[kk].values)) for kk in all_perf
    ), "Missing combinations of method and test case"
    all_time = xru.ds_concat(all_time, dims=(TEST_CASE, METHOD, TRIAL))
    assert all(all_time[kk].dims == (ITER, TEST_CASE, METHOD, TRIAL) for kk in all_time)
    assert not any(np.any(np.isnan(all_time[kk].values)) for kk in all_time)
    assert xru.coord_compat((all_perf, all_time), (ITER, TEST_CASE, METHOD, TRIAL))
    for test_case in all_suggest:
        all_suggest[test_case] = xru.ds_concat(all_suggest[test_case], dims=(TEST_CASE, METHOD, TRIAL))
        assert all(
            all_suggest[test_case][kk].dims == (ITER, SUGGEST, TEST_CASE, METHOD, TRIAL)
            for kk in all_suggest[test_case]
        )
        assert not any(np.any(np.isnan(all_suggest[test_case][kk].values)) for kk in all_suggest[test_case])
        assert xru.coord_compat((all_perf, all_suggest[test_case]), (ITER, METHOD, TRIAL))
        assert all_suggest[test_case].coords[TEST_CASE].shape == (1,), "test case should be singleton"
    return all_perf, all_time, all_suggest, all_sigs
Aggregate the Datasets from a series of experiments into combined Dataset. Parameters ---------- all_experiments : typing.Iterable Iterable (possible from a generator) with the Datasets from each experiment. Each item in `all_experiments` is a pair containing ``(meta_data, data)``. See `load_experiments` for details on these variables, ravel : bool If true, ravel all studies to store batch suggestions as if they were serial. Returns ------- all_perf : :class:`xarray:xarray.Dataset` DataArray containing all of the `perf_da` from the experiments. The meta-data from the experiments are included as extra dimensions. `all_perf` has dimensions ``(ITER, SUGGEST, TEST_CASE, METHOD, TRIAL)``. To convert the `uuid` to a trial, there must be an equal number of repetition in the experiments for each `TEST_CASE`, `METHOD` combination. Likewise, all of the experiments need an equal number of `ITER` and `SUGGEST`. If `ravel` is true, then the `SUGGEST` is singleton. all_time : :class:`xarray:xarray.Dataset` Dataset containing all of the `time_ds` from the experiments. The new dimensions are ``(ITER, TEST_CASE, METHOD, TRIAL)``. It has the same variables as `time_ds`. all_suggest : :class:`xarray:xarray.Dataset` DataArray containing all of the `suggest_ds` from the experiments. It has dimensions ``(ITER, SUGGEST, TEST_CASE, METHOD, TRIAL)``. all_sigs : dict(str, list(list(float))) Aggregate of all experiment signatures.
https://github.com/uber/bayesmark/blob/8c420e935718f0d6867153b781e58943ecaf2338/bayesmark/experiment_aggregate.py#L89-L166
import json import logging from collections import Counter import numpy as np import xarray as xr import bayesmark.constants as cc import bayesmark.xr_util as xru from bayesmark.cmd_parse import CmdArgs, agg_parser, parse_args, serializable_dict, unserializable_dict from bayesmark.constants import ARG_DELIM, EVAL_RESULTS, ITER, METHOD, SUGGEST, TEST_CASE, TIME_RESULTS, TRIAL from bayesmark.serialize import XRSerializer from bayesmark.signatures import analyze_signatures from bayesmark.sklearn_funcs import SklearnModel from bayesmark.util import str_join_safe logger = logging.getLogger(__name__) def validate_time(all_time): assert isinstance(all_time, xr.Dataset) assert all_time[cc.SUGGEST_PHASE].dims == (ITER,) assert all_time[cc.EVAL_PHASE].dims == (ITER, SUGGEST) assert all_time[cc.OBS_PHASE].dims == (ITER,) assert xru.is_simple_coords(all_time.coords, min_side=1) def validate_perf(perf_da): assert isinstance(perf_da, xr.Dataset) assert perf_da.dims == (ITER, SUGGEST) assert xru.is_simple_coords(perf_da.coords) assert not np.any(np.isnan(perf_da.values)) def validate_agg_perf(perf_da, min_trial=1): assert isinstance(perf_da, xr.DataArray) assert perf_da.dims == (ITER, SUGGEST, TEST_CASE, METHOD, TRIAL) assert xru.is_simple_coords(perf_da.coords, dims=(ITER, SUGGEST, TRIAL)) assert not np.any(np.isnan(perf_da.values)) assert perf_da.sizes[TRIAL] >= min_trial def summarize_time(all_time): validate_time(all_time) time_summary = xr.Dataset(coords=all_time.coords) time_summary[cc.SUGGEST_PHASE] = all_time[cc.SUGGEST_PHASE] time_summary[cc.OBS_PHASE] = all_time[cc.OBS_PHASE] time_summary[cc.EVAL_PHASE_MAX] = all_time[cc.EVAL_PHASE].max(dim=SUGGEST) time_summary[cc.EVAL_PHASE_SUM] = all_time[cc.EVAL_PHASE].sum(dim=SUGGEST) return time_summary
Apache License 2.0
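The bookkeeping step that makes concat_experiments work is mapping repeated (test_case, optimizer) pairs to consecutive TRIAL indices with a Counter. The standalone sketch below shows only that step with made-up experiment tuples; the xarray concatenation and validation are omitted.

from collections import Counter

# Hypothetical (test_case, optimizer, uuid) triples standing in for loaded experiments.
experiments = [
    ("branin", "random-search", "uuid-1"),
    ("branin", "random-search", "uuid-2"),
    ("branin", "nevergrad", "uuid-3"),
]

trial_counter = Counter()
case_keys = []
for test_case, optimizer, uuid in experiments:
    # The current count for this (test_case, optimizer) pair becomes the trial id.
    case_keys.append((test_case, optimizer, trial_counter[(test_case, optimizer)]))
    trial_counter[(test_case, optimizer)] += 1

print(case_keys)
# [('branin', 'random-search', 0), ('branin', 'random-search', 1), ('branin', 'nevergrad', 0)]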
pydota2/pydota2_archive
pydota2/lib/actions.py
Arguments.types
python
def types(cls, **kwargs):
    named = {name: type_._replace(id=Arguments._fields.index(name), name=name)
             for name, type_ in six.iteritems(kwargs)}
    return cls(**named)
Create an Arguments of the possible Types.
https://github.com/pydota2/pydota2_archive/blob/f33233ee5393a248e9845bb25ff234bf7ac9ff82/pydota2/lib/actions.py#L101-L105
from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import numbers import six from pydota2.lib import location class ArgumentType(collections.namedtuple( "ArgumentType", ["id", "name", "sizes", "fn"])): __slots__ = () def __str__(self): return "%s/%s %s" % (self.id, self.name, list(self.sizes)) @classmethod def enum(cls, options): return cls(-1, "<none>", (len(options),), lambda a: options[a[0]]) @classmethod def string(cls, string): return cls(-1, "<none>", (string,), lambda a: a[0]) @classmethod def scalar(cls, value): return cls(-1, "<none>", (value,), lambda a: a[0]) @classmethod def handle(cls, valid_handles): return cls(-1, "<none>", (len(valid_handles),), lambda a: a[0]) @classmethod def location(cls): return cls(-1, "<none>", (0, 0, 0), lambda a: location.Location(*a).floor()) @classmethod def tree_id(cls, valid_tree_ids): return cls(-1, "<none>", (len(valid_tree_ids),), lambda a: a[0]) @classmethod def spec(cls, id_, name, sizes): return cls(id_, name, sizes, None) class Arguments(collections.namedtuple("Arguments", [ "player_id", "location", "handle", "tree_id", "ability_str", "queued", "bool"])): ___slots__ = () @classmethod
Apache License 2.0
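Arguments.types above fills in each ArgumentType's id/name from the position of the keyword in the namedtuple's fields. The reduced sketch below keeps only two fields and uses dict.items() instead of six.iteritems, so it is an illustration of the pattern rather than the pydota2 class itself.

import collections

ArgumentType = collections.namedtuple("ArgumentType", ["id", "name", "sizes", "fn"])

class Arguments(collections.namedtuple("Arguments", ["player_id", "location"])):
    @classmethod
    def types(cls, **kwargs):
        # Each type gets its id from the field position and its name from the keyword.
        named = {name: type_._replace(id=Arguments._fields.index(name), name=name)
                 for name, type_ in kwargs.items()}
        return cls(**named)

types = Arguments.types(
    player_id=ArgumentType(-1, "<none>", (10,), None),
    location=ArgumentType(-1, "<none>", (0, 0, 0), None),
)
print(types.player_id.id, types.location.id)  # 0 1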
alliefitter/boto3_type_annotations
boto3_type_annotations_with_docs/boto3_type_annotations/sqs/client.py
Client.add_permission
python
def add_permission(self, QueueUrl: str, Label: str, AWSAccountIds: List, Actions: List):
    pass
Adds a permission to a queue for a specific `principal <http://docs.aws.amazon.com/general/latest/gr/glos-chap.html#P>`__ . This allows sharing access to the queue. When you create a queue, you have full control access rights for the queue. Only you, the owner of the queue, can grant or deny permissions to the queue. For more information about these permissions, see `Allow Developers to Write Messages to a Shared Queue <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-writing-an-sqs-policy.html#write-messages-to-shared-queue>`__ in the *Amazon Simple Queue Service Developer Guide* . .. note:: ``AddPermission`` writes an Amazon-SQS-generated policy. If you want to write your own policy, use `` SetQueueAttributes `` to upload your policy. For more information about writing your own policy, see `Using Custom Policies with the Amazon SQS Access Policy Language <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-creating-custom-policies.html>`__ in the *Amazon Simple Queue Service Developer Guide* . An Amazon SQS policy can have a maximum of 7 actions. Some actions take lists of parameters. These lists are specified using the ``param.n`` notation. Values of ``n`` are integers starting from 1. For example, a parameter list with two elements looks like this: ``&Attribute.1=first`` ``&Attribute.2=second`` .. note:: Cross-account permissions don't apply to this action. For more information, see see `Grant Cross-Account Permissions to a Role and a User Name <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-customer-managed-policy-examples.html#grant-cross-account-permissions-to-role-and-user-name>`__ in the *Amazon Simple Queue Service Developer Guide* . See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/sqs-2012-11-05/AddPermission>`_ **Request Syntax** :: response = client.add_permission( QueueUrl='string', Label='string', AWSAccountIds=[ 'string', ], Actions=[ 'string', ] ) :type QueueUrl: string :param QueueUrl: **[REQUIRED]** The URL of the Amazon SQS queue to which permissions are added. Queue URLs and names are case-sensitive. :type Label: string :param Label: **[REQUIRED]** The unique identification of the permission you\'re setting (for example, ``AliceSendMessage`` ). Maximum 80 characters. Allowed characters include alphanumeric characters, hyphens (``-`` ), and underscores (``_`` ). :type AWSAccountIds: list :param AWSAccountIds: **[REQUIRED]** The AWS account number of the `principal <http://docs.aws.amazon.com/general/latest/gr/glos-chap.html#P>`__ who is given permission. The principal must have an AWS account, but does not need to be signed up for Amazon SQS. For information about locating the AWS account identification, see `Your AWS Identifiers <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-making-api-requests.html#sqs-api-request-authentication>`__ in the *Amazon Simple Queue Service Developer Guide* . - *(string) --* :type Actions: list :param Actions: **[REQUIRED]** The action the client wants to allow for the specified principal. Valid values: the name of any action or ``*`` . For more information about these actions, see `Overview of Managing Access Permissions to Your Amazon Simple Queue Service Resource <http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-overview-of-managing-access.html>`__ in the *Amazon Simple Queue Service Developer Guide* . 
Specifying ``SendMessage`` , ``DeleteMessage`` , or ``ChangeMessageVisibility`` for ``ActionName.n`` also grants permissions for the corresponding batch versions of those actions: ``SendMessageBatch`` , ``DeleteMessageBatch`` , and ``ChangeMessageVisibilityBatch`` . - *(string) --* :returns: None
https://github.com/alliefitter/boto3_type_annotations/blob/2a88aa562b1aee6e8a6cc30402980884b3707fbb/boto3_type_annotations_with_docs/boto3_type_annotations/sqs/client.py#L11-L56
from typing import Optional from botocore.client import BaseClient from typing import Dict from botocore.paginate import Paginator from botocore.waiter import Waiter from typing import Union from typing import List class Client(BaseClient):
MIT License
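The stub above only carries type annotations; at runtime the real call goes through botocore. A hedged usage sketch based on the "Request Syntax" section of the docstring follows: the queue URL, label and account ids are placeholders, and running it requires valid AWS credentials.

import boto3

client = boto3.client('sqs')
# Grant a (placeholder) partner account permission to send messages to the queue.
client.add_permission(
    QueueUrl='https://sqs.us-east-1.amazonaws.com/123456789012/MyQueue',
    Label='AllowPartnerSend',
    AWSAccountIds=['210987654321'],
    Actions=['SendMessage'],
)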
taksau/gps-net
lib/dataloaders/visual_genome.py
load_graphs
python
def load_graphs(graphs_file, mode='train', num_im=-1, num_val_im=0, filter_empty_rels=True, filter_non_overlap=False): if mode not in ('train', 'val', 'test'): raise ValueError('{} invalid'.format(mode)) roi_h5 = h5py.File(graphs_file, 'r') data_split = roi_h5['split'][:] split = 2 if mode == 'test' else 0 split_mask = data_split == split split_mask &= roi_h5['img_to_first_box'][:] >= 0 if filter_empty_rels: split_mask &= roi_h5['img_to_first_rel'][:] >= 0 image_index = np.where(split_mask)[0] if num_im > -1: image_index = image_index[:num_im] if num_val_im > 0: if mode == 'val': image_index = image_index[:num_val_im] elif mode == 'train': image_index = image_index[num_val_im:] split_mask = np.zeros_like(data_split).astype(bool) split_mask[image_index] = True all_labels = roi_h5['labels'][:, 0] all_boxes = roi_h5['boxes_{}'.format(BOX_SCALE)][:] assert np.all(all_boxes[:, :2] >= 0) assert np.all(all_boxes[:, 2:] > 0) all_boxes[:, :2] = all_boxes[:, :2] - all_boxes[:, 2:] / 2 all_boxes[:, 2:] = all_boxes[:, :2] + all_boxes[:, 2:] im_to_first_box = roi_h5['img_to_first_box'][split_mask] im_to_last_box = roi_h5['img_to_last_box'][split_mask] im_to_first_rel = roi_h5['img_to_first_rel'][split_mask] im_to_last_rel = roi_h5['img_to_last_rel'][split_mask] _relations = roi_h5['relationships'][:] _relation_predicates = roi_h5['predicates'][:, 0] assert (im_to_first_rel.shape[0] == im_to_last_rel.shape[0]) assert (_relations.shape[0] == _relation_predicates.shape[0]) boxes = [] gt_classes = [] relationships = [] for i in range(len(image_index)): boxes_i = all_boxes[im_to_first_box[i]:im_to_last_box[i] + 1, :] gt_classes_i = all_labels[im_to_first_box[i]:im_to_last_box[i] + 1] if im_to_first_rel[i] >= 0: predicates = _relation_predicates[im_to_first_rel[i]:im_to_last_rel[i] + 1] obj_idx = _relations[im_to_first_rel[i]:im_to_last_rel[i] + 1] - im_to_first_box[i] assert np.all(obj_idx >= 0) assert np.all(obj_idx < boxes_i.shape[0]) rels = np.column_stack((obj_idx, predicates)) else: assert not filter_empty_rels rels = np.zeros((0, 3), dtype=np.int32) if filter_non_overlap: assert mode == 'train' inters = bbox_overlaps(boxes_i, boxes_i) rel_overs = inters[rels[:, 0], rels[:, 1]] inc = np.where(rel_overs > 0.0)[0] if inc.size > 0: rels = rels[inc] else: split_mask[image_index[i]] = 0 continue boxes.append(boxes_i) gt_classes.append(gt_classes_i) relationships.append(rels) return split_mask, boxes, gt_classes, relationships
Load the file containing the GT boxes and relations, as well as the dataset split

:param graphs_file: HDF5
:param mode: (train, val, or test)
:param num_im: Number of images we want
:param num_val_im: Number of validation images
:param filter_empty_rels: (will be filtered otherwise.)
:param filter_non_overlap: If training, filter images that dont overlap.
:return:
    image_index: numpy array corresponding to the index of images we're using
    boxes: List where each element is a [num_gt, 4] array of ground truth boxes (x1, y1, x2, y2)
    gt_classes: List where each element is a [num_gt] array of classes
    relationships: List where each element is a [num_r, 3] array of
        (box_ind_1, box_ind_2, predicate) relationships
https://github.com/taksau/gps-net/blob/dfbe63a793026b231b3cd60073aaa91a2ad4d06a/lib/dataloaders/visual_genome.py#L264-L362
import json import os import h5py import numpy as np import torch from PIL import Image from torch.utils.data import Dataset from torchvision.transforms import Resize, Compose, ToTensor, Normalize from dataloaders.blob import Blob from lib.fpn.box_intersections_cpu.bbox import bbox_overlaps from config import VG_IMAGES, IM_DATA_FN, VG_SGG_FN, VG_SGG_DICT_FN, BOX_SCALE, IM_SCALE, PROPOSAL_FN from dataloaders.image_transforms import SquarePad, Grayscale, Brightness, Sharpness, Contrast, RandomOrder, Hue, random_crop from collections import defaultdict from pycocotools.coco import COCO class VG(Dataset): def __init__(self, mode, roidb_file=VG_SGG_FN, dict_file=VG_SGG_DICT_FN, image_file=IM_DATA_FN, filter_empty_rels=True, num_im=-1, num_val_im=5000, filter_duplicate_rels=True, filter_non_overlap=True, use_proposals=False): if mode not in ('test', 'train', 'val'): raise ValueError("Mode must be in test, train, or val. Supplied {}".format(mode)) self.mode = mode self.roidb_file = roidb_file self.dict_file = dict_file self.image_file = image_file self.filter_non_overlap = filter_non_overlap self.filter_duplicate_rels = filter_duplicate_rels and self.mode == 'train' self.split_mask, self.gt_boxes, self.gt_classes, self.relationships = load_graphs( self.roidb_file, self.mode, num_im, num_val_im=num_val_im, filter_empty_rels=filter_empty_rels, filter_non_overlap=self.filter_non_overlap and self.is_train, ) self.filenames = load_image_filenames(image_file) self.filenames = [self.filenames[i] for i in np.where(self.split_mask)[0]] self.ind_to_classes, self.ind_to_predicates = load_info(dict_file) if use_proposals: print("Loading proposals", flush=True) p_h5 = h5py.File(PROPOSAL_FN, 'r') rpn_rois = p_h5['rpn_rois'] rpn_scores = p_h5['rpn_scores'] rpn_im_to_roi_idx = np.array(p_h5['im_to_roi_idx'][self.split_mask]) rpn_num_rois = np.array(p_h5['num_rois'][self.split_mask]) self.rpn_rois = [] for i in range(len(self.filenames)): rpn_i = np.column_stack(( rpn_scores[rpn_im_to_roi_idx[i]:rpn_im_to_roi_idx[i] + rpn_num_rois[i]], rpn_rois[rpn_im_to_roi_idx[i]:rpn_im_to_roi_idx[i] + rpn_num_rois[i]], )) self.rpn_rois.append(rpn_i) else: self.rpn_rois = None tform = [ SquarePad(), Resize(IM_SCALE), ToTensor(), Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]), ] self.transform_pipeline = Compose(tform) @property def coco(self): anns = [] for i, (cls_array, box_array) in enumerate(zip(self.gt_classes, self.gt_boxes)): for cls, box in zip(cls_array.tolist(), box_array.tolist()): anns.append({ 'area': (box[3] - box[1] + 1) * (box[2] - box[0] + 1), 'bbox': [box[0], box[1], box[2] - box[0] + 1, box[3] - box[1] + 1], 'category_id': cls, 'id': len(anns), 'image_id': i, 'iscrowd': 0, }) fauxcoco = COCO() fauxcoco.dataset = { 'info': {'description': 'ayy lmao'}, 'images': [{'id': i} for i in range(self.__len__())], 'categories': [{'supercategory': 'person', 'id': i, 'name': name} for i, name in enumerate(self.ind_to_classes) if name != '__background__'], 'annotations': anns, } fauxcoco.createIndex() return fauxcoco @property def is_train(self): return self.mode.startswith('train') @classmethod def splits(cls, *args, **kwargs): train = cls('train', *args, **kwargs) val = cls('val', *args, **kwargs) test = cls('test', *args, **kwargs) return train, val, test def __getitem__(self, index): image_unpadded = Image.open(self.filenames[index]).convert('RGB') flipped = self.is_train and np.random.random() > 0.5 gt_boxes = self.gt_boxes[index].copy() if self.is_train: gt_boxes[:, [1, 3]] = gt_boxes[:, [1, 
3]].clip( None, BOX_SCALE / max(image_unpadded.size) * image_unpadded.size[1]) gt_boxes[:, [0, 2]] = gt_boxes[:, [0, 2]].clip( None, BOX_SCALE / max(image_unpadded.size) * image_unpadded.size[0]) w, h = image_unpadded.size box_scale_factor = BOX_SCALE / max(w, h) if flipped: scaled_w = int(box_scale_factor * float(w)) image_unpadded = image_unpadded.transpose(Image.FLIP_LEFT_RIGHT) gt_boxes[:, [0, 2]] = scaled_w - gt_boxes[:, [2, 0]] img_scale_factor = IM_SCALE / max(w, h) if h > w: im_size = (IM_SCALE, int(w * img_scale_factor), img_scale_factor) elif h < w: im_size = (int(h * img_scale_factor), IM_SCALE, img_scale_factor) else: im_size = (IM_SCALE, IM_SCALE, img_scale_factor) gt_rels = self.relationships[index].copy() if self.filter_duplicate_rels: assert self.mode == 'train' old_size = gt_rels.shape[0] all_rel_sets = defaultdict(list) for (o0, o1, r) in gt_rels: all_rel_sets[(o0, o1)].append(r) gt_rels = [(k[0], k[1], np.random.choice(v)) for k,v in all_rel_sets.items()] gt_rels = np.array(gt_rels) entry = { 'img': self.transform_pipeline(image_unpadded), 'img_size': im_size, 'gt_boxes': gt_boxes, 'gt_classes': self.gt_classes[index].copy(), 'gt_relations': gt_rels, 'scale': IM_SCALE / BOX_SCALE, 'index': index, 'flipped': flipped, 'fn': self.filenames[index], } if self.rpn_rois is not None: entry['proposals'] = self.rpn_rois[index] assertion_checks(entry) return entry def __len__(self): return len(self.filenames) @property def num_predicates(self): return len(self.ind_to_predicates) @property def num_classes(self): return len(self.ind_to_classes) def assertion_checks(entry): im_size = tuple(entry['img'].size()) if len(im_size) != 3: raise ValueError("Img must be dim-3") c, h, w = entry['img'].size() if c != 3: raise ValueError("Must have 3 color channels") num_gt = entry['gt_boxes'].shape[0] if entry['gt_classes'].shape[0] != num_gt: raise ValueError("GT classes and GT boxes must have same number of examples") assert (entry['gt_boxes'][:, 2] >= entry['gt_boxes'][:, 0]).all() assert (entry['gt_boxes'] >= -1).all() def load_image_filenames(image_file, image_dir=VG_IMAGES): with open(image_file, 'r') as f: im_data = json.load(f) corrupted_ims = ['1592.jpg', '1722.jpg', '4616.jpg', '4617.jpg'] fns = [] for i, img in enumerate(im_data): basename = '{}.jpg'.format(img['image_id']) if basename in corrupted_ims: continue filename = os.path.join(image_dir, basename) if os.path.exists(filename): fns.append(filename) assert len(fns) == 108073 return fns
MIT License
geemaple/leetcode
leetcode/67.add-binary.py
Solution.addBinary
python
def addBinary(self, a, b):
    size = max(len(a), len(b))
    addOn = 0
    res = ''
    for i in range(size):
        digitA = int(a[~i]) if (i + 1 <= len(a)) else 0
        digitB = int(b[~i]) if (i + 1 <= len(b)) else 0
        number = digitA + digitB + addOn
        addOn = number // 2
        res = str(number % 2) + res
    if addOn > 0:
        res = str(addOn) + res
    return res
:type a: str
:type b: str
:rtype: str
https://github.com/geemaple/leetcode/blob/68bc5032e1ee52c22ef2f2e608053484c487af54/leetcode/67.add-binary.py#L2-L25
class Solution(object):
MIT License
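A quick check of addBinary, combining the method above with the class stub from the record's context so it runs standalone; the expected output is verified by hand (1010 + 1011 in binary is 10 + 11 = 21 = 10101).

class Solution(object):
    def addBinary(self, a, b):
        size = max(len(a), len(b))
        addOn = 0
        res = ''
        for i in range(size):
            # a[~i] / b[~i] read the i-th digit from the right.
            digitA = int(a[~i]) if (i + 1 <= len(a)) else 0
            digitB = int(b[~i]) if (i + 1 <= len(b)) else 0
            number = digitA + digitB + addOn
            addOn = number // 2
            res = str(number % 2) + res
        if addOn > 0:
            res = str(addOn) + res
        return res

print(Solution().addBinary('1010', '1011'))  # 10101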
rhayes777/pyautofit
autofit/graphical/utils.py
psilog
python
def psilog(x: np.ndarray) -> np.ndarray:
    return special.digamma(x) - np.log(x)
psi(x) - log(x) needed when calculating E[ln[x]] when x is a Gamma variable
https://github.com/rhayes777/pyautofit/blob/1eb6819cc60df8f6fb7d03bd95eaf074409d9e49/autofit/graphical/utils.py#L206-L211
from functools import reduce from operator import mul from typing import ( Iterable, Tuple, TypeVar, Dict, NamedTuple, Optional, Union ) import numpy as np from scipy import special from scipy.linalg import block_diag from scipy.optimize import OptimizeResult from autofit.mapper.variable import Variable class Status(NamedTuple): success: bool = True messages: Tuple[str, ...] = () class FlattenArrays(dict): def __init__(self, dict_: Dict[Variable, Tuple[int, ...]]): super().__init__() self.update(dict_) self.splits = np.cumsum([ np.prod(s) for s in self.values()], dtype=int) self.inds = [ slice(i0, i1) for i0, i1 in zip(np.r_[0, self.splits[:-1]], self.splits)] self.sizes = { k: np.prod(s, dtype=int) for k, s in self.items()} @classmethod def from_arrays(cls, **arrays: Dict[str, np.ndarray]) -> "FlattenArrays": return cls(**{k: np.shape(arr) for k, arr in arrays.items()}) def flatten(self, arrays_dict: Dict[Variable, np.ndarray]) -> np.ndarray: assert all(np.shape(arrays_dict[k]) == shape for k, shape in self.items()) return np.concatenate([ np.ravel(arrays_dict[k]) for k in self.keys()]) def unflatten(self, arr: np.ndarray, ndim=None) -> Dict[str, np.ndarray]: arr = np.asanyarray(arr) if ndim is None: ndim = arr.ndim arrays = [ arr[(ind,) * ndim] for ind in self.inds] arr_shapes = [arr.shape[ndim:] for arr in arrays] return { k: arr.reshape(shape * ndim + arr_shape) if shape or arr_shape else arr.item() for (k, shape), arr_shape, arr in zip(self.items(), arr_shapes, arrays)} def flatten2d(self, values: Dict[Variable, np.ndarray]) -> np.ndarray: assert all(np.shape(values[k]) == shape * 2 for k, shape in self.items()) return block_diag(*( np.reshape(values[k], (n, n)) for k, n in self.sizes.items() )) unflatten2d = unflatten def __repr__(self): shapes = ", ".join(map("{0[0]}={0[1]}".format, self.items())) return f"{type(self).__name__}({shapes})" @property def size(self): return self.splits[-1] class OptResult(NamedTuple): mode: Dict[Variable, np.ndarray] hess_inv: Dict[Variable, np.ndarray] log_norm: float full_hess_inv: np.ndarray result: OptimizeResult status: Status = Status() def add_arrays(*arrays: np.ndarray) -> np.ndarray: b = np.broadcast(*arrays) return sum(a * np.size(a) / b.size for a in arrays) Axis = Optional[Union[bool, int, Tuple[int, ...]]] def aggregate(array: np.ndarray, axis: Axis = False, **kwargs) -> np.ndarray: if axis is False: return array else: return np.sum(array, axis=axis, **kwargs) def diag(array: np.ndarray, *ds: Tuple[int, ...]) -> np.ndarray: array = np.asanyarray(array) d1 = array.shape if ds: ds = (d1,) + ds else: ds = (d1, d1) out = np.zeros(sum(ds, ())) diag_inds = tuple(map(np.ravel, (i for d in ds for i in np.indices(d)))) out[diag_inds] = array.ravel() return out _M = TypeVar('_M') def prod(iterable: Iterable[_M], *arg: Tuple[_M]) -> _M: return reduce(mul, iterable, *arg) def r2_score(y_true, y_pred, axis=None): y_true = np.asanyarray(y_true) y_pred = np.asanyarray(y_pred) mse = np.square(y_true - y_pred).mean(axis=axis) var = y_true.var(axis=axis) return 1 - mse / var def propagate_uncertainty( cov: np.ndarray, jac: np.ndarray) -> np.ndarray: cov = np.asanyarray(cov) var_ndim = cov.ndim // 2 det_ndim = jac.ndim - var_ndim det_shape, var_shape = jac.shape[:det_ndim], jac.shape[det_ndim:] assert var_shape == cov.shape[:var_ndim] == cov.shape[var_ndim:] var_size = np.prod(var_shape, dtype=int) det_size = np.prod(det_shape, dtype=int) cov2d = cov.reshape((var_size, var_size)) jac2d = jac.reshape((det_size, var_size)) det_cov2d = np.linalg.multi_dot(( 
jac2d, cov2d, jac2d.T)) det_cov = det_cov2d.reshape(det_shape + det_shape) return det_cov
MIT License
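The docstring says psi(x) - log(x) shows up when computing E[ln x] for a Gamma variable. For X ~ Gamma(shape=a, rate=a) (scale 1/a, so E[X] = 1) the exact value is E[ln X] = digamma(a) - ln(a) = psilog(a), which the Monte Carlo sketch below confirms; the shape value and sample size are arbitrary choices for the check.

import numpy as np
from scipy import special

def psilog(x):
    return special.digamma(x) - np.log(x)

a = 3.5
rng = np.random.default_rng(0)
# Gamma with shape a and scale 1/a has mean 1 and E[ln X] = digamma(a) - ln(a).
samples = rng.gamma(shape=a, scale=1.0 / a, size=200_000)
print(np.log(samples).mean())  # close to -0.1496
print(psilog(a))               # about -0.1496 (exact, up to float precision)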
phoenixdl/rising
rising/transforms/kernel.py
KernelTransform.create_kernel
python
def create_kernel(self) -> torch.Tensor:
    raise NotImplementedError
Create kernel for convolution
https://github.com/phoenixdl/rising/blob/1d15e161198da469a06a5995746e8f531772f7f3/rising/transforms/kernel.py#L90-L94
import math from typing import Callable, Sequence, Union import torch from rising.utils import check_scalar from .abstract import AbstractTransform __all__ = ["KernelTransform", "GaussianSmoothing"] class KernelTransform(AbstractTransform): def __init__( self, in_channels: int, kernel_size: Union[int, Sequence], dim: int = 2, stride: Union[int, Sequence] = 1, padding: Union[int, Sequence] = 0, padding_mode: str = "zero", keys: Sequence = ("data",), grad: bool = False, **kwargs ): super().__init__(grad=grad, **kwargs) self.in_channels = in_channels if check_scalar(kernel_size): kernel_size = [kernel_size] * dim self.kernel_size = kernel_size if check_scalar(stride): stride = [stride] * dim self.stride = stride if check_scalar(padding): padding = [padding] * dim * 2 self.padding = padding self.padding_mode = padding_mode self.keys = keys kernel = self.create_kernel() self.register_buffer("weight", kernel) self.groups = in_channels self.conv = self.get_conv(dim) @staticmethod def get_conv(dim) -> Callable: if dim == 1: return torch.nn.functional.conv1d elif dim == 2: return torch.nn.functional.conv2d elif dim == 3: return torch.nn.functional.conv3d else: raise TypeError("Only 1, 2 and 3 dimensions are supported. Received {}.".format(dim))
MIT License
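create_kernel is the hook subclasses override to supply the convolution weight that KernelTransform registers as a buffer. The sketch below is a hedged example of such a subclass: based on the grouped convolution set up in __init__ (groups=in_channels), the weight is assumed to have shape (in_channels, 1, *kernel_size), and a simple normalized box (mean) filter is used rather than rising's own GaussianSmoothing. It also assumes AbstractTransform is a torch.nn.Module, as it is in rising.

import torch
from rising.transforms.kernel import KernelTransform

class BoxSmoothing(KernelTransform):
    def create_kernel(self) -> torch.Tensor:
        # One (1, k1, ..., kd) filter per input channel, normalized to sum to 1.
        kernel = torch.ones(self.in_channels, 1, *self.kernel_size)
        return kernel / kernel[0].numel()

# Instantiation mirrors the base-class signature shown in the context above.
smooth = BoxSmoothing(in_channels=1, kernel_size=3, dim=2, padding=1)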
kuri65536/python-for-android
python-modules/twisted/twisted/application/service.py
IServiceCollection.addService
python
def addService(service):
Add a child service.

@type service: L{IService}
@raise RuntimeError: Raised if the service has a child with the given name.
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/application/service.py#L226-L233
from zope.interface import implements, Interface, Attribute from twisted.python.reflect import namedAny from twisted.python import components from twisted.internet import defer from twisted.persisted import sob from twisted.plugin import IPlugin class IServiceMaker(Interface): tapname = Attribute( "A short string naming this Twisted plugin, for example 'web' or " "'pencil'. This name will be used as the subcommand of 'twistd'.") description = Attribute( "A brief summary of the features provided by this " "Twisted application plugin.") options = Attribute( "A C{twisted.python.usage.Options} subclass defining the" "configuration options for this application.") def makeService(options): class ServiceMaker(object): implements(IPlugin, IServiceMaker) def __init__(self, name, module, description, tapname): self.name = name self.module = module self.description = description self.tapname = tapname def options(): def get(self): return namedAny(self.module).Options return get, options = property(*options()) def makeService(): def get(self): return namedAny(self.module).makeService return get, makeService = property(*makeService()) class IService(Interface): def setName(name): def setServiceParent(parent): def disownServiceParent(): def startService(): def stopService(): def privilegedStartService(): class Service: implements(IService) running = 0 name = None parent = None def __getstate__(self): dict = self.__dict__.copy() if dict.has_key("running"): del dict['running'] return dict def setName(self, name): if self.parent is not None: raise RuntimeError("cannot change name when parent exists") self.name = name def setServiceParent(self, parent): if self.parent is not None: self.disownServiceParent() parent = IServiceCollection(parent, parent) self.parent = parent self.parent.addService(self) def disownServiceParent(self): d = self.parent.removeService(self) self.parent = None return d def privilegedStartService(self): pass def startService(self): self.running = 1 def stopService(self): self.running = 0 class IServiceCollection(Interface): def getServiceNamed(name): def __iter__():
Apache License 2.0
keenlabs/keenclient-python
keen/Padding.py
removeRandomLenPadding
python
def removeRandomLenPadding(str, blocksize=AES_blocksize):
    pad_len = ord(str[-1])
    assert pad_len < blocksize, 'padding error'
    assert pad_len < len(str), 'padding error'
    return str[:-pad_len]
ISO 10126 Padding (withdrawn, 2007): Remove Padding with random bytes + last byte equal to the number of padding bytes
https://github.com/keenlabs/keenclient-python/blob/0cd942152977ee37a0c828249c95cf3a27c8c80f/keen/Padding.py#L217-L223
__doc__ = ''' Padding methods for password based encryption I. Functions: appendPadding(str, blocksize=AES_blocksize, mode='CMS'): Pad (append padding to) string for use with symmetric encryption algorithm Input: (string) str - String to be padded (int) blocksize - block size of the encryption algorithm. Usually 8 or 16 bytes (string) mode - padding scheme one in (CMS, Bit, ZeroLen, Null, Space, Random) Return:(string) Padded string according to chosen padding mode removePadding(str, blocksize=AES_blocksize, mode='CMS'): Remove padding from string Input: (str) str - String to be padded (int) blocksize - block size of the algorithm. Usually 8 or 16 bytes (string) mode - padding scheme one in (CMS, Bit, ZeroLen, Null, Space, Random) Return:(string) Decrypted string without padding II. Blocksizes: DES (Triple DES), CAST5 and Blowfish have block size of 64 bits = 8 bytes DES_blocksize = 8 CAST5_blocksize = 8 Blowfish_blocksize = 8 AES has fixed block size of 128 bits = 16 bytes and this is the default blocksize AES_blocksize = 16 III. Mode: MODES ={ (0,'CMS') : 'Pad with bytes all of the same value as the number of padding bytes. Default mode used in Cryptographic Message Syntax (CMS as defined in RFC 5652, PKCS#5, PKCS#7 and RFC 1423 PEM)', (1,'Bit') : 'BitPadding: Pad with 0x80 (10000000) followed by zero (null) bytes. Described in ANSI X.923 and ISO/IEC 9797-1', (2,'ZeroLen') : 'Pad with zeroes except make the last byte equal to the number (length) of padding bytes', (3,'Null') : 'Pad with null bytes. Only for encrypting of text data.', (4,'Space') : 'Pad with spaces. Only for encrypting of text data.', (5,'Random') : 'ISO 10126 Padding (withdrawn in 2007): Pad with random bytes + last byte equal to the number of padding bytes' } CMS mode is the default one IV. 
Examples: Example 1: Add/Remove padding for message to be encrypted/decrypted with AES > from Padding import appendPadding, removePadding > msg = 'a'*20 > > padded_msg = appendPadding(msg) # 'Default blocksize is 16 bytes (128 bits) which is AES blocksize' > padded_msg, len(padded_msg) > msg = removePadding(padded_msg) > msg, len(msg) Example 2: Add/Remove padding for message to be encrypted/decrypted with DES (Triple DES), CAST5 or Blowfish > import Padding > msg = 'b'*20 > blocksize = Padding.DES_blocksize > "DES has fixed block size of %d bits = %d bytes" % (blocksize*8, blocksize) > padded_msg = Padding.appendPadding(msg, blocksize) > padded_msg, len(padded_msg) > msg = Padding.removePadding(padded_msg) > msg, len(msg) ''' DES_blocksize = 8 CAST5_blocksize = 8 Blowfish_blocksize = 8 AES_blocksize = 16 def paddingLength(str_len, blocksize=AES_blocksize): assert 0 < blocksize < 255, 'blocksize must be between 0 and 255' assert str_len > 0 , 'string length should be non-negative' 'If the last block is already full, append an extra block of padding' pad_len = blocksize - (str_len % blocksize) return pad_len def appendCMSPadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) padding = (chr(pad_len) * pad_len) return str + padding def removeCMSPadding(str, blocksize=AES_blocksize): try: pad_len = ord(str[-1]) except TypeError: pad_len = str[-1] assert pad_len <= blocksize, 'padding error' assert pad_len <= len(str), 'padding error' return str[:-pad_len] def appendBitPadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) - 1 padding = chr(0x80)+'\0'*pad_len return str + padding def removeBitPadding(str, blocksize=AES_blocksize): pad_len = 0 for char in str[::-1]: if char == '\0': pad_len += 1 else: break pad_len += 1 str = str[:-pad_len] return str def appendZeroLenPadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) - 1 padding = '\0'*pad_len+chr(pad_len) return str + padding def removeZeroLenPadding(str, blocksize=AES_blocksize): try: pad_len = ord(str[-1]) except TypeError: pad_len = str[-1] assert pad_len < blocksize, 'padding error' assert pad_len < len(str), 'padding error' return str[:-pad_len] def appendNullPadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) padding = '\0'*pad_len return str + padding def removeNullPadding(str, blocksize=AES_blocksize): pad_len = 0 for char in str[::-1]: if char == '\0': pad_len += 1 else: break str = str[:-pad_len] return str def appendSpacePadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) padding = '\0'*pad_len return str + padding def removeSpacePadding(str, blocksize=AES_blocksize): pad_len = 0 for char in str[::-1]: if char == ' ': pad_len += 1 else: break str = str[:-pad_len] return str def appendRandomLenPadding(str, blocksize=AES_blocksize): pad_len = paddingLength(len(str), blocksize) - 1 from os import urandom padding = urandom(pad_len)+chr(pad_len) return str + padding
MIT License
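Of the padding helpers shown in this module's context, the CMS (PKCS#7-style) pair is the easiest to sanity-check. The round-trip below assumes keen.Padding is importable from the keenclient-python package and that the str-based chr/ord handling matches your Python version; it is a sketch, not part of the module.

from keen.Padding import appendCMSPadding, removeCMSPadding, AES_blocksize

msg = 'a' * 20
padded = appendCMSPadding(msg, AES_blocksize)
print(len(padded))                       # 32: padded up to the next 16-byte boundary
print(removeCMSPadding(padded) == msg)   # True: the pad-length byte tells how much to strip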
windelbouwman/ppci
ppci/cli/wasmcompile.py
wasmcompile
python
def wasmcompile(args=None):
    args = parser.parse_args(args)
    with LogSetup(args) as log_setup:
        march = get_arch_from_args(args)
        wasm_module = read_wasm(args.wasm_file)
        args.wasm_file.close()
        ir_module = wasm_to_ir(
            wasm_module,
            march.info.get_type_info("ptr"),
            reporter=log_setup.reporter,
        )
        do_compile([ir_module], march, log_setup.reporter, log_setup.args)
Compile wasm to native code
https://github.com/windelbouwman/ppci/blob/915c069e0667042c085ec42c78e9e3c9a5295324/ppci/cli/wasmcompile.py#L25-L38
import argparse from .base import base_parser, march_parser from .base import LogSetup, get_arch_from_args from .compile_base import compile_parser, do_compile from ..wasm import read_wasm, wasm_to_ir parser = argparse.ArgumentParser( description=__doc__, parents=[base_parser, march_parser, compile_parser] ) parser.add_argument( "wasm_file", metavar="wasm file", type=argparse.FileType("rb"), help="wasm file to compile", )
BSD 2-Clause Simplified License
facelessuser/sublime-markdown-popups
st3/mdpopups/coloraide/color/interpolate.py
Interpolator.__call__
python
def __call__(self, p):
Call the interpolator.
https://github.com/facelessuser/sublime-markdown-popups/blob/aeb7586da26fe46b7764cf1e2832336bc306195d/st3/mdpopups/coloraide/color/interpolate.py#L57-L58
import math from abc import ABCMeta, abstractmethod from collections.abc import Sequence, Mapping, Callable from collections import namedtuple from .. import util from ..spaces import Cylindrical, Angle class Lerp: def __init__(self, progress): self.progress = progress def __call__(self, a, b, t): return a + (b - a) * (t if not isinstance(self.progress, Callable) else self.progress(t)) class Piecewise(namedtuple('Piecewise', ['color', 'stop', 'progress', 'hue', 'premultiplied'])): __slots__ = () def __new__(cls, color, stop=None, progress=None, hue=util.DEF_HUE_ADJ, premultiplied=False): return super().__new__(cls, color, stop, progress, hue, premultiplied) class Interpolator(metaclass=ABCMeta): @abstractmethod def __init__(self): @abstractmethod
MIT License
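Interpolator.__call__ above is abstract; the concrete math lives in helpers like the Lerp class from the same context, which is small enough to spot-check. It is reimplemented inline below so the snippet runs without sublime/mdpopups installed; the numeric inputs are arbitrary.

from collections.abc import Callable

class Lerp:
    # Mirrors the Lerp helper shown in the context above.
    def __init__(self, progress):
        self.progress = progress

    def __call__(self, a, b, t):
        # If progress is callable it remaps t (easing); otherwise t is used directly.
        return a + (b - a) * (t if not isinstance(self.progress, Callable) else self.progress(t))

print(Lerp(None)(0.0, 10.0, 0.25))              # 2.5: plain linear interpolation
print(Lerp(lambda t: t ** 2)(0.0, 10.0, 0.5))   # 2.5: t=0.5 eased to 0.25 by the progress function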
aparo/django-elasticsearch
django_elasticsearch/manager.py
QuerySet.__call__
python
def __call__(self, q_obj=None, **query):
    if q_obj:
        self._where_clause = q_obj.as_js(self._document)
    query = QuerySet._transform_query(_doc_cls=self._document, **query)
    self._query.update(query)
    return self
Filter the selected documents by calling the :class:`~mongoengine.queryset.QuerySet` with a query.

:param q_obj: a :class:`~mongoengine.queryset.Q` object to be used in the query; the
    :class:`~mongoengine.queryset.QuerySet` is filtered multiple times with different
    :class:`~mongoengine.queryset.Q` objects, only the last one will be used
:param query: Django-style query keyword arguments
https://github.com/aparo/django-elasticsearch/blob/8fd25bd86b58cfc0d6490cfac08e4846ab4ddf97/django_elasticsearch/manager.py#L173-L187
from django.db import connections from django.db.models.manager import Manager as DJManager import re import copy from .utils import dict_keys_to_str try: from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist except ImportError: class ObjectDoesNotExist(Exception): pass class MultipleObjectsReturned(Exception): pass DoesNotExist = ObjectDoesNotExist __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', 'InvalidCollectionError'] REPR_OUTPUT_SIZE = 20 class InvalidQueryError(Exception): pass class OperationError(Exception): pass class InvalidCollectionError(Exception): pass DoesNotExist = ObjectDoesNotExist RE_TYPE = type(re.compile('')) class Q(object): OR = '||' AND = '&&' OPERATORS = { 'eq': 'this.%(field)s == %(value)s', 'ne': 'this.%(field)s != %(value)s', 'gt': 'this.%(field)s > %(value)s', 'gte': 'this.%(field)s >= %(value)s', 'lt': 'this.%(field)s < %(value)s', 'lte': 'this.%(field)s <= %(value)s', 'lte': 'this.%(field)s <= %(value)s', 'in': '%(value)s.indexOf(this.%(field)s) != -1', 'nin': '%(value)s.indexOf(this.%(field)s) == -1', 'mod': '%(field)s %% %(value)s', 'all': ('%(value)s.every(function(a){' 'return this.%(field)s.indexOf(a) != -1 })'), 'size': 'this.%(field)s.length == %(value)s', 'exists': 'this.%(field)s != null', 'regex_eq': '%(value)s.test(this.%(field)s)', 'regex_ne': '!%(value)s.test(this.%(field)s)', } def __init__(self, **query): self.query = [query] def _combine(self, other, op): obj = Q() obj.query = ['('] + copy.deepcopy(self.query) + [op] obj.query += copy.deepcopy(other.query) + [')'] return obj def __or__(self, other): return self._combine(other, self.OR) def __and__(self, other): return self._combine(other, self.AND) def as_js(self, document): js = [] js_scope = {} for i, item in enumerate(self.query): if isinstance(item, dict): item_query = QuerySet._transform_query(document, **item) js.append(self._item_query_as_js(item_query, js_scope, i)) else: js.append(item) return pymongo.code.Code(' '.join(js), js_scope) def _item_query_as_js(self, item_query, js_scope, item_num): js = [] for i, (key, value) in enumerate(item_query.items()): op = 'eq' value_name = 'i%sf%s' % (item_num, i) if isinstance(value, dict): for j, (op, value) in enumerate(value.items()): op_value_name = '%so%s' % (value_name, j) value, operation_js = self._build_op_js(op, key, value, op_value_name) js_scope[op_value_name] = value js.append(operation_js) else: value, field_js = self._build_op_js(op, key, value, value_name) js_scope[value_name] = value js.append(field_js) return ' && '.join(js) def _build_op_js(self, op, key, value, value_name): if isinstance(value, RE_TYPE): if op.strip('$') == 'ne': op_js = Q.OPERATORS['regex_ne'] else: op_js = Q.OPERATORS['regex_eq'] else: op_js = Q.OPERATORS[op.strip('$')] operation_js = op_js % { 'field': key, 'value': value_name } return value, operation_js class InternalMetadata: def __init__(self, meta): self.object_name = meta["object_name"] class InternalModel: def __init__(self, document): self.document = document self._meta = InternalMetadata(document._meta) self.DoesNotExist = ObjectDoesNotExist class QuerySet(object): def __init__(self, document, collection): self._document = document self._collection_obj = collection self._accessed_collection = False self._query = {} self._where_clause = None self._loaded_fields = [] self._ordering = [] self.transform = TransformDjango() self._cursor_obj = None self._limit = None self._skip = None
BSD 3-Clause New or Revised License
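The Q class in this record's context builds a flat token list when combined with & and |; a minimal sketch with hypothetical field names, assuming Django and the package are importable (as_js is not exercised here since it needs a document class and pymongo).

from django_elasticsearch.manager import Q

q = (Q(age__gte=18) & Q(country='NL')) | Q(is_staff=True)
# _combine wraps each side in '(' ... ')' and joins them with '&&' / '||' markers.
print(q.query)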
knipknap/gelatin
Gelatin/util.py
generate_string_to_file
python
def generate_string_to_file(converter, input, output_file, format='xml', out_encoding='utf8'): with codecs.open(output_file, 'w', encoding=out_encoding) as thefile: result = generate_string(converter, input, format=format) thefile.write(result)
Like generate(), but reads the input from a string instead of from a file, and writes the output to the given output file. :type converter: compiler.Context :param converter: The compiled converter. :type input: str :param input: The string to convert. :type output_file: str :param output_file: The output filename. :type format: str :param format: The output format. :type out_encoding: str :param out_encoding: Character encoding of the output file. :rtype: str :return: The resulting output.
https://github.com/knipknap/gelatin/blob/d2afa85a48034d6ee34580e49e16542f31ad208e/Gelatin/util.py#L130-L154
import codecs from . import generator from .generator import Builder from .parser import Parser from .compiler import SyntaxCompiler def compile_string(syntax): return Parser().parse_string(syntax, SyntaxCompiler()) def compile(syntax_file, encoding='utf8'): return Parser().parse(syntax_file, SyntaxCompiler(), encoding=encoding) def generate(converter, input_file, format='xml', encoding='utf8'): with codecs.open(input_file, encoding=encoding) as thefile: return generate_string(converter, thefile.read(), format=format) def generate_to_file(converter, input_file, output_file, format='xml', in_encoding='utf8', out_encoding='utf8'): with codecs.open(output_file, 'w', encoding=out_encoding) as thefile: result = generate(converter, input_file, format=format, encoding=in_encoding) thefile.write(result) def generate_string(converter, input, format='xml'): serializer = generator.new(format) if serializer is None: raise TypeError('invalid output format ' + repr(format)) builder = Builder() converter.parse_string(input, builder) return builder.serialize(serializer)
MIT License
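A hedged usage sketch for generate_string_to_file above; the call pattern follows the signatures in the record's context, but 'my_syntax.gel', the input text, and 'out.xml' are placeholders, and the Gelatin grammar itself is not shown here.

from Gelatin.util import compile, generate_string_to_file

converter = compile('my_syntax.gel')   # compile a Gelatin syntax file into a converter
generate_string_to_file(converter, 'raw input text\n', 'out.xml', format='xml')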
armmbed/greentea
src/htrun/host_tests_plugins/module_copy_stlink.py
HostTestPluginCopyMethod_Stlink.execute
python
def execute(self, capability, *args, **kwargs): result = False if self.check_parameters(capability, *args, **kwargs) is True: image_path = os.path.normpath(kwargs["image_path"]) if capability == "stlink": cmd = [self.ST_LINK_CLI, "-p", image_path, "0x08000000", "-V"] result = self.run_command(cmd) return result
Copy a firmware image to a device using the ST-LINK-CLI. If the "capability" name is not 'stlink' this method will just fail. Args: capability: Capability name. args: Additional arguments. kwargs: Additional arguments. Returns: True if the copy succeeded, otherwise False.
https://github.com/armmbed/greentea/blob/1c67ffc4651af602d99a27be8d0fab3dbc32a34e/src/htrun/host_tests_plugins/module_copy_stlink.py#L43-L65
import os from .host_test_plugins import HostTestPluginBase class HostTestPluginCopyMethod_Stlink(HostTestPluginBase): name = "HostTestPluginCopyMethod_Stlink" type = "CopyMethod" capabilities = ["stlink"] required_parameters = ["image_path"] def __init__(self): HostTestPluginBase.__init__(self) def is_os_supported(self, os_name=None): if not os_name: os_name = self.host_os_support() if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): self.ST_LINK_CLI = "ST-LINK_CLI.exe" return True
Apache License 2.0
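A minimal sketch of driving the plugin above directly; the image path is a placeholder, and flashing only succeeds on a Windows host with ST-LINK_CLI.exe on the PATH, per is_os_supported and setup.

from htrun.host_tests_plugins.module_copy_stlink import HostTestPluginCopyMethod_Stlink

plugin = HostTestPluginCopyMethod_Stlink()
plugin.setup()                      # records the ST-LINK_CLI.exe command name
if plugin.is_os_supported():
    # image_path is the single required parameter for the 'stlink' capability
    ok = plugin.execute("stlink", image_path="BUILD/tests/app.bin")
    print("copy succeeded:", ok)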
bodenmillergroup/imctools
imctools/io/utils.py
sort_acquisition_channels
python
def sort_acquisition_channels(session: Session): for a in session.acquisitions.values(): ordered_dict = dict() ac_channels = list(a.channels.values()) ac_channels.sort(key=lambda x: x.order_number) for c in ac_channels: ordered_dict[c.id] = c a.channels = ordered_dict
Sort entries of acquisition channels dictionary by channel order number
https://github.com/bodenmillergroup/imctools/blob/5019836df5dc2b682722e39d5f9c62799b658929/imctools/io/utils.py#L112-L120
from __future__ import annotations import xml.etree.ElementTree as ET from typing import TYPE_CHECKING, Optional, Sequence import numpy as np import xtiff if TYPE_CHECKING: from imctools.data import Session SESSION_JSON_SUFFIX = "_session.json" SCHEMA_XML_SUFFIX = "_schema.xml" OME_TIFF_SUFFIX = "_ac.ome.tiff" META_CSV_SUFFIX = "_meta.csv" MCD_FILENDING = ".mcd" ZIP_FILENDING = ".zip" CSV_FILENDING = ".csv" SCHEMA_FILENDING = ".schema" def reshape_long_2_cyx( data: np.memmap, is_sorted: bool = True, shape: Optional[np.ndarray] = None, channel_indices: Optional[Sequence[int]] = None, ): if shape is None: shape = data[:, :2].max(axis=0) + 1 if np.prod(shape) > data.shape[0]: shape[1] -= 1 shape = shape.astype(int) if channel_indices is None: channel_indices = range(data.shape[1]) n_channels = len(channel_indices) if is_sorted: tmp_data = data[:, channel_indices] img = np.reshape(tmp_data[: (np.prod(shape)), :], [shape[1], shape[0], n_channels], order="C") img = img.swapaxes(0, 2) img = img.swapaxes(1, 2) return img else: return NotImplemented def get_ome_xml( img: np.ndarray, image_name: Optional[str], channel_names: Optional[Sequence[str]], big_endian: bool, pixel_size: Optional[float], pixel_depth: Optional[float], creator: Optional[str] = None, acquisition_date: Optional[str] = None, channel_fluors: Optional[Sequence[str]] = None, xml_metadata: Optional[str] = None, **ome_xml_kwargs, ) -> ET.ElementTree: size_t, size_z, size_c, size_y, size_x, size_s = img.shape element_tree = xtiff.get_ome_xml( img, image_name, channel_names, big_endian, pixel_size, pixel_depth, **ome_xml_kwargs ) if creator is not None: ome_element = element_tree.getroot() ome_element.set("Creator", creator) if acquisition_date is not None: image_element = element_tree.find("./Image") acquisition_date_element = ET.Element("AcquisitionDate") acquisition_date_element.text = acquisition_date image_element.insert(0, acquisition_date_element) if channel_fluors is not None: assert len(channel_fluors) == size_c channel_elements = element_tree.findall("./Image/Pixels/Channel") assert channel_elements is not None and len(channel_elements) == size_c for channel_element, channel_fluor in zip(channel_elements, channel_fluors): channel_element.set("Fluor", channel_fluor) if xml_metadata is not None: ome_element = element_tree.getroot() structured_annotations_element = ET.SubElement(ome_element, "StructuredAnnotations") xml_annotation_element = ET.SubElement(structured_annotations_element, "XMLAnnotation") xml_annotation_element.set("ID", "Annotation:0") xml_annotation_value_element = ET.SubElement(xml_annotation_element, "Value") original_metadata_element = ET.SubElement(xml_annotation_value_element, "OriginalMetadata") ET.SubElement(original_metadata_element, "Key").text = "MCD-XML" ET.SubElement(original_metadata_element, "Value").text = xml_metadata.replace("\r\n", "") return element_tree
MIT License
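sort_acquisition_channels above only relies on each acquisition exposing a channels dict whose values carry id and order_number attributes; a sketch using stand-in namespace objects (not the real imctools Session/Acquisition classes) to illustrate the reordering.

from types import SimpleNamespace

from imctools.io.utils import sort_acquisition_channels

# Stand-in objects that mimic only the attributes the function touches.
channels = {
    "c1": SimpleNamespace(id="c1", order_number=2),
    "c0": SimpleNamespace(id="c0", order_number=1),
}
acquisition = SimpleNamespace(channels=channels)
session = SimpleNamespace(acquisitions={"a0": acquisition})

sort_acquisition_channels(session)
print(list(acquisition.channels))   # ['c0', 'c1'] -- keys now follow order_number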
rucio/rucio
lib/rucio/web/rest/flaskapi/v1/auth.py
SAML.get
python
def get(self): headers = self.get_headers() headers.set('Content-Type', 'application/octet-stream') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') if not EXTRA_MODULES['onelogin']: return "SAML not configured on the server side.", 400, headers saml_nameid = request.cookies.get('saml-nameid', default=None) vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) if saml_nameid: try: result = get_auth_token_saml(account, saml_nameid, appid, ip, vo=vo) except AccessDenied: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) if not result: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) headers.set('X-Rucio-Auth-Token', result['token']) headers.set('X-Rucio-Auth-Token-Expires', date_to_str(result['expires_at'])) return '', 200, headers SAML_PATH = config_get('saml', 'config_path') req = prepare_saml_request(request.environ, dict(request.args.items(multi=False))) auth = OneLogin_Saml2_Auth(req, custom_base_path=SAML_PATH) headers.set('X-Rucio-SAML-Auth-URL', auth.login()) return '', 200, headers
.. :quickref: SAML; :status 200: OK :status 401: Unauthorized :reqheader Rucio-VO: VO name as a string (Multi-VO only) :reqheader Rucio-Account: Account identifier as a string. :reqheader Rucio-Username: Username as a string. :reqheader Rucio-Password: Password as a string. :reqheader Rucio-AppID: Application identifier as a string. :resheader X-Rucio-SAML-Auth-URL: as a variable-length string header.
https://github.com/rucio/rucio/blob/6a6092798bb8220dec07328d0e3f7f42d1b931cd/lib/rucio/web/rest/flaskapi/v1/auth.py#L859-L918
from __future__ import print_function import base64 import logging import time from re import search from typing import TYPE_CHECKING from flask import Flask, Blueprint, request, Response, redirect, render_template from six.moves.urllib.parse import urlparse from werkzeug.datastructures import Headers from rucio.api.authentication import get_auth_token_user_pass, get_auth_token_gss, get_auth_token_x509, get_auth_token_ssh, get_ssh_challenge_token, validate_auth_token, get_auth_oidc, redirect_auth_oidc, get_token_oidc, refresh_cli_auth_token, get_auth_token_saml from rucio.common.config import config_get from rucio.common.exception import AccessDenied, IdentityError, CannotAuthenticate, CannotAuthorize from rucio.common.extra import import_extras from rucio.common.utils import date_to_str from rucio.web.rest.flaskapi.v1.common import check_accept_header_wrapper_flask, error_headers, extract_vo, generate_http_error_flask, ErrorHandlingMethodView if TYPE_CHECKING: from typing import Optional from rucio.web.rest.flaskapi.v1.common import HeadersType EXTRA_MODULES = import_extras(['onelogin']) if EXTRA_MODULES['onelogin']: from onelogin.saml2.auth import OneLogin_Saml2_Auth from rucio.web.ui.flask.common.utils import prepare_saml_request class UserPass(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = '*' headers['Access-Control-Allow-Credentials'] = 'true' headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers['Content-Type'] = 'application/octet-stream' headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate' headers.add('Cache-Control', 'post-check=0, pre-check=0') headers['Pragma'] = 'no-cache' vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) username = request.headers.get('X-Rucio-Username', default=None) password = request.headers.get('X-Rucio-Password', default=None) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) if not account or not username or not password: return generate_http_error_flask(401, CannotAuthenticate.__name__, 'Cannot authenticate without passing all required arguments', headers=headers) try: result = get_auth_token_user_pass(account, username, password, appid, ip, vo=vo) except AccessDenied: return generate_http_error_flask(401, CannotAuthenticate.__name__, f'Cannot authenticate to account {account} with given credentials', headers=headers) if not result: return generate_http_error_flask(401, CannotAuthenticate.__name__, f'Cannot authenticate to account {account} with given credentials', headers=headers) headers['X-Rucio-Auth-Token'] = result['token'] headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at']) return '', 200, headers class OIDC(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = 
'*' headers['Access-Control-Allow-Credentials'] = 'true' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers.set('Content-Type', 'application/octet-stream') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') vo = extract_vo(request.headers) account = request.environ.get('HTTP_X_RUCIO_ACCOUNT', 'webui') auth_scope = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_SCOPE', "") audience = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_AUDIENCE', "") auto = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_AUTO', False) issuer = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_ISSUER', None) polling = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_POLLING', False) refresh_lifetime = request.environ.get('HTTP_X_RUCIO_CLIENT_AUTHORIZE_REFRESH_LIFETIME', None) auto = (auto == 'True' or auto == 'true') polling = (polling == 'True' or polling == 'true') if refresh_lifetime == 'None': refresh_lifetime = None ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) try: kwargs = {'auth_scope': auth_scope, 'audience': audience, 'issuer': issuer, 'auto': auto, 'polling': polling, 'refresh_lifetime': refresh_lifetime, 'ip': ip} result = get_auth_oidc(account, vo=vo, **kwargs) except AccessDenied: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot get authentication URL from Rucio Authentication Server for account {account}', headers=headers ) if not result: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot get authentication URL from Rucio Authentication Server for account {account}', headers=headers ) headers.set('X-Rucio-OIDC-Auth-URL', result) return '', 200, headers class RedirectOIDC(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers.set('Access-Control-Allow-Origin', request.environ.get('HTTP_ORIGIN')) headers.set('Access-Control-Allow-Headers', request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS')) headers.set('Access-Control-Allow-Methods', '*') headers.set('Access-Control-Allow-Credentials', 'true') return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream', 'text/html']) def get(self): headers = self.get_headers() headers.set('Content-Type', 'text/html') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') try: fetchtoken = (request.headers.get('X-Rucio-Client-Fetch-Token', default=None) == 'True') query_string = request.query_string.decode(encoding='utf-8') result = redirect_auth_oidc(query_string, fetchtoken) except AccessDenied: headers.extend(error_headers(CannotAuthenticate.__name__, 'Cannot authorize your access, please check your access credentials')) return render_template('auth_crash.html', crashtype='contact'), 401, headers except Exception as error: logging.exception("Internal Error") headers.extend(error_headers(error.__class__.__name__, str(error.args[0]))) return render_template('auth_crash.html', crashtype='internal_error'), 500, headers if not result: headers.extend(error_headers(CannotAuthenticate.__name__, 'Cannot finalize your token request, no 
authorization content returned from the auth server')) return render_template('auth_crash.html', crashtype='no_result'), 401, headers if fetchtoken: headers.set('Content-Type', 'application/octet-stream') headers.set('X-Rucio-Auth-Token', result) return '', 200, headers else: response = redirect(result, code=303) response.headers.extend(headers) return response class CodeOIDC(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers.set('Access-Control-Allow-Origin', request.environ.get('HTTP_ORIGIN')) headers.set('Access-Control-Allow-Headers', request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS')) headers.set('Access-Control-Allow-Methods', '*') headers.set('Access-Control-Allow-Credentials', 'true') return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream', 'text/html']) def get(self): headers = self.get_headers() headers.set('Content-Type', 'text/html') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') query_string = request.query_string.decode(encoding='utf-8') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) try: result = get_token_oidc(query_string, ip) except AccessDenied: headers.extend(error_headers(CannotAuthenticate.__name__, 'Cannot authorize your access, please check your access credentials')) return render_template('auth_crash.html', crashtype='contact'), 401, headers except Exception as error: logging.exception("Internal Error") headers.extend(error_headers(error.__class__.__name__, str(error.args[0]))) return render_template('auth_crash.html', crashtype='internal_error'), 500, headers if not result: headers.extend(error_headers(CannotAuthenticate.__name__, 'Cannot finalize your token request, no authorization content returned from the auth server')) return render_template('auth_crash.html', crashtype='no_result'), 401, headers if 'fetchcode' in result: return render_template('auth_granted.html', authcode=result['fetchcode']), 200, headers elif 'polling' in result and result['polling'] is True: return render_template('auth_granted.html', authcode='allok'), 200, headers else: headers.extend(error_headers('InvalidRequest', 'Cannot recognize and process your request')) return render_template('auth_crash.html', crashtype='bad_request'), 400, headers class TokenOIDC(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers.set('Access-Control-Allow-Origin', request.environ.get('HTTP_ORIGIN')) headers.set('Access-Control-Allow-Headers', request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS')) headers.set('Access-Control-Allow-Methods', '*') headers.set('Access-Control-Allow-Credentials', 'true') return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers.set('Content-Type', 'application/octet-stream') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') query_string = request.query_string.decode(encoding='utf-8') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) try: result = get_token_oidc(query_string, ip) except AccessDenied: return generate_http_error_flask(401, CannotAuthorize.__name__, 'Cannot 
authorize token request.', headers=headers) if not result: return generate_http_error_flask(401, CannotAuthorize.__name__, 'Cannot authorize token request.', headers=headers) if 'token' in result and 'webhome' not in result: headers.set('X-Rucio-Auth-Token', result['token']['token']) headers.set('X-Rucio-Auth-Token-Expires', date_to_str(result['token']['expires_at'])) return '', 200, headers elif 'webhome' in result: webhome = result['webhome'] if webhome is None: headers.extend(error_headers(CannotAuthenticate.__name__, 'Cannot find your OIDC identity linked to any Rucio account')) headers.set('Content-Type', 'text/html') return render_template('auth_crash.html', crashtype='unknown_identity'), 401, headers domain = '.'.join(urlparse(webhome).netloc.split('.')[1:]) response = redirect(webhome, code=303) response.headers.extend(headers) response.set_cookie('x-rucio-auth-token', value=result['token']['token'], domain=domain, path='/') response.set_cookie('rucio-auth-token-created-at', value=str(time.time()), domain=domain, path='/') return response else: return '', 400, headers class RefreshOIDC(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers.set('Access-Control-Allow-Origin', request.environ.get('HTTP_ORIGIN')) headers.set('Access-Control-Allow-Headers', request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS')) headers.set('Access-Control-Allow-Methods', '*') headers.set('Access-Control-Allow-Credentials', 'true') headers.set('Access-Control-Expose-Headers', 'X-Rucio-Auth-Token') return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers.set('Content-Type', 'application/octet-stream') headers.set('Cache-Control', 'no-cache, no-store, max-age=0, must-revalidate') headers.add('Cache-Control', 'post-check=0, pre-check=0') headers.set('Pragma', 'no-cache') vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) token = request.headers.get('X-Rucio-Auth-Token', default=None) if token is None or account is None: return generate_http_error_flask(401, CannotAuthorize.__name__, 'Cannot authorize token request.', headers=headers) try: result = refresh_cli_auth_token(token, account, vo=vo) except AccessDenied: return generate_http_error_flask(401, CannotAuthorize.__name__, 'Cannot authorize token request.', headers=headers) if result is not None and len(result) > 1: headers.set('X-Rucio-Auth-Token', str(result[0])) headers.set('X-Rucio-Auth-Token-Expires', str(result[1])) else: headers.set('X-Rucio-Auth-Token', '') headers.set('X-Rucio-Auth-Token-Expires', '') return '', 200, headers class GSS(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = '*' headers['Access-Control-Allow-Credentials'] = 'true' headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers['Content-Type'] = 'application/octet-stream' headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate' headers.add('Cache-Control', 'post-check=0, pre-check=0') 
headers['Pragma'] = 'no-cache' vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) gsscred = request.environ.get('REMOTE_USER') appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) try: result = get_auth_token_gss(account, gsscred, appid, ip, vo=vo) except AccessDenied: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) if result is None: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) headers['X-Rucio-Auth-Token'] = result['token'] headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at']) return '', 200, headers class x509(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = '*' headers['Access-Control-Allow-Credentials'] = 'true' headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers['Content-Type'] = 'application/octet-stream' headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate' headers.add('Cache-Control', 'post-check=0, pre-check=0') headers['Pragma'] = 'no-cache' vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) dn = request.environ.get('SSL_CLIENT_S_DN') if not dn: return generate_http_error_flask(401, CannotAuthenticate.__name__, 'Cannot get DN', headers=headers) if not dn.startswith('/'): dn = '/' + '/'.join(dn.split(',')[::-1]) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) while True: if dn.endswith('/CN=limited proxy'): dn = dn[:-17] elif dn.endswith('/CN=proxy'): dn = dn[:-9] elif search('/CN=[0-9]*$', dn): dn = dn.rpartition('/')[0] else: break try: result = get_auth_token_x509(account, dn, appid, ip, vo=vo) except AccessDenied: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) except IdentityError: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'No default account set for {dn}', headers=headers ) if not result: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) headers['X-Rucio-Auth-Token'] = result['token'] headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at']) return '', 200, headers class SSH(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = '*' headers['Access-Control-Allow-Credentials'] = 'true' 
headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers['Content-Type'] = 'application/octet-stream' headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate' headers.add('Cache-Control', 'post-check=0, pre-check=0') headers['Pragma'] = 'no-cache' vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) signature = request.headers.get('X-Rucio-SSH-Signature', default=None) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) try: signature += '=' * ((4 - len(signature) % 4) % 4) signature = base64.b64decode(signature) except TypeError: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with malformed signature', headers=headers ) try: result = get_auth_token_ssh(account, signature, appid, ip, vo=vo) except AccessDenied: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) if not result: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot authenticate to account {account} with given credentials', headers=headers ) headers['X-Rucio-Auth-Token'] = result['token'] headers['X-Rucio-Auth-Token-Expires'] = date_to_str(result['expires_at']) return '', 200, headers class SSHChallengeToken(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers['Access-Control-Allow-Origin'] = request.environ.get('HTTP_ORIGIN') headers['Access-Control-Allow-Headers'] = request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS') headers['Access-Control-Allow-Methods'] = '*' headers['Access-Control-Allow-Credentials'] = 'true' headers['Access-Control-Expose-Headers'] = 'X-Rucio-Auth-Token' return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream']) def get(self): headers = self.get_headers() headers['Content-Type'] = 'application/octet-stream' headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate' headers.add('Cache-Control', 'post-check=0, pre-check=0') headers['Pragma'] = 'no-cache' vo = extract_vo(request.headers) account = request.headers.get('X-Rucio-Account', default=None) appid = request.headers.get('X-Rucio-AppID', default='unknown') ip = request.headers.get('X-Forwarded-For', default=request.remote_addr) result = get_ssh_challenge_token(account, appid, ip, vo=vo) if not result: return generate_http_error_flask( status_code=401, exc=CannotAuthenticate.__name__, exc_msg=f'Cannot generate challenge for account {account}', headers=headers ) headers['X-Rucio-SSH-Challenge-Token'] = result['token'] headers['X-Rucio-SSH-Challenge-Token-Expires'] = date_to_str(result['expires_at']) return '', 200, headers class SAML(ErrorHandlingMethodView): def get_headers(self) -> "Optional[HeadersType]": headers = Headers() headers.set('Access-Control-Allow-Origin', request.environ.get('HTTP_ORIGIN')) headers.set('Access-Control-Allow-Headers', request.environ.get('HTTP_ACCESS_CONTROL_REQUEST_HEADERS')) headers.set('Access-Control-Allow-Methods', '*') headers.set('Access-Control-Allow-Credentials', 'true') 
headers.set('Access-Control-Expose-Headers', 'X-Rucio-Auth-Token') return headers def options(self): return '', 200, self.get_headers() @check_accept_header_wrapper_flask(['application/octet-stream'])
Apache License 2.0
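A hedged client-side sketch against the SAML handler above using requests: the header and cookie names come from the handler code, but the server URL and the endpoint path are assumptions, since the blueprint routing is not part of this record.

import requests

# Hypothetical Rucio auth server URL and endpoint path.
response = requests.get(
    "https://rucio-auth.example.org/auth/saml",
    headers={"X-Rucio-Account": "root", "X-Rucio-AppID": "my-client"},
    cookies={"saml-nameid": "user@example.org"},   # set once a SAML login has completed
)
print(response.headers.get("X-Rucio-Auth-Token"),
      response.headers.get("X-Rucio-Auth-Token-Expires"))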
xknx/xknx
xknx/telegram/apci.py
DeviceDescriptorResponse.__init__
python
def __init__(self, descriptor: int = 0, value: int = 0) -> None: self.descriptor = descriptor self.value = value
Initialize a new instance of DeviceDescriptorResponse.
https://github.com/xknx/xknx/blob/87666cc9bd9da64a84305baeff84486097346111/xknx/telegram/apci.py#L679-L682
from __future__ import annotations from abc import ABC, abstractmethod from enum import Enum import struct from typing import ClassVar, cast from xknx.dpt import DPTArray, DPTBinary from xknx.exceptions import ConversionError from xknx.telegram.address import IndividualAddress def encode_cmd_and_payload( cmd: APCIService | APCIUserService | APCIExtendedService, encoded_payload: int = 0, appended_payload: bytes | None = None, ) -> bytes: if appended_payload is None: appended_payload = bytes() data = bytearray( [ (cmd.value >> 8) & 0xFF, (cmd.value & 0xFF) | (encoded_payload & DPTBinary.APCI_BITMASK), ] ) data.extend(appended_payload) return data class APCIService(Enum): GROUP_READ = 0x0000 GROUP_RESPONSE = 0x0040 GROUP_WRITE = 0x0080 INDIVIDUAL_ADDRESS_WRITE = 0x00C0 INDIVIDUAL_ADDRESS_READ = 0x0100 INDIVIDUAL_ADDRESS_RESPONSE = 0x140 ADC_READ = 0x0180 ADC_RESPONSE = 0x1C0 MEMORY_READ = 0x0200 MEMORY_RESPONSE = 0x0240 MEMORY_WRITE = 0x0280 USER_MESSAGE = 0x02C0 DEVICE_DESCRIPTOR_READ = 0x0300 DEVICE_DESCRIPTOR_RESPONSE = 0x0340 RESTART = 0x0380 ESCAPE = 0x03C0 class APCIUserService(Enum): USER_MEMORY_READ = 0x02C0 USER_MEMORY_RESPONSE = 0x02C1 USER_MEMORY_WRITE = 0x02C2 USER_MANUFACTURER_INFO_READ = 0x02C5 USER_MANUFACTURER_INFO_RESPONSE = 0x02C6 FUNCTION_PROPERTY_COMMAND = 0x02C7 FUNCTION_PROPERTY_STATE_READ = 0x02C8 FUNCTION_PROPERTY_STATE_RESPONSE = 0x02C9 class APCIExtendedService(Enum): AUTHORIZE_REQUEST = 0x03D1 AUTHORIZE_RESPONSE = 0x03D2 PROPERTY_VALUE_READ = 0x03D5 PROPERTY_VALUE_RESPONSE = 0x03D6 PROPERTY_VALUE_WRITE = 0x03D7 PROPERTY_DESCRIPTION_READ = 0x03D8 PROPERTY_DESCRIPTION_RESPONSE = 0x03D9 INDIVIDUAL_ADDRESS_SERIAL_READ = 0x03DC INDIVIDUAL_ADDRESS_SERIAL_RESPONSE = 0x03DD INDIVIDUAL_ADDRESS_SERIAL_WRITE = 0x03DE class APCI(ABC): CODE: ClassVar[APCIService | APCIUserService | APCIExtendedService] = cast( APCIService, None ) @abstractmethod def calculated_length(self) -> int: @abstractmethod def from_knx(self, raw: bytes) -> None: @abstractmethod def to_knx(self) -> bytes: def __eq__(self, other: object) -> bool: return self.__dict__ == other.__dict__ @staticmethod def resolve_apci(apci: int) -> APCI: service = apci & 0x03C0 if service == APCIService.GROUP_READ.value: return GroupValueRead() if service == APCIService.GROUP_WRITE.value: return GroupValueWrite() if service == APCIService.GROUP_RESPONSE.value: return GroupValueResponse() if service == APCIService.INDIVIDUAL_ADDRESS_WRITE.value: return IndividualAddressWrite() if service == APCIService.INDIVIDUAL_ADDRESS_READ.value: return IndividualAddressRead() if service == APCIService.INDIVIDUAL_ADDRESS_RESPONSE.value: return IndividualAddressResponse() if service == APCIService.ADC_READ.value: return ADCRead() if service == APCIService.ADC_RESPONSE.value: return ADCResponse() if service == APCIService.MEMORY_READ.value: return MemoryRead() if service == APCIService.MEMORY_WRITE.value: return MemoryWrite() if service == APCIService.MEMORY_RESPONSE.value: return MemoryResponse() if service == APCIService.USER_MESSAGE.value: if apci == APCIUserService.USER_MEMORY_READ.value: return UserMemoryRead() if apci == APCIUserService.USER_MEMORY_RESPONSE.value: return UserMemoryResponse() if apci == APCIUserService.USER_MEMORY_WRITE.value: return UserMemoryWrite() if apci == APCIUserService.USER_MANUFACTURER_INFO_READ.value: return UserManufacturerInfoRead() if apci == APCIUserService.USER_MANUFACTURER_INFO_RESPONSE.value: return UserManufacturerInfoResponse() if apci == APCIUserService.FUNCTION_PROPERTY_COMMAND.value: return 
FunctionPropertyCommand() if apci == APCIUserService.FUNCTION_PROPERTY_STATE_READ.value: return FunctionPropertyStateRead() if apci == APCIUserService.FUNCTION_PROPERTY_STATE_RESPONSE.value: return FunctionPropertyStateResponse() if service == APCIService.DEVICE_DESCRIPTOR_READ.value: return DeviceDescriptorRead() if service == APCIService.DEVICE_DESCRIPTOR_RESPONSE.value: return DeviceDescriptorResponse() if service == APCIService.RESTART.value: return Restart() if service == APCIService.ESCAPE.value: if apci == APCIExtendedService.AUTHORIZE_REQUEST.value: return AuthorizeRequest() if apci == APCIExtendedService.AUTHORIZE_RESPONSE.value: return AuthorizeResponse() if apci == APCIExtendedService.PROPERTY_VALUE_READ.value: return PropertyValueRead() if apci == APCIExtendedService.PROPERTY_VALUE_WRITE.value: return PropertyValueWrite() if apci == APCIExtendedService.PROPERTY_VALUE_RESPONSE.value: return PropertyValueResponse() if apci == APCIExtendedService.PROPERTY_DESCRIPTION_READ.value: return PropertyDescriptionRead() if apci == APCIExtendedService.PROPERTY_DESCRIPTION_RESPONSE.value: return PropertyDescriptionResponse() if apci == APCIExtendedService.INDIVIDUAL_ADDRESS_SERIAL_READ.value: return IndividualAddressSerialRead() if apci == APCIExtendedService.INDIVIDUAL_ADDRESS_SERIAL_RESPONSE.value: return IndividualAddressSerialResponse() if apci == APCIExtendedService.INDIVIDUAL_ADDRESS_SERIAL_WRITE.value: return IndividualAddressSerialWrite() raise ConversionError(f"Class not implemented for APCI {apci:#012b}.") class GroupValueRead(APCI): CODE = APCIService.GROUP_READ def calculated_length(self) -> int: return 1 def from_knx(self, raw: bytes) -> None: return def to_knx(self) -> bytes: return encode_cmd_and_payload(self.CODE) def __str__(self) -> str: return "<GroupValueRead />" class GroupValueWrite(APCI): CODE = APCIService.GROUP_WRITE def __init__(self, value: DPTBinary | DPTArray | None = None) -> None: self.value = value def calculated_length(self) -> int: if isinstance(self.value, DPTBinary): return 1 if isinstance(self.value, DPTArray): return 1 + len(self.value.value) raise TypeError() def from_knx(self, raw: bytes) -> None: if len(raw) == 2: self.value = DPTBinary(raw[1] & DPTBinary.APCI_BITMASK) else: self.value = DPTArray(raw[2:]) def to_knx(self) -> bytes: if isinstance(self.value, DPTBinary): return encode_cmd_and_payload(self.CODE, encoded_payload=self.value.value) if isinstance(self.value, DPTArray): return encode_cmd_and_payload( self.CODE, appended_payload=bytes(self.value.value) ) raise TypeError() def __str__(self) -> str: return f'<GroupValueWrite value="{self.value}" />' class GroupValueResponse(APCI): CODE = APCIService.GROUP_RESPONSE def __init__(self, value: DPTBinary | DPTArray | None = None) -> None: self.value = value def calculated_length(self) -> int: if isinstance(self.value, DPTBinary): return 1 if isinstance(self.value, DPTArray): return 1 + len(self.value.value) raise TypeError() def from_knx(self, raw: bytes) -> None: if len(raw) == 2: self.value = DPTBinary(raw[1] & DPTBinary.APCI_BITMASK) else: self.value = DPTArray(raw[2:]) def to_knx(self) -> bytes: if isinstance(self.value, DPTBinary): return encode_cmd_and_payload(self.CODE, encoded_payload=self.value.value) if isinstance(self.value, DPTArray): return encode_cmd_and_payload( self.CODE, appended_payload=bytes(self.value.value) ) raise TypeError() def __str__(self) -> str: return f'<GroupValueResponse value="{self.value}" />' class IndividualAddressWrite(APCI): CODE = 
APCIService.INDIVIDUAL_ADDRESS_WRITE def __init__( self, address: IndividualAddress | None = None, ) -> None: if address is None: address = IndividualAddress("0.0.0") self.address = address def calculated_length(self) -> int: return 3 def from_knx(self, raw: bytes) -> None: address_high, address_low = struct.unpack("!BB", raw[2:]) self.address = IndividualAddress((address_high, address_low)) def to_knx(self) -> bytes: return encode_cmd_and_payload( self.CODE, appended_payload=bytes(self.address.to_knx()) ) def __str__(self) -> str: return f'<IndividualAddressWrite address="{self.address}" />' class IndividualAddressRead(APCI): CODE = APCIService.INDIVIDUAL_ADDRESS_READ def calculated_length(self) -> int: return 1 def from_knx(self, raw: bytes) -> None: return def to_knx(self) -> bytes: return encode_cmd_and_payload(self.CODE) def __str__(self) -> str: return "<IndividualAddressRead />" class IndividualAddressResponse(APCI): CODE = APCIService.INDIVIDUAL_ADDRESS_RESPONSE def calculated_length(self) -> int: return 1 def from_knx(self, raw: bytes) -> None: return def to_knx(self) -> bytes: return encode_cmd_and_payload(self.CODE) def __str__(self) -> str: return "<IndividualAddressResponse />" class ADCRead(APCI): CODE = APCIService.ADC_READ def __init__(self, channel: int = 0, count: int = 0) -> None: self.channel = channel self.count = count def calculated_length(self) -> int: return 2 def from_knx(self, raw: bytes) -> None: channel, self.count = struct.unpack("!BB", raw[1:]) self.channel = channel & DPTBinary.APCI_BITMASK def to_knx(self) -> bytes: payload = struct.pack("!BB", self.channel, self.count) return encode_cmd_and_payload( self.CODE, encoded_payload=payload[0], appended_payload=payload[1:] ) def __str__(self) -> str: return f'<ADCRead channel="{self.channel}" count="{self.count}" />' class ADCResponse(APCI): CODE = APCIService.ADC_RESPONSE def __init__(self, channel: int = 0, count: int = 0, value: int = 0) -> None: self.channel = channel self.count = count self.value = value def calculated_length(self) -> int: return 4 def from_knx(self, raw: bytes) -> None: channel, self.count, self.value = struct.unpack("!BBH", raw[1:]) self.channel = channel & DPTBinary.APCI_BITMASK def to_knx(self) -> bytes: payload = struct.pack("!BBH", self.channel, self.count, self.value) return encode_cmd_and_payload( self.CODE, encoded_payload=payload[0], appended_payload=payload[1:] ) def __str__(self) -> str: return f'<ADCResponse channel="{self.channel}" count="{self.count}" value="{self.value}" />' class MemoryRead(APCI): CODE = APCIService.MEMORY_READ def __init__(self, address: int = 0, count: int = 0) -> None: self.address = address self.count = count def calculated_length(self) -> int: return 3 def from_knx(self, raw: bytes) -> None: count, self.address = struct.unpack("!BH", raw[1:]) self.count = count & DPTBinary.APCI_BITMASK def to_knx(self) -> bytes: if self.address < 0 or self.address >= 2 ** 16: raise ConversionError("Address out of range.") if self.count < 0 or self.count >= 2 ** 6: raise ConversionError("Count out of range.") payload = struct.pack("!BH", self.count, self.address) return encode_cmd_and_payload( self.CODE, encoded_payload=payload[0], appended_payload=payload[1:] ) def __str__(self) -> str: return f'<MemoryRead address="{hex(self.address)}" count="{self.count}" />' class MemoryWrite(APCI): CODE = APCIService.MEMORY_WRITE def __init__( self, address: int = 0, count: int = 0, data: bytes | None = None ) -> None: if data is None: data = bytearray() self.address = address 
self.count = count self.data = data def calculated_length(self) -> int: return 3 + len(self.data) def from_knx(self, raw: bytes) -> None: size = len(raw) - 4 count, self.address, self.data = struct.unpack(f"!BH{size}s", raw[1:]) self.count = count & DPTBinary.APCI_BITMASK def to_knx(self) -> bytes: if self.address < 0 or self.address >= 2 ** 16: raise ConversionError("Address out of range.") if self.count < 0 or self.count >= 2 ** 6: raise ConversionError("Count out of range.") size = len(self.data) payload = struct.pack(f"!BH{size}s", self.count, self.address, self.data) return encode_cmd_and_payload( self.CODE, encoded_payload=payload[0], appended_payload=payload[1:] ) def __str__(self) -> str: return f'<MemoryWrite address="{hex(self.address)}" count="{self.count}" data="{self.data.hex()}" />' class MemoryResponse(APCI): CODE = APCIService.MEMORY_RESPONSE def __init__( self, address: int = 0, count: int = 0, data: bytes | None = None ) -> None: if data is None: data = bytearray() self.address = address self.count = count self.data = data def calculated_length(self) -> int: return 3 + len(self.data) def from_knx(self, raw: bytes) -> None: size = len(raw) - 4 count, self.address, self.data = struct.unpack(f"!BH{size}s", raw[1:]) self.count = count & DPTBinary.APCI_BITMASK def to_knx(self) -> bytes: if self.address < 0 or self.address >= 2 ** 16: raise ConversionError("Address out of range.") if self.count < 0 or self.count >= 2 ** 6: raise ConversionError("Count out of range.") size = len(self.data) payload = struct.pack(f"!BH{size}s", self.count, self.address, self.data) return encode_cmd_and_payload( self.CODE, encoded_payload=payload[0], appended_payload=payload[1:] ) def __str__(self) -> str: return f'<MemoryResponse address="{hex(self.address)}" count="{self.count}" data="{self.data.hex()}" />' class DeviceDescriptorRead(APCI): CODE = APCIService.DEVICE_DESCRIPTOR_READ def __init__(self, descriptor: int = 0) -> None: self.descriptor = descriptor def calculated_length(self) -> int: return 1 def from_knx(self, raw: bytes) -> None: self.descriptor = raw[1] & 0x3F def to_knx(self) -> bytes: if self.descriptor < 0 or self.descriptor >= 2 ** 6: raise ConversionError("Descriptor out of range.") return encode_cmd_and_payload(self.CODE, encoded_payload=self.descriptor) def __str__(self) -> str: return f'<DeviceDescriptorRead descriptor="{self.descriptor}" />' class DeviceDescriptorResponse(APCI): CODE = APCIService.DEVICE_DESCRIPTOR_RESPONSE
MIT License
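A round-trip sketch using the DeviceDescriptorRead sibling class shown in this record's context (its encoder and decoder are included there, unlike DeviceDescriptorResponse): encode to raw APCI bytes, resolve the service, and decode again.

from xknx.telegram.apci import APCI, DeviceDescriptorRead

raw = DeviceDescriptorRead(descriptor=0).to_knx()   # two bytes: 0x03, 0x00
apci = APCI.resolve_apci(raw[0] << 8 | raw[1])      # service bits select DeviceDescriptorRead
apci.from_knx(raw)
print(apci)   # prints the DeviceDescriptorRead repr with descriptor 0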
chrisgilmerproj/brewday
brew/utilities/yeast.py
KaiserYeastModel.get_growth_rate
python
def get_growth_rate(self, initial_cells): if initial_cells < 1.4: return 1.4 + self.adjustment elif 1.4 <= initial_cells < 3.5: return 2.33 - 0.67 * initial_cells + self.adjustment else: return 0.0 + self.adjustment
initial_cells - Billion / gram extract (B/g)
https://github.com/chrisgilmerproj/brewday/blob/fd8251e5bf34c20342034187fb30d9fffc723aa8/brew/utilities/yeast.py#L226-L235
import math from ..constants import GAL_PER_LITER from ..constants import IMPERIAL_TYPES from ..constants import IMPERIAL_UNITS from ..constants import LITER_PER_GAL from ..constants import OZ_PER_G from ..constants import SI_TYPES from ..constants import SI_UNITS from ..exceptions import YeastException from ..validators import validate_units from .sugar import plato_to_sg from .sugar import sg_to_gu from .sugar import sg_to_plato __all__ = [ u"PITCH_RATE_MAP", u"pitch_rate_conversion", u"YeastModel", u"KaiserYeastModel", u"WhiteYeastModel", ] PITCH_RATE_MAP = { u"MFG Recommended (Ale, fresh yeast only)": 0.35, u"MFG Recommended+ (Ale, fresh yeast only)": 0.55, u"Pro Brewer (Ale, LG)": 0.75, u"Pro Brewer (Ale)": 1.0, u"Pro Brewer (Ale, HG)": 1.25, u"Pro Brewer (Lager, LG)": 1.5, u"Pro Brewer (Lager)": 1.75, u"Pro Brewer (Lager, HG)": 2.0, } def pitch_rate_conversion(pitch_rate, units=IMPERIAL_UNITS): plato_per_gu = sg_to_gu(plato_to_sg(1)) if units == IMPERIAL_UNITS: return pitch_rate * GAL_PER_LITER * plato_per_gu elif units == SI_UNITS: return pitch_rate * LITER_PER_GAL / plato_per_gu class YeastModel(object): METHOD_TO_GROWTH_ADJ = {u"no agitation": 0.0, u"shaking": 0.0, u"stir plate": 0.0} def __init__(self, method, units=IMPERIAL_UNITS): if method not in self.METHOD_TO_GROWTH_ADJ.keys(): raise YeastException( u"Method '{}' not allowed for yeast model".format(method) ) self.method = method self.adjustment = self.METHOD_TO_GROWTH_ADJ[method] self.set_units(units) def set_units(self, units): self.units = validate_units(units) if self.units == IMPERIAL_UNITS: self.types = IMPERIAL_TYPES elif self.units == SI_UNITS: self.types = SI_TYPES def get_inoculation_rate(self, growth_rate): raise NotImplementedError def get_growth_rate(self, inoculation_rate): raise NotImplementedError def get_viability(self, days_since_manufacture): viability = 1.0 - days_since_manufacture * (0.21 / 30.0) if viability < 0: return 0.0 return viability def get_yeast_pitch_rate( self, original_gravity=1.050, final_volume=5.0, target_pitch_rate=1.42, yeast_type=u"liquid", cells_per_pack=100, num_packs=1, days_since_manufacture=30, ): viability = self.get_viability(days_since_manufacture) cells = cells_per_pack * num_packs * viability if self.units == IMPERIAL_UNITS: modifier = sg_to_gu(original_gravity) elif self.units == SI_UNITS: modifier = sg_to_plato(original_gravity) pitch_rate_as_is = cells / final_volume / modifier pitch_rate_cells = target_pitch_rate * final_volume * modifier if cells <= 0.0: required_growth_rate = 0.0 else: required_growth_rate = pitch_rate_cells / cells return { u"original_gravity": original_gravity, u"final_volume": final_volume, u"target_pitch_rate": target_pitch_rate, u"viability": round(viability, 2), u"cells": round(cells, 2), u"pitch_rate_as_is": round(pitch_rate_as_is, 2), u"pitch_rate_cells": round(pitch_rate_cells, 2), u"cells_needed": round(pitch_rate_cells - cells, 2), u"required_growth_rate": round(required_growth_rate, 2), u"units": self.units, } def get_starter_volume( self, available_cells, starter_volume=2.0 * GAL_PER_LITER, original_gravity=1.036, ): GPL = ( 2.845833 ) dme = GPL * sg_to_gu(original_gravity) * starter_volume if self.units == IMPERIAL_UNITS: inoculation_rate = available_cells / ( starter_volume * LITER_PER_GAL ) dme = dme * OZ_PER_G * LITER_PER_GAL elif self.units == SI_UNITS: inoculation_rate = available_cells / starter_volume growth_rate = self.get_growth_rate(inoculation_rate) end_cell_count = available_cells * (growth_rate + 1) return { u"available_cells": 
round(available_cells, 2), u"starter_volume": round(starter_volume, 2), u"original_gravity": original_gravity, u"dme": round(dme, 2), u"inoculation_rate": round(inoculation_rate, 2), u"growth_rate": round(growth_rate, 2), u"end_cell_count": round(end_cell_count, 2), u"units": self.units, } def get_resulting_pitch_rate( self, starter_cell_count, original_gravity=1.036, final_volume=5.0 ): if self.units == IMPERIAL_UNITS: modifier = sg_to_gu(original_gravity) elif self.units == SI_UNITS: modifier = sg_to_plato(original_gravity) pitch_rate = starter_cell_count / final_volume / modifier return pitch_rate class KaiserYeastModel(YeastModel): METHOD_TO_GROWTH_ADJ = {u"stir plate": 0.0} def __init__(self, method=u"stir plate", units=IMPERIAL_UNITS): return super(KaiserYeastModel, self).__init__(method, units=units) def get_inoculation_rate(self, growth_rate): if 0 < growth_rate < 1.4: return (2.33 - growth_rate) / 0.67 elif 1.4 <= growth_rate: return 1.4
MIT License
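A worked check of the piecewise growth-rate curve implemented above (stir plate model, zero adjustment): below 1.4 B/g the rate saturates at 1.4, between 1.4 and 3.5 it follows 2.33 - 0.67 * initial_cells, and above that no growth is predicted.

from brew.utilities.yeast import KaiserYeastModel

model = KaiserYeastModel()            # method defaults to 'stir plate', adjustment 0.0
print(model.get_growth_rate(1.0))     # 1.4  (saturated region)
print(model.get_growth_rate(2.0))     # 0.99 (2.33 - 0.67 * 2.0)
print(model.get_growth_rate(4.0))     # 0.0  (inoculation too dense to grow)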
mozilla/app-validator
appvalidator/testcases/javascript/acorn.py
JSReflectException.line_num
python
def line_num(self, line_num): self.line = int(line_num) return self
Set the line number and return self for chaining
https://github.com/mozilla/app-validator/blob/2f8f85c4e83d1a76bb39c9f4b6844a7de7259711/appvalidator/testcases/javascript/acorn.py#L62-L65
import re import subprocess import json from appvalidator.contextgenerator import ContextGenerator import appvalidator.unicodehelper as unicodehelper JS_ESCAPE = re.compile("\\\\+[ux]", re.I) def get_tree(code, err=None, filename=None, shell_noop=None): if not code: return None try: return _get_tree(code) except JSReflectException as exc: str_exc = str(exc).strip("'\"") if "SyntaxError" in str_exc or "ReferenceError" in str_exc: err.warning( err_id=("testcases_scripting", "test_js_file", "syntax_error"), warning="JavaScript Compile-Time Error", description=["A compile-time error in the JavaScript halted " "validation of that file.", "Message: %s" % str_exc.split(":", 1)[-1].strip()], filename=filename, line=exc.line, context=ContextGenerator(code)) elif "InternalError: too much recursion" in str_exc: err.notice( err_id=("testcases_scripting", "test_js_file", "recursion_error"), notice="JS too deeply nested for validation", description="A JS file was encountered that could not be " "valiated due to limitations with Spidermonkey. " "It should be manually inspected.", filename=filename) else: err.error( err_id=("testcases_scripting", "test_js_file", "retrieving_tree"), error="JS reflection error prevented validation", description=["An error in the JavaScript file prevented it " "from being properly read by the Spidermonkey JS " "engine.", str(exc)], filename=filename) class JSReflectException(Exception): def __init__(self, value): self.value = value self.line = None def __str__(self): return repr(self.value)
BSD 3-Clause New or Revised License
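The line_num setter above returns self so the exception can be raised in a single expression; a small sketch with made-up error text.

from appvalidator.testcases.javascript.acorn import JSReflectException

try:
    raise JSReflectException("SyntaxError: missing ; before statement").line_num("12")
except JSReflectException as exc:
    print(exc.line, str(exc))   # 12 and the repr of the stored message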
tanghaibao/jcvi
jcvi/assembly/soap.py
prepare
python
def prepare(args): from jcvi.formats.base import write_file p = OptionParser(prepare.__doc__ + FastqNamings) p.add_option("-K", default=45, type="int", help="K-mer size") p.add_option( "--assemble_1st_rank_only", default=False, action="store_true", help="Assemble the first rank only, other libs asm_flags=2", ) p.add_option("--scaffold", help="Only perform scaffolding") p.add_option("--gapclose", help="Only perform gap closure") p.set_cpus() opts, args = p.parse_args(args) if len(args) < 1: sys.exit(not p.print_help()) fnames = args K = opts.K for x in fnames: assert op.exists(x), "File `{0}` not found.".format(x) a1st = opts.assemble_1st_rank_only cfgfile = "soap.config" gc_cfgfile = "soap.gc.config" fw = open(cfgfile, "w") fw_gc = open(gc_cfgfile, "w") libs = get_libs(fnames) rank = 0 max_rd_len = max(readlen([f]) for f in fnames) block = "max_rd_len={0}\n".format(max_rd_len) for stream in (sys.stderr, fw, fw_gc): print(block, file=stream) singletons = [] for lib, fs in libs: if lib.size == 0: singletons += fs continue for lib, fs in libs: size = lib.size if size == 0: continue rank += 1 block = "[LIB]\n" block += "avg_ins={0}\n".format(size) block += "reverse_seq={0}\n".format(lib.reverse_seq) asm_flags = 2 if (rank > 1 and a1st) else lib.asm_flags block += "asm_flags={0}\n".format(asm_flags) block += "rank={0}\n".format(rank) if lib.reverse_seq: pair_num_cutoff = 3 block += "pair_num_cutoff={0}\n".format(pair_num_cutoff) block += "map_len=35\n" for f in fs: if ".1." in f: tag = "q1" elif ".2." in f: tag = "q2" block += "{0}={1}\n".format(tag, f) if rank == 1: for s in singletons: tag = "q" if is_fastq(s) else "f" block += tag + "={0}\n".format(s) print(block, file=sys.stderr) print(block, file=fw) if asm_flags > 2: print(block, file=fw_gc) runfile = "run.sh" scaffold = opts.scaffold bb = 63 if K <= 63 else 127 binary = "SOAPdenovo-{0}mer".format(bb) header = SOAPHEADER.format(opts.cpus, K, binary) if opts.gapclose: gapclose = opts.gapclose outfile = gapclose.rsplit(".", 1)[0] + ".closed.fasta" template = header + GCRUNG.format(gapclose, outfile) else: template = header + (SCFRUN % scaffold if scaffold else SOAPRUN) write_file(runfile, template) fw.close() fw_gc.close()
%prog prepare *.fastq Scan input fastq files (see below) and write SOAP config files based on inputfiles. Use "--scaffold contigs.fasta" to perform scaffolding.
https://github.com/tanghaibao/jcvi/blob/3b161796234670ce1c4894974eaeb590d35cf2a2/jcvi/assembly/soap.py#L226-L327
import os.path as op
import sys

from jcvi.formats.fastq import guessoffset, readlen, is_fastq
from jcvi.assembly.base import FastqNamings, Library, get_libs
from jcvi.apps.base import OptionParser, ActionDispatcher, need_update, sh


class FillLine(object):
    def __init__(self, row):
        args = row.split()
        self.start = int(args[0])
        self.end = int(args[1])
        self.leftextend = int(args[2])
        self.rightextend = int(args[3])
        self.closed = int(args[4]) == 1
        self.extendlength = int(args[5])
        self.before = int(args[6])
        self.after = int(args[7])
        if self.after > 0 and (self.after & 0x80000000):
            self.after += -0x100000000

    @property
    def delta(self):
        return self.after - self.before


def main():

    actions = (
        ("clean", "clean and dedup paired FASTQ files"),
        ("correct", "correct reads using ErrorCorrection"),
        ("prepare", "prepare SOAP config files and run script"),
        ("fillstats", "build stats on .fill file from GapCloser"),
    )
    p = ActionDispatcher(actions)
    p.dispatch(globals())


SOAPHEADER = """
P={0}
K={1}
S=soap.config
G=soap.gc.config
C={2}
A=asm$K
"""

GCRUN = (
    "GapCloser_v1.12 -a ${A}.scafSeq -b $G -l 155 -o ${A}.closed.scafSeq -p 31 -t $P"
)
GCRUNG = "GapCloser_v1.12 -a {0} -b $G -l 155 -o {1} -p 31 -t $P"

SOAPRUN = (
    """
$C pregraph -s $S -d 1 -K $K -o $A -R -p $P
$C contig -s $S -g $A -M 1 -R -p $P
$C map -s $S -g $A -p $P
$C scaff -g $A -F -p $P
"""
    + GCRUN
)
SCFRUN = (
    """
prepare -K $K -c %s -g $A
$C map -s $S -g $A -p $P
$C scaff -z -g $A -F -p $P
"""
    + GCRUN
)


def get_size(filename):

    library_name = lambda x: "-".join(op.basename(x).split(".")[0].split("-")[:2])
    lib = Library(library_name(filename))
    return lib.size


def correct(args):
    p = OptionParser(correct.__doc__)
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) < 1:
        sys.exit(not p.print_help())

    lstfile = "reads2cor.lst"
    fw = open(lstfile, "w")
    print("\n".join(x for x in args if x[:2] == "PE"), file=fw)
    fw.close()

    p1 = args[0]
    offset = guessoffset([p1])
    cpus = opts.cpus

    freq = "output.freq.cz"
    freqlen = freq + ".len"
    if need_update(args, (freq, freqlen)):
        cmd = "KmerFreq_AR_v2.0 -k 17 -c -1 -q {0}".format(offset)
        cmd += " -m 1 -t {0}".format(cpus)
        cmd += " -p output {0}".format(lstfile)
        sh(cmd)

    fw = open(lstfile, "w")
    print("\n".join(args), file=fw)
    fw.close()

    cmd = "Corrector_AR_v2.0 -k 17 -l 3 -m 5 -c 5 -a 0 -e 1 -w 0 -r 45"
    cmd += " -Q {0} -q 30 -x 8 -t {1} -o 1 ".format(offset, cpus)
    cmd += " {0} {1} {2}".format(freq, freqlen, lstfile)
    sh(cmd)


def clean(args):
    p = OptionParser(clean.__doc__)
    p.add_option("-a", default=0, type="int", help="Trim length at 5' end")
    p.add_option("-b", default=50, type="int", help="Trim length at 3' end")
    p.set_cpus()
    opts, args = p.parse_args(args)

    if len(args) == 2:
        p1, p2 = args
        size = get_size(p1)
    elif len(args) == 3:
        p1, p2, size = args
        size = int(size)
    else:
        sys.exit(not p.print_help())

    pf = p1.split(".")[0]
    cpus = opts.cpus
    offset = guessoffset([p1])
    a, b = opts.a, opts.b

    p1_clean = p1 + ".clean"
    p1_cleangz = p1_clean + ".gz"
    p2_clean = p2 + ".clean"
    p2_cleangz = p2_clean + ".gz"
    if need_update([p1, p2], [p1_cleangz, p2_cleangz]):
        cmd = "SOAPfilter_v2.0 -t {0} -m 2000000 -p -y -z -g".format(cpus)
        cmd += " -q {0} -w 10 -B 50 -f 0".format(offset)
        cmd += " -l {0} -a {1} -b {2} -c {1} -d {2}".format(size, a, b, a, b)
        cmd += " {0} {1} {2}.clean.stat {3} {4}".format(p1, p2, pf, p1_clean, p2_clean)
        sh(cmd)


def fillstats(args):
    from jcvi.utils.cbook import SummaryStats, percentage, thousands

    p = OptionParser(fillstats.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    (fillfile,) = args
    fp = open(fillfile)
    scaffolds = 0
    gaps = []
    for row in fp:
        if row[0] == ">":
            scaffolds += 1
            continue
        fl = FillLine(row)
        gaps.append(fl)

    print("{0} scaffolds in total".format(scaffolds), file=sys.stderr)

    closed = [x for x in gaps if x.closed]
    closedbp = sum(x.before for x in closed)
    notClosed = [x for x in gaps if not x.closed]
    notClosedbp = sum(x.before for x in notClosed)

    totalgaps = len(closed) + len(notClosed)

    print(
        "Closed gaps: {0} size: {1} bp".format(
            percentage(len(closed), totalgaps), thousands(closedbp)
        ),
        file=sys.stderr,
    )
    ss = SummaryStats([x.after for x in closed])
    print(ss, file=sys.stderr)

    ss = SummaryStats([x.delta for x in closed])
    print("Delta:", ss, file=sys.stderr)

    print(
        "Remaining gaps: {0} size: {1} bp".format(
            percentage(len(notClosed), totalgaps), thousands(notClosedbp)
        ),
        file=sys.stderr,
    )
    ss = SummaryStats([x.after for x in notClosed])
    print(ss, file=sys.stderr)
BSD 2-Clause Simplified License
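The `prepare` entry above is easiest to read next to a concrete invocation. The sketch below is illustrative only: the fastq file names and insert sizes are invented, and the "PE-200.1.fastq"/"PE-200.2.fastq" naming is assumed to follow the FastqNamings convention referenced by the function (the library name encodes the insert size; ".1."/".2." mark the read pairs). If the files exist, the call writes soap.config, soap.gc.config and run.sh into the current directory; it does not launch the assembler.

# Hypothetical driver for jcvi's SOAP prepare step; file names are made up.
from jcvi.assembly.soap import prepare

# Two libraries with assumed 200 bp and 2000 bp insert sizes.
prepare([
    "-K", "63",
    "PE-200.1.fastq", "PE-200.2.fastq",
    "MP-2000.1.fastq", "MP-2000.2.fastq",
])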
ousret/kiss-headers
kiss_headers/utils.py
header_name_to_class
python
def header_name_to_class(name: str, root_type: Type) -> Type:

    normalized_name = normalize_str(name).replace("_", "")

    for subclass in root_type.__subclasses__():

        class_name = extract_class_name(subclass)

        if class_name is None:
            continue

        if (
            not (
                hasattr(subclass, "__override__")
                and subclass.__override__ is not None
            )
            and normalize_str(class_name.split(".")[-1]) == normalized_name
        ):
            return subclass

        if subclass.__subclasses__():
            try:
                return header_name_to_class(name, subclass)
            except TypeError:
                continue

    raise TypeError(
        "Cannot find a class matching header named '{name}'.".format(name=name)
    )
The opposite of class_to_header_name function. Will raise TypeError if no
corresponding entry is found. Do it recursively from the root type.

>>> from kiss_headers.builder import CustomHeader, ContentType, XContentTypeOptions, LastModified, Date
>>> header_name_to_class("Content-Type", CustomHeader)
<class 'kiss_headers.builder.ContentType'>
>>> header_name_to_class("Last-Modified", CustomHeader)
<class 'kiss_headers.builder.LastModified'>
https://github.com/ousret/kiss-headers/blob/2a2eaf9b87670a8994af57312a4003fdb6f16c22/kiss_headers/utils.py#L188-L223
from email.header import decode_header
from re import findall, search, sub
from typing import Any, Iterable, List, Optional, Set, Tuple, Type

RESERVED_KEYWORD: Set[str] = {
    "and_",
    "assert_",
    "in_",
    "not_",
    "pass_",
    "finally_",
    "while_",
    "yield_",
    "is_",
    "as_",
    "break_",
    "return_",
    "elif_",
    "except_",
    "def_",
    "from_",
    "for_",
}


def normalize_str(string: str) -> str:
    return string.lower().replace("-", "_")


def normalize_list(strings: List[str]) -> List[str]:
    return list(map(normalize_str, strings))


def unpack_protected_keyword(name: str) -> str:
    if len(name) < 2:
        return name

    if name[0] == "_" and name[1].isdigit():
        name = name[1:]

    if name in RESERVED_KEYWORD:
        name = name[:-1]

    return name


def extract_class_name(type_: Type) -> Optional[str]:
    r = findall(r"<class '([a-zA-Z0-9._]+)'>", str(type_))
    return r[0] if r else None


def header_content_split(string: str, delimiter: str) -> List[str]:
    if len(delimiter) != 1 or delimiter not in {";", ",", " "}:
        raise ValueError("Delimiter should be either semi-colon, a coma or a space.")

    in_double_quote: bool = False
    in_parenthesis: bool = False
    in_value: bool = False
    is_on_a_day: bool = False

    result: List[str] = [""]

    for letter, index in zip(string, range(0, len(string))):

        if letter == '"':
            in_double_quote = not in_double_quote

            if in_value and not in_double_quote:
                in_value = False

        elif letter == "(" and not in_parenthesis:
            in_parenthesis = True
        elif letter == ")" and in_parenthesis:
            in_parenthesis = False
        else:
            is_on_a_day = index >= 3 and string[index - 3 : index] in {
                "Mon",
                "Tue",
                "Wed",
                "Thu",
                "Fri",
                "Sat",
                "Sun",
            }

            if not in_double_quote:
                if not in_value and letter == "=":
                    in_value = True
                elif letter == ";" and in_value:
                    in_value = False

                if in_value and letter == delimiter and not is_on_a_day:
                    in_value = False

        if letter == delimiter and (
            (in_value or in_double_quote or in_parenthesis or is_on_a_day) is False
        ):
            result[-1] = result[-1].lstrip().rstrip()
            result.append("")
            continue

        result[-1] += letter

    if result:
        result[-1] = result[-1].lstrip().rstrip()

    return result


def class_to_header_name(type_: Type) -> str:
    if hasattr(type_, "__override__") and type_.__override__ is not None:
        return type_.__override__

    class_raw_name: str = str(type_).split("'")[-2].split(".")[-1]

    if class_raw_name.endswith("_"):
        class_raw_name = class_raw_name[:-1]

    if class_raw_name.startswith("_"):
        class_raw_name = class_raw_name[1:]

    header_name: str = str()

    for letter in class_raw_name:
        if letter.isupper() and header_name != "":
            header_name += "-" + letter
            continue
        header_name += letter

    return header_name
MIT License
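For quick orientation, here is a small usage sketch of the header_name_to_class record above. It mirrors the doctest in the docstring and assumes only that kiss_headers is installed; the unknown header name in the second call is deliberately invented to show the TypeError path.

# Resolve a header name to its builder class, then probe the failure case.
from kiss_headers.builder import CustomHeader
from kiss_headers.utils import header_name_to_class

print(header_name_to_class("Content-Type", CustomHeader))
# -> <class 'kiss_headers.builder.ContentType'>

try:
    header_name_to_class("Not-A-Known-Header", CustomHeader)
except TypeError as exc:
    print(exc)  # Cannot find a class matching header named 'Not-A-Known-Header'.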
jaantollander/crowddynamics
crowddynamics/cli.py
run
python
def run(context, loglevel, num):
    setup_logging(loglevel)
    context.obj = dict(num=num)
Run simulation from the command-line.
https://github.com/jaantollander/crowddynamics/blob/a5858c02c06ed72f49b7bd6aaabd7cf16b3054c3/crowddynamics/cli.py#L67-L70
import logging
import os
from collections import OrderedDict
from pprint import pformat

import click

from crowddynamics import __version__
from crowddynamics.logging import setup_logging, LOGLEVELS
from crowddynamics.simulation.multiagent import MultiAgentSimulation
from crowddynamics.traits import class_own_traits, trait_to_option
from crowddynamics.utils import import_subclasses


class Colors:
    NEUTRAL = 'blue'
    POSITIVE = 'green'
    NEGATIVE = 'red'


def import_simulations(dir_path='.'):
    d = OrderedDict()
    for path in os.listdir(dir_path):
        base, ext = os.path.splitext(path)
        if ext == '.py':
            d.update(import_subclasses(path, MultiAgentSimulation))
    return d


@click.group(help="CrowdDynamics {version}. A tool for building and running "
                  "crowd simulations.".format(version=__version__))
@click.version_option(__version__)
def main():
    pass


@main.command('list')
def list_of_simulations():
    d = import_simulations()
    click.secho('List of available simulations:', fg=Colors.NEUTRAL)
    click.secho(pformat(d), fg=Colors.POSITIVE)


@main.command()
@click.option('--directory', '-d', default='.')
@click.option('--basename', '-n')
def concat_npy(directory, basename):
    import numpy as np
    from crowddynamics.io import load_npy_concatenated
    path = os.path.abspath(directory)
    arr = load_npy_concatenated(path, basename)
    np.save(os.path.join(path, basename + '.npy'), arr)


@main.group(chain=True)
@click.option('--loglevel', type=click.Choice(LOGLEVELS),
              default=logging.INFO,
              help='Choices for setting logging level.')
@click.option('--num', type=int, default=1,
              help='Number of simulations to run.')
@click.pass_context
MIT License
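The run record above only defines the chained click group that simulation subcommands attach to later in cli.py, so the least presumptuous way to exercise it is to ask for its help text. The sketch below uses click's test runner; it assumes crowddynamics is importable and makes no claim about which simulation subcommands happen to be registered.

# Drive the CLI group in-process with click's CliRunner (illustrative only).
from click.testing import CliRunner

from crowddynamics.cli import main

runner = CliRunner()
result = runner.invoke(main, ["run", "--help"])
print(result.output)  # shows --loglevel / --num and any chained simulation commands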