Dataset columns (all string-valued):
  repository_name       lengths 7 to 107
  function_path         lengths 4 to 190
  function_identifier   lengths 1 to 236
  language              1 distinct value
  function              lengths 9 to 647k
  docstring             lengths 5 to 488k
  function_url          lengths 71 to 285
  context               lengths 0 to 2.51M
  license               5 distinct values
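The rows below can be inspected with the Hugging Face datasets library once the dump is hosted as a dataset; the repository id used here is only a placeholder, not the real one. A minimal sketch:

from datasets import load_dataset

# Placeholder dataset id -- substitute the real repository on the Hub.
ds = load_dataset("someuser/python-functions-with-docstrings", split="train")
row = ds[0]
for column in ("repository_name", "function_path", "function_identifier",
               "language", "function", "docstring", "function_url", "license"):
    print(column, "=", str(row[column])[:80])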
clusterhq/flocker
flocker/node/agents/blockdevice.py
IBlockDeviceAsyncAPI.allocation_unit
python
def allocation_unit():
See ``IBlockDeviceAPI.allocation_unit``. :returns: A ``Deferred`` that fires with ``int`` size of the allocation_unit.
https://github.com/clusterhq/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/flocker/node/agents/blockdevice.py#L952-L958
import itertools from uuid import UUID from stat import S_IRWXU, S_IRWXG, S_IRWXO from errno import EEXIST from datetime import timedelta from eliot import MessageType, ActionType, Field, Logger from eliot.serializers import identity from zope.interface import implementer, Interface, provider from pyrsistent import PClass, field, pmap_field, pset_field, thaw, CheckedPMap from characteristic import with_cmp from twisted.python.reflect import safe_repr from twisted.internet.defer import succeed, fail from twisted.python.filepath import FilePath from twisted.python.components import proxyForInterface from twisted.python.constants import ( Values, ValueConstant, Names, NamedConstant, ) from .blockdevice_manager import BlockDeviceManager from ._logging import DATASET_ID, COUNT from .. import ( IDeployer, ILocalState, IStateChange, in_parallel, NoOp, ) from .._deploy import NotInUseDatasets from ...control import NodeState, Manifestation, Dataset, NonManifestDatasets from ...control._model import pvector_field from ...common import RACKSPACE_MINIMUM_VOLUME_SIZE, auto_threaded, provides from ...common.algebraic import TaggedUnionInvariant _logger = Logger() DEFAULT_DATASET_SIZE = RACKSPACE_MINIMUM_VOLUME_SIZE PROFILE_METADATA_KEY = u"clusterhq:flocker:profile" class DatasetStates(Names): NON_EXISTENT = NamedConstant() UNREGISTERED = NamedConstant() REGISTERED = NamedConstant() ATTACHED_ELSEWHERE = NamedConstant() ATTACHED_TO_DEAD_NODE = NamedConstant() NON_MANIFEST = NamedConstant() ATTACHED_NO_FILESYSTEM = NamedConstant() ATTACHED = NamedConstant() MOUNTED = NamedConstant() DELETED = NamedConstant() class DiscoveredDataset(PClass): state = field( invariant=lambda state: (state in DatasetStates.iterconstants(), "Not a valid state"), mandatory=True, ) dataset_id = field(type=UUID, mandatory=True) maximum_size = field(type=(int, long)) blockdevice_id = field(type=unicode, mandatory=True) device_path = field(FilePath) mount_point = field(FilePath) __invariant__ = TaggedUnionInvariant( tag_attribute='state', attributes_for_tag={ DatasetStates.ATTACHED_ELSEWHERE: {'maximum_size'}, DatasetStates.ATTACHED_TO_DEAD_NODE: {'maximum_size'}, DatasetStates.NON_MANIFEST: {'maximum_size'}, DatasetStates.UNREGISTERED: {'maximum_size'}, DatasetStates.REGISTERED: set(), DatasetStates.ATTACHED_NO_FILESYSTEM: { 'device_path', 'maximum_size'}, DatasetStates.ATTACHED: { 'device_path', 'maximum_size'}, DatasetStates.MOUNTED: { 'device_path', 'mount_point', 'maximum_size'}, }, ) class DesiredDataset(PClass): state = field( invariant=lambda state: (state in DatasetStates.iterconstants(), "Not a valid state"), mandatory=True, ) dataset_id = field(type=UUID, mandatory=True) maximum_size = field(type=(int, long)) metadata = pmap_field( key_type=unicode, value_type=unicode, ) mount_point = field(FilePath) filesystem = field(unicode, initial=u"ext4", mandatory=True, invariant=lambda v: (v == "ext4", "Must be 'ext4'.")) __invariant__ = TaggedUnionInvariant( tag_attribute='state', attributes_for_tag={ DatasetStates.NON_MANIFEST: {"maximum_size"}, DatasetStates.MOUNTED: {"maximum_size", "mount_point"}, DatasetStates.DELETED: set(), }, ) class IDatasetStateChangeFactory(Interface): def from_state_and_config(discovered_dataset, desired_dataset): class ICalculator(Interface): def calculate_changes_for_datasets( discovered_datasets, desired_datasets, ): class VolumeException(Exception): def __init__(self, blockdevice_id): if not isinstance(blockdevice_id, unicode): raise TypeError( 'Unexpected blockdevice_id type. 
' 'Expected unicode. ' 'Got {!r}.'.format(blockdevice_id) ) Exception.__init__(self, blockdevice_id) self.blockdevice_id = blockdevice_id class UnknownVolume(VolumeException): class AlreadyAttachedVolume(VolumeException): class UnattachedVolume(VolumeException): class DatasetExists(Exception): def __init__(self, blockdevice): Exception.__init__(self, blockdevice) self.blockdevice = blockdevice class FilesystemExists(Exception): def __init__(self, device): Exception.__init__(self, device) self.device = device class UnknownInstanceID(Exception): def __init__(self, blockdevice): Exception.__init__( self, 'Could not find valid instance ID for {}'.format(blockdevice)) self.blockdevice = blockdevice DATASET = Field( u"dataset", lambda dataset: dataset.dataset_id, u"The unique identifier of a dataset." ) VOLUME = Field( u"volume", lambda volume: volume.blockdevice_id, u"The unique identifier of a volume." ) FILESYSTEM_TYPE = Field.forTypes( u"filesystem_type", [unicode], u"The name of a filesystem." ) MOUNTPOINT = Field( u"mountpoint", lambda path: path.path, u"The absolute path to the location on the node where the dataset will be " u"mounted.", ) BLOCK_DEVICE_ID = Field( u"block_device_id", lambda id: unicode(id), u"The unique identifier if the underlying block device." ) BLOCK_DEVICE_SIZE = Field( u"block_device_size", identity, u"The size of the underlying block device." ) BLOCK_DEVICE_COMPUTE_INSTANCE_ID = Field( u"block_device_compute_instance_id", identity, u"An identifier for the host to which the underlying block device is " u"attached.", ) BLOCK_DEVICE_PATH = Field( u"block_device_path", lambda path: path.path, u"The system device file for an attached block device." ) PROFILE_NAME = Field.forTypes( u"profile_name", [unicode], u"The name of a profile for a volume." ) MAXIMUM_SIZE = Field.forTypes( u"maximum_size", [int], u"The maximum size of a volume.", ) METADATA = Field( u"metadata", thaw, u"The metadata of a dataset.", ) CREATE_BLOCK_DEVICE_DATASET = ActionType( u"agent:blockdevice:create", [DATASET_ID, MAXIMUM_SIZE, METADATA], [], u"A block-device-backed dataset is being created.", ) UNMOUNT_BLOCK_DEVICE = ActionType( u"agent:blockdevice:unmount", [DATASET_ID], [], u"A block-device-backed dataset is being unmounted.", ) UNMOUNT_BLOCK_DEVICE_DETAILS = MessageType( u"agent:blockdevice:unmount:details", [BLOCK_DEVICE_ID, BLOCK_DEVICE_PATH], u"The device file for a block-device-backed dataset has been discovered." ) MOUNT_BLOCK_DEVICE = ActionType( u"agent:blockdevice:mount", [DATASET_ID, BLOCK_DEVICE_PATH], [], u"A block-device-backed dataset is being mounted.", ) MOUNT_BLOCK_DEVICE_DETAILS = MessageType( u"agent:blockdevice:mount:details", [BLOCK_DEVICE_PATH], u"The device file for a block-device-backed dataset has been discovered." ) ATTACH_VOLUME = ActionType( u"agent:blockdevice:attach_volume", [DATASET_ID, BLOCK_DEVICE_ID], [], u"The volume for a block-device-backed dataset is being attached." ) DETACH_VOLUME = ActionType( u"agent:blockdevice:detach_volume", [DATASET_ID, BLOCK_DEVICE_ID], [], u"The volume for a block-device-backed dataset is being detached." ) DESTROY_VOLUME = ActionType( u"agent:blockdevice:destroy_volume", [BLOCK_DEVICE_ID], [], u"The volume for a block-device-backed dataset is being destroyed." 
) CREATE_FILESYSTEM = ActionType( u"agent:blockdevice:create_filesystem", [BLOCK_DEVICE_PATH, FILESYSTEM_TYPE], [], u"A block device is being initialized with a filesystem.", ) INVALID_DEVICE_PATH_VALUE = Field( u"invalid_value", lambda value: safe_repr(value), u"A value returned from IBlockDeviceAPI.get_device_path which could not " u"possibly be correct. This likely indicates a bug in the " "IBlockDeviceAPI implementation.", ) INVALID_DEVICE_PATH = MessageType( u"agent:blockdevice:discover_state:invalid_device_path", [DATASET_ID, INVALID_DEVICE_PATH_VALUE], u"The device path given by the IBlockDeviceAPI implementation was " u"invalid.", ) CREATE_VOLUME_PROFILE_DROPPED = MessageType( u"agent:blockdevice:profiles:create_volume_with_profiles:profile_dropped", [DATASET_ID, PROFILE_NAME], u"The profile of a volume was dropped during creation because the backend " u"does not support profiles. Use a backend that provides " u"IProfiledBlockDeviceAPI to get profile support." ) DISCOVERED_RAW_STATE = MessageType( u"agent:blockdevice:raw_state", [Field(u"raw_state", safe_repr)], u"The discovered raw state of the node's block device volumes.") UNREGISTERED_VOLUME_ATTACHED = MessageType( u"agent:blockdevice:unregistered_volume_attached", [DATASET_ID, BLOCK_DEVICE_ID], u"A blockdevice that isn't registered as belonging to a dataset is " u"attached to an instance." ) FUNCTION_NAME = Field.for_types( "function", [bytes, unicode], u"The name of the function.") CALL_LIST_VOLUMES = MessageType( u"flocker:node:agents:blockdevice:list_volumes", [FUNCTION_NAME, COUNT], u"list_volumes called.",) REGISTER_BLOCKDEVICE = ActionType( u"agent:blockdevice:register", [DATASET_ID, BLOCK_DEVICE_ID], [], u"A block-device is being registered as belonging to a dataset.", ) def _volume_field(): return field( type=BlockDeviceVolume, mandatory=True, factory=lambda x: x ) @with_cmp(["blockdevice_id", "dataset_id", "size", "attached_to"]) class BlockDeviceVolume(PClass): blockdevice_id = field(type=unicode, mandatory=True) size = field(type=(int, long), mandatory=True) attached_to = field( type=(unicode, type(None)), initial=None, mandatory=True ) dataset_id = field(type=UUID, mandatory=True) def _blockdevice_volume_from_datasetid(volumes, dataset_id): for volume in volumes: if volume.dataset_id == dataset_id: return volume @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class CreateFilesystem(PClass): device = field(type=FilePath, mandatory=True) filesystem = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( device=discovered_dataset.device_path, filesystem=desired_dataset.filesystem, ) @property def eliot_action(self): return CREATE_FILESYSTEM( _logger, block_device_path=self.device, filesystem_type=self.filesystem ) def run(self, deployer, state_persister): try: _ensure_no_filesystem(self.device, deployer.block_device_manager) deployer.block_device_manager.make_filesystem(self.device, self.filesystem) except: return fail() return succeed(None) def _ensure_no_filesystem(device, block_device_manager): if block_device_manager.has_filesystem(device): raise FilesystemExists(device) def _valid_size(size): if size % 1024 == 0: return (True, "") return ( False, "Filesystem size must be multiple of 1024, not %d" % (size,) ) @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class MountBlockDevice(PClass): device_path = field(type=FilePath, mandatory=True) mountpoint = field(type=FilePath, mandatory=True) dataset_id = 
field(type=UUID, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=desired_dataset.dataset_id, device_path=discovered_dataset.device_path, mountpoint=desired_dataset.mount_point, ) @property def eliot_action(self): return MOUNT_BLOCK_DEVICE(_logger, dataset_id=self.dataset_id, block_device_path=self.device_path) def run(self, deployer, state_persister): try: self.mountpoint.makedirs() except OSError as e: if e.errno != EEXIST: return fail() self.mountpoint.parent().chmod(S_IRWXU) deployer.block_device_manager.mount(self.device_path, self.mountpoint) lostfound = self.mountpoint.child(b"lost+found") if self.mountpoint.children() == [lostfound]: lostfound.remove() self.mountpoint.chmod(S_IRWXU | S_IRWXG | S_IRWXO) self.mountpoint.restat() return succeed(None) @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class UnmountBlockDevice(PClass): dataset_id = field(type=UUID, mandatory=True) blockdevice_id = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=discovered_dataset.dataset_id, blockdevice_id=discovered_dataset.blockdevice_id, ) @property def eliot_action(self): return UNMOUNT_BLOCK_DEVICE(_logger, dataset_id=self.dataset_id) def run(self, deployer, state_persister): api = deployer.async_block_device_api deferred_device_path = api.get_device_path(self.blockdevice_id) def got_device(device): UNMOUNT_BLOCK_DEVICE_DETAILS( block_device_id=self.blockdevice_id, block_device_path=device ).write(_logger) deployer.block_device_manager.unmount(device) deferred_device_path.addCallback(got_device) return deferred_device_path @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class AttachVolume(PClass): dataset_id = field(type=UUID, mandatory=True) blockdevice_id = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=discovered_dataset.dataset_id, blockdevice_id=discovered_dataset.blockdevice_id, ) @property def eliot_action(self): return ATTACH_VOLUME(_logger, dataset_id=self.dataset_id, block_device_id=self.blockdevice_id) def run(self, deployer, state_persister): api = deployer.async_block_device_api getting_id = api.compute_instance_id() def got_compute_id(compute_instance_id): return api.attach_volume( self.blockdevice_id, attach_to=compute_instance_id, ) attaching = getting_id.addCallback(got_compute_id) return attaching @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class DetachVolume(PClass): dataset_id = field(type=UUID, mandatory=True) blockdevice_id = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=discovered_dataset.dataset_id, blockdevice_id=discovered_dataset.blockdevice_id, ) @property def eliot_action(self): return DETACH_VOLUME(_logger, dataset_id=self.dataset_id, block_device_id=self.blockdevice_id) def run(self, deployer, state_persister): api = deployer.async_block_device_api return api.detach_volume(self.blockdevice_id) @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class DestroyVolume(PClass): blockdevice_id = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls(blockdevice_id=discovered_dataset.blockdevice_id) @property def eliot_action(self): return DESTROY_VOLUME(_logger, 
block_device_id=self.blockdevice_id) def run(self, deployer, state_persister): api = deployer.async_block_device_api return api.destroy_volume(self.blockdevice_id) def allocated_size(allocation_unit, requested_size): allocation_unit = int(allocation_unit) requested_size = int(requested_size) previous_interval_size = ( (requested_size // allocation_unit) * allocation_unit ) if previous_interval_size < requested_size: return previous_interval_size + allocation_unit else: return requested_size @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class CreateBlockDeviceDataset(PClass): dataset_id = field(UUID, mandatory=True) maximum_size = field(type=(int, long), mandatory=True) metadata = pmap_field(unicode, unicode) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=desired_dataset.dataset_id, maximum_size=desired_dataset.maximum_size, metadata=desired_dataset.metadata, ) @property def eliot_action(self): return CREATE_BLOCK_DEVICE_DATASET( _logger, dataset_id=self.dataset_id, maximum_size=self.maximum_size, metadata=self.metadata, ) def _create_volume(self, deployer): api = deployer.block_device_api profile_name = self.metadata.get(PROFILE_METADATA_KEY) size = allocated_size(allocation_unit=api.allocation_unit(), requested_size=self.maximum_size) if profile_name: return ( deployer.profiled_blockdevice_api.create_volume_with_profile( dataset_id=self.dataset_id, size=size, profile_name=profile_name ) ) else: return api.create_volume(dataset_id=self.dataset_id, size=size) def run(self, deployer, state_persister): api = deployer.block_device_api try: check_for_existing_dataset(api, self.dataset_id) except: return fail() return self._create_volume(deployer) @implementer(IStateChange) @provider(IDatasetStateChangeFactory) class RegisterVolume(PClass): dataset_id = field(type=UUID, mandatory=True) blockdevice_id = field(type=unicode, mandatory=True) @classmethod def from_state_and_config(cls, discovered_dataset, desired_dataset): return cls( dataset_id=discovered_dataset.dataset_id, blockdevice_id=discovered_dataset.blockdevice_id, ) @property def eliot_action(self): return REGISTER_BLOCKDEVICE( dataset_id=self.dataset_id, block_device_id=self.blockdevice_id, ) def run(self, deployer, state_persister): return state_persister.record_ownership( dataset_id=self.dataset_id, blockdevice_id=self.blockdevice_id, ) class IBlockDeviceAsyncAPI(Interface):
Apache License 2.0
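Because IBlockDeviceAsyncAPI.allocation_unit (the flocker entry above) fires a Deferred with an int rather than returning the int directly, a caller attaches a callback. A minimal sketch, assuming api provides IBlockDeviceAsyncAPI; the callback is illustrative.

def _report_unit(unit):
    # unit is the int allocation unit, in bytes
    print("allocation unit: %d bytes" % (unit,))

d = api.allocation_unit()  # twisted Deferred
d.addCallback(_report_unit)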
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/modification_operation_result.py
ModificationOperationResult.dest
python
def dest(self, dest):
    self._dest = dest
Sets the dest of this ModificationOperationResult. Gets or sets the link to the dest document (result of the modification operation). # noqa: E501 :param dest: The dest of this ModificationOperationResult. # noqa: E501 :type: FileLink
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/modification_operation_result.py#L79-L87
import pprint import re import datetime import six import json class ModificationOperationResult(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'dest': 'FileLink', 'source': 'FileLink' } attribute_map = { 'dest': 'Dest', 'source': 'Source' } def __init__(self, dest=None, source=None): self._dest = None self._source = None self.discriminator = None if dest is not None: self.dest = dest if source is not None: self.source = source @property def dest(self): return self._dest @dest.setter
MIT License
nspyre-org/nspyre
src/nspyre/inserv/__init__.py
InservCmdPrompt.do_dev_all
python
def do_dev_all(self, arg_string):
    if arg_string:
        print('Expected 0 args')
        return
    try:
        self.inserv.reload_devices()
    except Exception as exc:
        logger.exception(exc)
        print('Failed to reload all devices')
        return
Restart the connection with all devices
https://github.com/nspyre-org/nspyre/blob/d254af09c7c8377552e85dba6f60b150fbb8da2e/src/nspyre/inserv/__init__.py#L78-L88
import argparse import cmd import pathlib import pdb import logging import signal import pyvisa from nspyre.config import load_meta_config from nspyre.definitions import SERVER_META_CONFIG_PATH from nspyre.errors import InstrumentServerError from nspyre.misc import nspyre_init_logger from .inserv import InstrumentServer from .gateway import InservGateway __all__ = [ 'InservGateway', 'InstrumentServer' ] logger = logging.getLogger(__name__) class InservCmdPrompt(cmd.Cmd): def __init__(self, inserv): super().__init__() self.inserv = inserv def emptyline(self): pass def do_list(self, arg_string): if arg_string: print('Expected 0 args') return for d in self.inserv._devs.keys(): print(d) def do_config(self, arg_string): if arg_string: print('Expected 0 args') return try: self.inserv.update_config(config_file=args[0] if arg_string else None) except Exception as exc: logger.exception(exc) print('Failed to reload config files') return def do_dev(self, arg_string): args = arg_string.split(' ') if not arg_string or len(args) > 1: print('Expected 1 arg: device name') return dev_name = args[0] try: self.inserv.reload_device(dev_name) except Exception as exc: logger.exception(exc) print('Failed to reload device [{}]'.format(dev_name)) return
BSD 3-Clause New or Revised License
exopy/exopy
exopy/testing/instruments/fixtures.py
instr_workbench
python
def instr_workbench(workbench, monkeypatch, app_dir, app):
    monkeypatch.setattr(ErrorsPlugin, 'exit_error_gathering', exit_on_err)
    workbench.register(CoreManifest())
    workbench.register(AppManifest())
    workbench.register(PreferencesManifest())
    workbench.register(IconManagerManifest())
    workbench.register(ErrorsManifest())
    workbench.register(StateManifest())
    workbench.register(DependenciesManifest())
    workbench.register(InstrumentManagerManifest())
    yield workbench
    for m_id in ('exopy.instruments', 'exopy.app.dependencies', 'exopy.app.errors',
                 'exopy.app.preferences', 'exopy.app.icons', 'exopy.app.states',
                 'exopy.app'):
        try:
            workbench.unregister(m_id)
        except Exception:
            pass
    sleep(0.1)
Set up the workbench so that the instruments manager can be tested.
https://github.com/exopy/exopy/blob/aeda9bcfad2d2f76903c7ad2800ea2110ff689b2/exopy/testing/instruments/fixtures.py#L36-L62
from time import sleep import pytest import enaml from exopy.testing.util import exit_on_err with enaml.imports(): from enaml.workbench.core.core_manifest import CoreManifest from exopy.app.app_manifest import AppManifest from exopy.app.preferences.manifest import PreferencesManifest from exopy.app.dependencies.manifest import DependenciesManifest from exopy.app.icons.manifest import IconManagerManifest from exopy.app.errors.manifest import ErrorsManifest from exopy.app.states.manifest import StateManifest from exopy.app.errors.plugin import ErrorsPlugin from exopy.instruments.manifest import InstrumentManagerManifest pytests_plugin = str('exopy.testing.fixtures'), @pytest.fixture
BSD 3-Clause New or Revised License
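A test consumes the instr_workbench fixture above like any other pytest fixture; the manifests are already registered when the test body runs. A minimal sketch, assuming the workbench exposes enaml's get_plugin and that 'exopy.instruments' is the plugin id registered by InstrumentManagerManifest (it appears in the fixture's unregister list); the assertion is illustrative.

def test_instrument_manager_starts(instr_workbench):
    # Accessing the plugin starts it; registration happened in the fixture.
    plugin = instr_workbench.get_plugin('exopy.instruments')
    assert plugin is not None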
smarie/python-pytest-cases
pytest_cases/common_pytest_lazy_values.py
Lazy.__repr__
python
def __repr__(self):
    return "%s(%s)" % (self.__class__.__name__,
                       ", ".join("%s=%r" % (k, getattr(self, k))
                                 for k in self._field_names))
Default repr method based on the _field_names
https://github.com/smarie/python-pytest-cases/blob/d67e5bb9846ad16f9d9669e568e88db064b28e63/pytest_cases/common_pytest_lazy_values.py#L61-L65
from functools import partial import weakref try: from inspect import signature except ImportError: from funcsigs import signature try: from typing import Union, Callable, List, Set, Tuple, Any, Sequence, Optional, Iterable except ImportError: pass try: from _pytest.mark.structures import MarkDecorator, Mark except ImportError: pass from .common_pytest_marks import get_pytest_marks_on_function, markdecorators_as_tuple, PYTEST53_OR_GREATER, markdecorators_to_markinfos class Lazy(object): __slots__ = () _field_names = () def get_id(self): raise NotImplementedError() def get(self, request_or_item): raise NotImplementedError() def __str__(self): return self.get_id() def __eq__(self, other): try: return all(getattr(self, k) == getattr(other, k) for k in self._field_names) except Exception: return False
BSD 3-Clause New or Revised License
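The repr above simply pairs each name in _field_names with its current value. A small self-contained illustration of the same pattern (not pytest-cases code; the Point class is made up for the example):

class Point(object):
    _field_names = ("x", "y")

    def __init__(self, x, y):
        self.x, self.y = x, y

    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__,
                           ", ".join("%s=%r" % (k, getattr(self, k))
                                     for k in self._field_names))

print(Point(1, 2))  # -> Point(x=1, y=2)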
alliefitter/boto3_type_annotations
boto3_type_annotations_with_docs/boto3_type_annotations/ds/client.py
Client.add_tags_to_resource
python
def add_tags_to_resource(self, ResourceId: str, Tags: List) -> Dict: pass
Adds or overwrites one or more tags for the specified directory. Each directory can have a maximum of 50 tags. Each tag consists of a key and optional value. Tag keys must be unique to each resource. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/AddTagsToResource>`_ **Request Syntax** :: response = client.add_tags_to_resource( ResourceId='string', Tags=[ { 'Key': 'string', 'Value': 'string' }, ] ) **Response Syntax** :: {} **Response Structure** - *(dict) --* :type ResourceId: string :param ResourceId: **[REQUIRED]** Identifier (ID) for the directory to which to add the tag. :type Tags: list :param Tags: **[REQUIRED]** The tags to be assigned to the directory. - *(dict) --* Metadata assigned to a directory consisting of a key-value pair. - **Key** *(string) --* **[REQUIRED]** Required name of the tag. The string value can be Unicode characters and cannot be prefixed with \"aws:\". The string can contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). - **Value** *(string) --* **[REQUIRED]** The optional value of the tag. The string value can be Unicode characters. The string can contain only the set of Unicode letters, digits, white-space, \'_\', \'.\', \'/\', \'=\', \'+\', \'-\' (Java regex: \"^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$\"). :rtype: dict :returns:
https://github.com/alliefitter/boto3_type_annotations/blob/2a88aa562b1aee6e8a6cc30402980884b3707fbb/boto3_type_annotations_with_docs/boto3_type_annotations/ds/client.py#L134-L172
from typing import Optional from botocore.client import BaseClient from typing import Dict from botocore.paginate import Paginator from botocore.waiter import Waiter from typing import Union from typing import List class Client(BaseClient): def accept_shared_directory(self, SharedDirectoryId: str) -> Dict: pass def add_ip_routes(self, DirectoryId: str, IpRoutes: List, UpdateSecurityGroupForDirectoryControllers: bool = None) -> Dict: pass
MIT License
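The docstring for add_tags_to_resource above spells out the request syntax; a boto3 call against the Directory Service client follows the same shape. A minimal sketch with placeholder values for the directory id and tag:

import boto3

client = boto3.client('ds')
# Placeholder directory id and tag values.
response = client.add_tags_to_resource(
    ResourceId='d-926example',
    Tags=[{'Key': 'environment', 'Value': 'production'}],
)
print(response)  # empty response body on success, per the syntax above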
kmadac/bitstamp-python-client
bitstamp/client.py
Trading.check_bitstamp_code
python
def check_bitstamp_code(self, code):
    data = {'code': code}
    return self._post("check_code/", data=data, return_json=True, version=1)
Returns a JSON dictionary containing the USD and BTC amounts included in the given Bitstamp code.
https://github.com/kmadac/bitstamp-python-client/blob/14a25e9b3ceac980e4f0107b9985a08afcea55b2/bitstamp/client.py#L375-L382
from functools import wraps import hmac import hashlib import time import warnings import logging import requests logger = logging.getLogger(__name__) class BitstampError(Exception): pass class TransRange(object): HOUR = 'hour' MINUTE = 'minute' DAY = 'day' class BaseClient(object): api_url = {1: 'https://www.bitstamp.net/api/', 2: 'https://www.bitstamp.net/api/v2/'} exception_on_error = True def __init__(self, proxydict=None, *args, **kwargs): self.proxydict = proxydict def _get(self, *args, **kwargs): return self._request(requests.get, *args, **kwargs) def _post(self, *args, **kwargs): data = self._default_data() data.update(kwargs.get('data') or {}) kwargs['data'] = data return self._request(requests.post, *args, **kwargs) def _default_data(self): return {} def _construct_url(self, url, base, quote): if not base and not quote: return url else: url = url + base.lower() + quote.lower() + "/" return url def _request(self, func, url, version=1, *args, **kwargs): return_json = kwargs.pop('return_json', False) url = self.api_url[version] + url logger.debug("Request URL: " + url) if 'data' in kwargs and 'nonce' in kwargs['data']: logger.debug("Request nonce: " + str(kwargs['data']['nonce'])) response = func(url, *args, **kwargs) logger.debug("Response Code {} and Reason {}".format(response.status_code, response.reason)) logger.debug("Response Text {}".format(response.text)) if 'proxies' not in kwargs: kwargs['proxies'] = self.proxydict response.raise_for_status() try: json_response = response.json() except ValueError: json_response = None if isinstance(json_response, dict): error = json_response.get('error') if error: raise BitstampError(error) elif json_response.get('status') == "error": raise BitstampError(json_response.get('reason')) if return_json: if json_response is None: raise BitstampError( "Could not decode json for: " + response.text) return json_response return response class Public(BaseClient): def ticker(self, base="btc", quote="usd"): url = self._construct_url("ticker/", base, quote) return self._get(url, return_json=True, version=2) def ticker_hour(self, base="btc", quote="usd"): url = self._construct_url("ticker_hour/", base, quote) return self._get(url, return_json=True, version=2) def order_book(self, group=True, base="btc", quote="usd"): params = {'group': group} url = self._construct_url("order_book/", base, quote) return self._get(url, params=params, return_json=True, version=2) def transactions(self, time=TransRange.HOUR, base="btc", quote="usd"): params = {'time': time} url = self._construct_url("transactions/", base, quote) return self._get(url, params=params, return_json=True, version=2) def conversion_rate_usd_eur(self): return self._get("eur_usd/", return_json=True, version=1) def trading_pairs_info(self): return self._get("trading-pairs-info/", return_json=True, version=2) class Trading(Public): def __init__(self, username, key, secret, *args, **kwargs): super(Trading, self).__init__( username=username, key=key, secret=secret, *args, **kwargs) self.username = username self.key = key self.secret = secret def get_nonce(self): nonce = getattr(self, '_nonce', 0) if nonce: nonce += 1 self._nonce = max(int(time.time()), nonce) return self._nonce def _default_data(self, *args, **kwargs): data = super(Trading, self)._default_data(*args, **kwargs) data['key'] = self.key nonce = self.get_nonce() msg = str(nonce) + self.username + self.key signature = hmac.new( self.secret.encode('utf-8'), msg=msg.encode('utf-8'), digestmod=hashlib.sha256).hexdigest().upper() data['signature'] 
= signature data['nonce'] = nonce return data def _expect_true(self, response): if response.text == u'true': return True raise BitstampError("Unexpected response") def account_balance(self, base="btc", quote="usd"): url = self._construct_url("balance/", base, quote) return self._post(url, return_json=True, version=2) def user_transactions(self, offset=0, limit=100, descending=True, base=None, quote=None): data = { 'offset': offset, 'limit': limit, 'sort': 'desc' if descending else 'asc', } url = self._construct_url("user_transactions/", base, quote) return self._post(url, data=data, return_json=True, version=2) def open_orders(self, base="btc", quote="usd"): url = self._construct_url("open_orders/", base, quote) return self._post(url, return_json=True, version=2) def all_open_orders(self): return self._post('open_orders/all/', return_json=True, version=2) def order_status(self, order_id): data = {'id': order_id} return self._post("order_status/", data=data, return_json=True, version=1) def cancel_order(self, order_id, version=1): data = {'id': order_id} return self._post("cancel_order/", data=data, return_json=True, version=version) def cancel_all_orders(self): return self._post("cancel_all_orders/", return_json=True, version=1) def buy_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None, ioc_order=False): data = {'amount': amount, 'price': price} if limit_price is not None: data['limit_price'] = limit_price if ioc_order is True: data['ioc_order'] = True url = self._construct_url("buy/", base, quote) return self._post(url, data=data, return_json=True, version=2) def buy_market_order(self, amount, base="btc", quote="usd"): data = {'amount': amount} url = self._construct_url("buy/market/", base, quote) return self._post(url, data=data, return_json=True, version=2) def sell_limit_order(self, amount, price, base="btc", quote="usd", limit_price=None, ioc_order=False): data = {'amount': amount, 'price': price} if limit_price is not None: data['limit_price'] = limit_price if ioc_order is True: data['ioc_order'] = True url = self._construct_url("sell/", base, quote) return self._post(url, data=data, return_json=True, version=2) def sell_market_order(self, amount, base="btc", quote="usd"): data = {'amount': amount} url = self._construct_url("sell/market/", base, quote) return self._post(url, data=data, return_json=True, version=2)
MIT License
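Trading.check_bitstamp_code above posts the code to the v1 check_code/ endpoint through the authenticated client. A minimal usage sketch; the credentials and code are placeholders:

from bitstamp.client import Trading

client = Trading(username='123456', key='api-key', secret='api-secret')
amounts = client.check_bitstamp_code('S123456789')  # placeholder code
print(amounts)  # dict with the USD and BTC amounts included in the code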
wittawatj/fsic-test
fsic/ex/ex2_prob_params.py
job_nfsicJ3_perm_stoopt
python
def job_nfsicJ3_perm_stoopt(paired_source, tr, te, r):
    n_permute = 500
    J = 3
    with util.ContextTimer() as t:
        nfsic_opt_options = {'n_test_locs': J, 'max_iter': 300, 'V_step': 1,
                             'W_step': 1, 'gwidthx_step': 1, 'gwidthy_step': 1,
                             'batch_proportion': 0.7, 'tol_fun': 1e-4,
                             'step_pow': 0.5, 'seed': r+2, 'reg': 1e-6}
        op_V, op_W, op_gwx, op_gwy, info = it.GaussNFSIC.optimize_locs_widths(tr, alpha, **nfsic_opt_options)
        nfsic_opt = it.GaussNFSIC(op_gwx, op_gwy, op_V, op_W, alpha, reg='auto',
                                  n_permute=n_permute, seed=r+3)
        nfsic_opt_result = nfsic_opt.perform_test(te)
    return {'indtest': nfsic_opt, 'test_result': nfsic_opt_result, 'time_secs': t.secs}
Use permutations to simulate from the null distribution.
https://github.com/wittawatj/fsic-test/blob/01d63dc984f052ba3e814b7410376d776463e84e/fsic/ex/ex2_prob_params.py#L91-L106
__author__ = 'wittawat' import fsic.data as data import fsic.feature as fea import fsic.indtest as it import fsic.glo as glo import fsic.util as util import fsic.kernel as kernel import exglobal import independent_jobs as inj from independent_jobs.jobs.IndependentJob import IndependentJob from independent_jobs.results.SingleResult import SingleResult from independent_jobs.aggregators.SingleResultAggregator import SingleResultAggregator from independent_jobs.engines.BatchClusterParameters import BatchClusterParameters from independent_jobs.engines.SerialComputationEngine import SerialComputationEngine from independent_jobs.engines.SlurmComputationEngine import SlurmComputationEngine from independent_jobs.tools.Log import logger import math import numpy as np import os import sys import time def job_nfsic_opt(paired_source, tr, te, r): with util.ContextTimer() as t: nfsic_opt_options = {'n_test_locs':J, 'max_iter':200, 'V_step':1, 'W_step':1, 'gwidthx_step':1, 'gwidthy_step':1, 'batch_proportion':1.0, 'tol_fun':1e-4, 'step_pow':0.5, 'seed':r+2, 'reg': 1e-6} op_V, op_W, op_gwx, op_gwy, info = it.GaussNFSIC.optimize_locs_widths(tr, alpha, **nfsic_opt_options ) nfsic_opt = it.GaussNFSIC(op_gwx, op_gwy, op_V, op_W, alpha, reg='auto', seed=r+3) nfsic_opt_result = nfsic_opt.perform_test(te) return {'indtest': nfsic_opt, 'test_result': nfsic_opt_result, 'time_secs': t.secs} def job_nfsicJ3_opt(paired_source, tr, te, r, J=3): with util.ContextTimer() as t: nfsic_opt_options = {'n_test_locs':J, 'max_iter':200, 'V_step':1, 'W_step':1, 'gwidthx_step':1, 'gwidthy_step':1, 'batch_proportion':1.0, 'tol_fun':1e-4, 'step_pow':0.5, 'seed':r+2, 'reg': 1e-6} op_V, op_W, op_gwx, op_gwy, info = it.GaussNFSIC.optimize_locs_widths(tr, alpha, **nfsic_opt_options ) nfsic_opt = it.GaussNFSIC(op_gwx, op_gwy, op_V, op_W, alpha, reg='auto', seed=r+3) nfsic_opt_result = nfsic_opt.perform_test(te) return {'indtest':nfsic_opt, 'test_result': nfsic_opt_result, 'time_secs': t.secs} def job_nfsicJ10_opt(paired_source, tr, te, r): return job_nfsicJ3_opt(paired_source, tr, te, r, J=10) def job_nfsicJ10_stoopt(paired_source, tr, te, r, n_permute=None): J = 10 with util.ContextTimer() as t: nfsic_opt_options = {'n_test_locs':J, 'max_iter':200, 'V_step':1, 'W_step':1, 'gwidthx_step':1, 'gwidthy_step':1, 'batch_proportion':0.7, 'tol_fun':1e-4, 'step_pow':0.5, 'seed':r+2, 'reg': 1e-6} op_V, op_W, op_gwx, op_gwy, info = it.GaussNFSIC.optimize_locs_widths(tr, alpha, **nfsic_opt_options ) nfsic_opt = it.GaussNFSIC(op_gwx, op_gwy, op_V, op_W, alpha, reg='auto', n_permute=n_permute, seed=r+3) nfsic_opt_result = nfsic_opt.perform_test(te) return { 'test_result': nfsic_opt_result, 'time_secs': t.secs} def job_nfsicJ10_perm_stoopt(paired_source, tr, te, r): n_permute = 500 return job_nfsicJ10_stoopt(paired_source, tr, te, r, n_permute)
MIT License
hyperiongray/starbelly
starbelly/rate_limiter.py
RateLimiter.add_job
python
def add_job(self, job_id):
    job_send, job_recv = trio.open_memory_channel(0)
    self._job_channels[job_id] = job_send
    return job_recv
Add a job to the rate limiter. Returns a receive channel on which requests for this job will be delivered. :param str job_id: A job ID.
https://github.com/hyperiongray/starbelly/blob/163b7e48d8816c3ce77fed0e01eeaf9705c4919c/starbelly/rate_limiter.py#L132-L141
from binascii import hexlify from collections import deque from dataclasses import dataclass import functools import hashlib from heapq import heappop, heappush import logging import trio logger = logging.getLogger(__name__) GLOBAL_RATE_LIMIT_TOKEN = b'\x00' * 16 @dataclass @functools.total_ordering class Expiry: time: float token: bytes def __eq__(self, other): time1 = self.time if isinstance(other, Expiry): time2 = other.time elif isinstance(other, (int, float)): time2 = other return time1 == time2 def __lt__(self, other): time1 = self.time if isinstance(other, Expiry): time2 = other.time elif isinstance(other, (int, float)): time2 = other return time1 < time2 def __repr__(self): return 'Expiry(time={:0.3f}, token={})'.format(self.time, hexlify(self.token).decode('ascii')) def get_domain_token(domain): hash_ = hashlib.blake2b(domain.encode('ascii'), digest_size=16) token = hash_.digest() return token class RateLimiter: def __init__(self, capacity): self._expires = list() self._expiry_cancel_scope = None self._global_limit = None self._queues = dict() self._rate_limits = dict() self._capacity = capacity self._semaphore = trio.Semaphore(capacity) self._request_send, self._request_recv = trio.open_memory_channel(0) self._reset_send, self._reset_recv = trio.open_memory_channel(0) self._job_channels = dict() @property def item_count(self): return self._capacity - self._semaphore.value @property def job_count(self): return len(self._job_channels)
MIT License
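RateLimiter.add_job above opens a zero-capacity trio memory channel, keeps the send side keyed by job id, and returns the receive side, so a consumer task reads that job's requests from the returned channel. A minimal sketch, assuming the rate limiter's own machinery is pushing requests into the channel elsewhere; the job id and handler are placeholders.

import trio

async def consume_job_requests(rate_limiter):
    job_recv = rate_limiter.add_job('job-1234')  # placeholder job id
    async with job_recv:
        async for request in job_recv:
            await handle_request(request)  # hypothetical handler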
kiwiz/gkeepapi
gkeepapi/node.py
NodeSettings.checked_listitems_policy
python
def checked_listitems_policy(self):
    return self._checked_listitems_policy
Get the policy for checked listitems. Returns: gkeepapi.node.CheckedListItemsPolicyValue: Policy.
https://github.com/kiwiz/gkeepapi/blob/c5daf8ebfebc2db3db0e9b09e0aab8257ee1ca45/gkeepapi/node.py#L808-L814
import datetime import logging import time import random import enum import six from operator import attrgetter from future.utils import raise_from from . import exception DEBUG = False logger = logging.getLogger(__name__) class NodeType(enum.Enum): Note = 'NOTE' List = 'LIST' ListItem = 'LIST_ITEM' Blob = 'BLOB' class BlobType(enum.Enum): Audio = 'AUDIO' Image = 'IMAGE' Drawing = 'DRAWING' class ColorValue(enum.Enum): White = 'DEFAULT' Red = 'RED' Orange = 'ORANGE' Yellow = 'YELLOW' Green = 'GREEN' Teal = 'TEAL' Blue = 'BLUE' DarkBlue = 'CERULEAN' Purple = 'PURPLE' Pink = 'PINK' Brown = 'BROWN' Gray = 'GRAY' class CategoryValue(enum.Enum): Books = 'BOOKS' Food = 'FOOD' Movies = 'MOVIES' Music = 'MUSIC' Places = 'PLACES' Quotes = 'QUOTES' Travel = 'TRAVEL' TV = 'TV' class SuggestValue(enum.Enum): GroceryItem = 'GROCERY_ITEM' class NewListItemPlacementValue(enum.Enum): Top = 'TOP' Bottom = 'BOTTOM' class GraveyardStateValue(enum.Enum): Expanded = 'EXPANDED' Collapsed = 'COLLAPSED' class CheckedListItemsPolicyValue(enum.Enum): Default = 'DEFAULT' Graveyard = 'GRAVEYARD' class ShareRequestValue(enum.Enum): Add = 'WR' Remove = 'RM' class RoleValue(enum.Enum): Owner = 'O' User = 'W' class Element(object): def __init__(self): self._dirty = False def _find_discrepancies(self, raw): s_raw = self.save(False) if isinstance(raw, dict): for key, val in raw.items(): if key in ['parentServerId', 'lastSavedSessionId']: continue if key not in s_raw: logger.info('Missing key for %s key %s', type(self), key) continue if isinstance(val, (list, dict)): continue val_a = raw[key] val_b = s_raw[key] if isinstance(val_a, six.string_types) and isinstance(val_b, six.string_types): try: tval_a = NodeTimestamps.str_to_dt(val_a) tval_b = NodeTimestamps.str_to_dt(val_b) val_a, val_b = tval_a, tval_b except (KeyError, ValueError): pass if val_a != val_b: logger.info('Different value for %s key %s: %s != %s', type(self), key, raw[key], s_raw[key]) elif isinstance(raw, list): if len(raw) != len(s_raw): logger.info('Different length for %s: %d != %d', type(self), len(raw), len(s_raw)) def load(self, raw): try: self._load(raw) except (KeyError, ValueError) as e: raise_from(exception.ParseException('Parse error in %s' % (type(self)), raw), e) def _load(self, raw): self._dirty = raw.get('_dirty', False) def save(self, clean=True): ret = {} if clean: self._dirty = False else: ret['_dirty'] = self._dirty return ret @property def dirty(self): return self._dirty class Annotation(Element): def __init__(self): super(Annotation, self).__init__() self.id = self._generateAnnotationId() def _load(self, raw): super(Annotation, self)._load(raw) self.id = raw.get('id') def save(self, clean=True): ret = {} if self.id is not None: ret = super(Annotation, self).save(clean) if self.id is not None: ret['id'] = self.id return ret @classmethod def _generateAnnotationId(cls): return '%08x-%04x-%04x-%04x-%012x' % ( random.randint(0x00000000, 0xffffffff), random.randint(0x0000, 0xffff), random.randint(0x0000, 0xffff), random.randint(0x0000, 0xffff), random.randint(0x000000000000, 0xffffffffffff) ) class WebLink(Annotation): def __init__(self): super(WebLink, self).__init__() self._title = '' self._url = '' self._image_url = None self._provenance_url = '' self._description = '' def _load(self, raw): super(WebLink, self)._load(raw) self._title = raw['webLink']['title'] self._url = raw['webLink']['url'] self._image_url = raw['webLink']['imageUrl'] if 'imageUrl' in raw['webLink'] else self.image_url self._provenance_url = 
raw['webLink']['provenanceUrl'] self._description = raw['webLink']['description'] def save(self, clean=True): ret = super(WebLink, self).save(clean) ret['webLink'] = { 'title': self._title, 'url': self._url, 'imageUrl': self._image_url, 'provenanceUrl': self._provenance_url, 'description': self._description, } return ret @property def title(self): return self._title @title.setter def title(self, value): self._title = value self._dirty = True @property def url(self): return self._url @url.setter def url(self, value): self._url = value self._dirty = True @property def image_url(self): return self._image_url @image_url.setter def image_url(self, value): self._image_url = value self._dirty = True @property def provenance_url(self): return self._provenance_url @provenance_url.setter def provenance_url(self, value): self._provenance_url = value self._dirty = True @property def description(self): return self._description @description.setter def description(self, value): self._description = value self._dirty = True class Category(Annotation): def __init__(self): super(Category, self).__init__() self._category = None def _load(self, raw): super(Category, self)._load(raw) self._category = CategoryValue(raw['topicCategory']['category']) def save(self, clean=True): ret = super(Category, self).save(clean) ret['topicCategory'] = { 'category': self._category.value } return ret @property def category(self): return self._category @category.setter def category(self, value): self._category = value self._dirty = True class TaskAssist(Annotation): def __init__(self): super(TaskAssist, self).__init__() self._suggest = None def _load(self, raw): super(TaskAssist, self)._load(raw) self._suggest = raw['taskAssist']['suggestType'] def save(self, clean=True): ret = super(TaskAssist, self).save(clean) ret['taskAssist'] = { 'suggestType': self._suggest } return ret @property def suggest(self): return self._suggest @suggest.setter def suggest(self, value): self._suggest = value self._dirty = True class Context(Annotation): def __init__(self): super(Context, self).__init__() self._entries = {} def _load(self, raw): super(Context, self)._load(raw) self._entries = {} for key, entry in raw.get('context', {}).items(): self._entries[key] = NodeAnnotations.from_json({key: entry}) def save(self, clean=True): ret = super(Context, self).save(clean) context = {} for entry in self._entries.values(): context.update(entry.save(clean)) ret['context'] = context return ret def all(self): return self._entries.values() @property def dirty(self): return super(Context, self).dirty or any((annotation.dirty for annotation in self._entries.values())) class NodeAnnotations(Element): def __init__(self): super(NodeAnnotations, self).__init__() self._annotations = {} def __len__(self): return len(self._annotations) @classmethod def from_json(cls, raw): bcls = None if 'webLink' in raw: bcls = WebLink elif 'topicCategory' in raw: bcls = Category elif 'taskAssist' in raw: bcls = TaskAssist elif 'context' in raw: bcls = Context if bcls is None: logger.warning('Unknown annotation type: %s', raw.keys()) return None annotation = bcls() annotation.load(raw) return annotation def all(self): return self._annotations.values() def _load(self, raw): super(NodeAnnotations, self)._load(raw) self._annotations = {} if 'annotations' not in raw: return for raw_annotation in raw['annotations']: annotation = self.from_json(raw_annotation) self._annotations[annotation.id] = annotation def save(self, clean=True): ret = super(NodeAnnotations, self).save(clean) 
ret['kind'] = 'notes#annotationsGroup' if self._annotations: ret['annotations'] = [annotation.save(clean) for annotation in self._annotations.values()] return ret def _get_category_node(self): for annotation in self._annotations.values(): if isinstance(annotation, Category): return annotation return None @property def category(self): node = self._get_category_node() return node.category if node is not None else None @category.setter def category(self, value): node = self._get_category_node() if value is None: if node is not None: del self._annotations[node.id] else: if node is None: node = Category() self._annotations[node.id] = node node.category = value self._dirty = True @property def links(self): return [annotation for annotation in self._annotations.values() if isinstance(annotation, WebLink) ] def append(self, annotation): self._annotations[annotation.id] = annotation self._dirty = True return annotation def remove(self, annotation): if annotation.id in self._annotations: del self._annotations[annotation.id] self._dirty = True @property def dirty(self): return super(NodeAnnotations, self).dirty or any((annotation.dirty for annotation in self._annotations.values())) class NodeTimestamps(Element): TZ_FMT = '%Y-%m-%dT%H:%M:%S.%fZ' def __init__(self, create_time=None): super(NodeTimestamps, self).__init__() if create_time is None: create_time = time.time() self._created = self.int_to_dt(create_time) self._deleted = self.int_to_dt(0) self._trashed = self.int_to_dt(0) self._updated = self.int_to_dt(create_time) self._edited = self.int_to_dt(create_time) def _load(self, raw): super(NodeTimestamps, self)._load(raw) if 'created' in raw: self._created = self.str_to_dt(raw['created']) self._deleted = self.str_to_dt(raw['deleted']) if 'deleted' in raw else None self._trashed = self.str_to_dt(raw['trashed']) if 'trashed' in raw else None self._updated = self.str_to_dt(raw['updated']) self._edited = self.str_to_dt(raw['userEdited']) if 'userEdited' in raw else None def save(self, clean=True): ret = super(NodeTimestamps, self).save(clean) ret['kind'] = 'notes#timestamps' ret['created'] = self.dt_to_str(self._created) if self._deleted is not None: ret['deleted'] = self.dt_to_str(self._deleted) if self._trashed is not None: ret['trashed'] = self.dt_to_str(self._trashed) ret['updated'] = self.dt_to_str(self._updated) if self._edited is not None: ret['userEdited'] = self.dt_to_str(self._edited) return ret @classmethod def str_to_dt(cls, tzs): return datetime.datetime.strptime(tzs, cls.TZ_FMT) @classmethod def int_to_dt(cls, tz): return datetime.datetime.utcfromtimestamp(tz) @classmethod def dt_to_str(cls, dt): return dt.strftime(cls.TZ_FMT) @classmethod def int_to_str(cls, tz): return cls.dt_to_str(cls.int_to_dt(tz)) @property def created(self): return self._created @created.setter def created(self, value): self._created = value self._dirty = True @property def deleted(self): return self._deleted @deleted.setter def deleted(self, value): self._deleted = value self._dirty = True @property def trashed(self): return self._trashed @trashed.setter def trashed(self, value): self._trashed = value self._dirty = True @property def updated(self): return self._updated @updated.setter def updated(self, value): self._updated = value self._dirty = True @property def edited(self): return self._edited @edited.setter def edited(self, value): self._edited = value self._dirty = True class NodeSettings(Element): def __init__(self): super(NodeSettings, self).__init__() self._new_listitem_placement = 
NewListItemPlacementValue.Bottom self._graveyard_state = GraveyardStateValue.Collapsed self._checked_listitems_policy = CheckedListItemsPolicyValue.Graveyard def _load(self, raw): super(NodeSettings, self)._load(raw) self._new_listitem_placement = NewListItemPlacementValue(raw['newListItemPlacement']) self._graveyard_state = GraveyardStateValue(raw['graveyardState']) self._checked_listitems_policy = CheckedListItemsPolicyValue(raw['checkedListItemsPolicy']) def save(self, clean=True): ret = super(NodeSettings, self).save(clean) ret['newListItemPlacement'] = self._new_listitem_placement.value ret['graveyardState'] = self._graveyard_state.value ret['checkedListItemsPolicy'] = self._checked_listitems_policy.value return ret @property def new_listitem_placement(self): return self._new_listitem_placement @new_listitem_placement.setter def new_listitem_placement(self, value): self._new_listitem_placement = value self._dirty = True @property def graveyard_state(self): return self._graveyard_state @graveyard_state.setter def graveyard_state(self, value): self._graveyard_state = value self._dirty = True @property
MIT License
aceinna/python-openimu
src/aceinna/devices/openimu/uart_provider.py
Provider.get_param
python
def get_param(self, params, *args):
    command_line = helper.build_input_packet(
        'gP', properties=self.properties, param=params['paramId'])
    result = yield self._message_center.build(command=command_line)
    data = result['data']
    error = result['error']
    if error:
        yield {
            'packetType': 'error',
            'data': 'No Response'
        }
    if data:
        yield {
            'packetType': 'inputParam',
            'data': data
        }
    yield {
        'packetType': 'error',
        'data': 'No Response'
    }
Get parameter value
https://github.com/aceinna/python-openimu/blob/5cb6d35fb683d5847c0d268c71cdf63c895c5d34/src/aceinna/devices/openimu/uart_provider.py#L373-L400
import os import re import sys import time import json import binascii import math import datetime import threading import struct from azure.storage.blob import BlockBlobService from ...framework.utils import helper from ...framework.utils import resource from ..base import OpenDeviceBase from ..configs.openimu_predefine import ( APP_STR, DEFAULT_PRODUCT_NAME, get_openimu_products ) from ...framework.context import APP_CONTEXT from ..decorator import with_device_message from ...framework.configuration import get_config from ..upgrade_workers import ( FirmwareUpgradeWorker, JumpBootloaderWorker, JumpApplicationWorker, UPGRADE_EVENT ) from ...framework.utils.print import print_yellow class Provider(OpenDeviceBase): def __init__(self, communicator, *args): super(Provider, self).__init__(communicator) self.type = 'IMU' self.server_update_rate = 50 self.is_logging = False self.is_mag_align = False self.bootloader_baudrate = 57600 self.device_info = None self.app_info = None self.app_config_folder = '' self.parameters = None self.enable_data_log = True self.prepare_folders() self.is_backup = False self.is_restore = False self.is_app_matched = False self.connected = True def prepare_folders(self): executor_path = resource.get_executor_path() setting_folder_name = 'setting' config_file_name = 'openimu.json' data_folder_path = os.path.join(executor_path, 'data') if not os.path.isdir(data_folder_path): os.makedirs(data_folder_path) self.setting_folder_path = os.path.join( executor_path, setting_folder_name) all_products = get_openimu_products() for product in all_products: product_folder = os.path.join(self.setting_folder_path, product) if not os.path.isdir(product_folder): os.makedirs(product_folder) for app_name in all_products[product]: app_name_path = os.path.join(product_folder, app_name) app_name_config_path = os.path.join( app_name_path, config_file_name) if not os.path.isfile(app_name_config_path): if not os.path.isdir(app_name_path): os.makedirs(app_name_path) app_config_content = resource.get_content_from_bundle( setting_folder_name, os.path.join(product, app_name, config_file_name)) if app_config_content is None: continue with open(app_name_config_path, "wb") as code: code.write(app_config_content) @property def is_in_bootloader(self): if not self.device_info or not self.device_info.__contains__('name'): return False if 'bootloader' in self.device_info['name'].lower(): return True return False def bind_device_info(self, device_access, device_info, app_info): self._build_device_info(device_info) self._build_app_info(app_info) self.connected = True return '# Connected {0} #\n\rDevice: {1} \n\rFirmware: {2}' .format('OpenIMU', device_info, app_info) def _build_device_info(self, text): split_text = [x for x in text.split(' ') if x != '' and x != '\x00'] split_len = len(split_text) if split_len < 3: self.device_info = { 'name': split_text[0], 'product_name': split_text[0], 'pn': '-', 'firmware_version': '-', 'sn': '-' } return if split_len == 3: pre_sn = split_text[2].split(':') serial_num = pre_sn[1] if len(pre_sn) == 2 else pre_sn[0] self.device_info = { 'name': split_text[0], 'product_name': split_text[0], 'pn': split_text[1], 'firmware_version': '-', 'sn': serial_num } return pre_sn = split_text[-1].split(':') serial_num = pre_sn[1] if len(pre_sn) == 2 else '' self.device_info = { 'name': ' '.join(split_text[0:-3]), 'product_name': split_text[0], 'pn': split_text[-3], 'firmware_version': split_text[-2], 'sn': serial_num } def _build_app_info(self, text): product_name = '' app_version = 
text can_indicator = '_J1939' if can_indicator in app_version: app_version = app_version.replace(can_indicator, '') split_text = [x for x in app_version.split(' ') if x != ''] app_name = next( (item for item in APP_STR if item in split_text), None) if len(split_text) == 3: product_name = split_text[0] if not app_name: app_name = 'IMU' self.is_app_matched = False else: self.is_app_matched = True self.app_info = { 'app_name': app_name, 'version': text, 'product_name': product_name } def load_properties(self): local_config_file_path = os.path.join(os.getcwd(), 'openimu.json') if os.path.isfile(local_config_file_path): with open(local_config_file_path) as json_data: self.properties = json.load(json_data) return product_name = self.app_info['product_name'] if self.app_info['product_name'] else self.device_info['product_name'] app_name = self.app_info['app_name'] app_file_path = os.path.join( self.setting_folder_path, product_name, app_name, 'openimu.json') if not os.path.isfile(app_file_path): app_file_path = os.path.join( self.setting_folder_path, DEFAULT_PRODUCT_NAME, app_name, 'openimu.json') if not self.is_app_matched: print_yellow( 'Failed to extract app version information from unit.' + '\nThe supported application list is {0}.'.format(APP_STR) + '\nTo keep runing, use IMU configuration as default.' + '\nYou can choose to place your json file under execution path if it is an unknown application.') with open(app_file_path) as json_data: self.properties = json.load(json_data) def after_setup(self): if hasattr(self.communicator, 'serial_port'): self.original_baudrate = self.communicator.serial_port.baudrate def on_read_raw(self, data): pass def on_receive_output_packet(self, packet_type, data, *args, **kwargs): self.add_output_packet(packet_type, data) def get_log_info(self): packet_rate = next( (item['value'] for item in self.parameters if item['name'] == 'Packet Rate'), '100') return { "type": self.type, "model": self.device_info['name'], "logInfo": { "pn": self.device_info['pn'], "sn": self.device_info['sn'], "sampleRate": packet_rate, "appVersion": self.app_info['version'], "imuProperties": json.dumps(self.properties) } } def before_jump_app_command(self): self.communicator.serial_port.baudrate = self.bootloader_baudrate def after_jump_app_command(self): self.communicator.serial_port.baudrate = self.original_baudrate def before_write_content(self): self.communicator.serial_port.baudrate = self.bootloader_baudrate self.communicator.serial_port.reset_input_buffer() def firmware_write_command_generator(self, data_len, current, data): command_WA = 'WA' message_bytes = [] message_bytes.extend(struct.pack('>I', current)) message_bytes.extend(struct.pack('B', data_len)) message_bytes.extend(data) return helper.build_packet(command_WA, message_bytes) def get_upgrade_workers(self, firmware_content): firmware_worker = FirmwareUpgradeWorker( self.communicator, firmware_content, self.firmware_write_command_generator) firmware_worker.on(UPGRADE_EVENT.BEFORE_WRITE, lambda: self.before_write_content()) firmware_worker.on( UPGRADE_EVENT.FIRST_PACKET, lambda: time.sleep(8)) jump_bootloader_command = helper.build_bootloader_input_packet( 'JI') jump_bootloader_worker = JumpBootloaderWorker( self.communicator, command=jump_bootloader_command, listen_packet='JI', wait_timeout_after_command=3) jump_application_command = helper.build_bootloader_input_packet('JA') jump_application_worker = JumpApplicationWorker( self.communicator, command=jump_application_command, listen_packet='JA', 
wait_timeout_after_command=3) jump_application_worker.on( UPGRADE_EVENT.BEFORE_COMMAND, self.before_jump_app_command) jump_application_worker.on( UPGRADE_EVENT.AFTER_COMMAND, self.after_jump_app_command) return [jump_bootloader_worker, firmware_worker, jump_application_worker] def get_device_connection_info(self): return { 'modelName': self.device_info['name'], 'deviceType': self.type, 'serialNumber': self.device_info['sn'], 'partNumber': self.device_info['pn'], 'firmware': self.device_info['firmware_version'] } def get_operation_status(self): if self.is_logging: return 'LOGGING' if self.is_upgrading: return 'UPGRADING' if self.is_mag_align: return 'MAG_ALIGN' if self.is_backup: return 'BACKUP' if self.is_restore: return 'RESTORE' return 'IDLE' def get_device_info(self, *args): return { 'packetType': 'deviceInfo', 'data': [ {'name': 'Product Name', 'value': self.device_info['name']}, {'name': 'PN', 'value': self.device_info['pn']}, {'name': 'Firmware Version', 'value': self.device_info['firmware_version']}, {'name': 'SN', 'value': self.device_info['sn']}, {'name': 'App Version', 'value': self.app_info['version']} ] } def get_conf(self, *args): return { 'packetType': 'conf', 'data': { 'outputs': self.properties['userMessages']['outputPackets'], 'inputParams': self.properties['userConfiguration'] } } @with_device_message def get_params(self, *args): command_line = helper.build_input_packet('gA') result = yield self._message_center.build(command=command_line, timeout=3) data = result['data'] if data: self.parameters = data yield { 'packetType': 'inputParams', 'data': data } yield { 'packetType': 'error', 'data': 'No Response' } @with_device_message
Apache License 2.0
linkedin/naarad
src/naarad/__init__.py
Naarad._process_naarad_config
python
def _process_naarad_config(self, config, analysis): graph_timezone = None output_directory = analysis.output_directory resource_path = analysis.resource_path run_steps = defaultdict(list) metrics = defaultdict(list) indir_default = '' crossplots = [] report_args = {} graphing_library = None ts_start, ts_end = None, None if config.has_section('GLOBAL'): ts_start, ts_end = naarad.utils.parse_global_section(config, 'GLOBAL') if config.has_option('GLOBAL', 'user_defined_metrics'): naarad.utils.parse_user_defined_metric_classes(config, metric_classes) config.remove_section('GLOBAL') if config.has_section('REPORT'): report_args = naarad.utils.parse_report_section(config, 'REPORT') config.remove_section('REPORT') for section in config.sections(): if section == 'GRAPH': graphing_library, crossplots, outdir_default, indir_default, graph_timezone = naarad.utils.parse_graph_section(config, section, output_directory, indir_default) elif section.startswith('RUN-STEP'): run_step = naarad.utils.parse_run_step_section(config, section) if not run_step: logger.error('Ignoring section %s, could not parse it correctly', section) continue if run_step.run_order == CONSTANTS.PRE_ANALYSIS_RUN: run_steps['pre'].append(run_step) elif run_step.run_order == CONSTANTS.DURING_ANALYSIS_RUN: run_steps['in'].append(run_step) elif run_step.run_order == CONSTANTS.POST_ANALYSIS_RUN: run_steps['post'].append(run_step) else: logger.error('Unknown RUN-STEP run_order specified') else: if not naarad.utils.is_valid_metric_name(section): logger.critical('Section name %s is invalid! Only letters, digits, dot(.), dash(-), underscore(_) are allowed' % section) return CONSTANTS.CRITICAL_FAILURE if section == 'SAR-*': hostname, infile, label, ts_start, ts_end, precision, kwargs, rule_strings = naarad.utils.parse_basic_metric_options(config, section) sar_metrics = naarad.utils.get_all_sar_objects(metrics, infile, hostname, output_directory, label, ts_start, ts_end, None) for sar_metric in sar_metrics: if sar_metric.ts_start is None and (sar_metric.ts_end is None or sar_metric.ts_end > ts_start): sar_metric.ts_start = ts_start if sar_metric.ts_end is None and (sar_metric.ts_start is None or ts_end > sar_metric.ts_start): sar_metric.ts_end = ts_end metrics['metrics'].extend(sar_metrics) else: new_metric = naarad.utils.parse_metric_section(config, section, metric_classes, metrics['metrics'], aggregate_metric_classes, output_directory, resource_path) if new_metric.ts_start is None and (new_metric.ts_end is None or new_metric.ts_end > ts_start): new_metric.ts_start = ts_start if new_metric.ts_end is None and (new_metric.ts_start is None or ts_end > new_metric.ts_start): new_metric.ts_end = ts_end metric_type = section.split('-')[0] if metric_type in aggregate_metric_classes: metrics['aggregate_metrics'].append(new_metric) else: metrics['metrics'].append(new_metric) return metrics, run_steps, crossplots, report_args, graph_timezone, graphing_library
Process the config file associated with a particular analysis and return metrics, run_steps and crossplots. Also sets the output directory and resource_path for an analysis
https://github.com/linkedin/naarad/blob/261e2c0760fd6a6b0ee59064180bd8e3674311fe/src/naarad/__init__.py#L401-L475
from collections import defaultdict import ConfigParser import errno import logging import os import threading import naarad.utils import naarad.naarad_constants as CONSTANTS from naarad_imports import metric_classes from naarad_imports import aggregate_metric_classes from naarad_imports import graphing_modules from naarad_imports import reporting_modules from naarad.reporting.diff import Diff from naarad.reporting.diff import NaaradReport logger = logging.getLogger('naarad') class _Analysis(object): def __init__(self, ts_start, config, test_id=None): self.ts_start = ts_start self.ts_end = None self.test_id = test_id self.config = config self.description = '' self.input_directory = None self.output_directory = None self.resource_path = 'resources' self.status = CONSTANTS.OK self.sla_data = {} self.stats_data = {} self.variables = None class Naarad(object): def __init__(self): self._default_test_id = -1 self._analyses = {} self._resource_path = 'resources' self._input_directory = None self._output_directory = None self.return_exit_code = False self.skip_plots = False self.available_graphing_modules = graphing_modules logger.info('Available graphing modules: %s ', ','.join(self.available_graphing_modules.keys())) naarad.metrics.metric.Metric.graphing_modules = self.available_graphing_modules naarad.reporting.diff.Diff.graphing_modules = self.available_graphing_modules naarad.metrics.metric.Metric.device_types = CONSTANTS.device_type_metrics def create_analysis(self, config): self._default_test_id += 1 self._analyses[self._default_test_id] = _Analysis(ts_start=None, config=config, test_id=self._default_test_id) def signal_start(self, config, test_id=None, **kwargs): if not test_id: self._default_test_id += 1 test_id = self._default_test_id self._analyses[test_id] = _Analysis(naarad.utils.get_standardized_timestamp('now', None), config, test_id=test_id) if kwargs: if 'description' in kwargs.keys(): self._analyses[test_id].description = kwargs['description'] if 'input_directory' in kwargs.keys(): self._analyses[test_id].input_directory = kwargs['input_directory'] if 'output_directory' in kwargs.keys(): self._analyses[test_id].output_directory = kwargs['output_directory'] return test_id def signal_stop(self, test_id=None): if test_id is None: test_id = self._default_test_id if self._analyses[test_id].ts_end: return CONSTANTS.OK self._analyses[test_id].ts_end = naarad.utils.get_standardized_timestamp('now', None) return CONSTANTS.OK def get_failed_analyses(self): failed_analyses = [] for test_id in self._analyses.keys(): if self._analyses[test_id].status != CONSTANTS.OK: failed_analyses.append(test_id) return failed_analyses def get_sla_data(self, test_id): return self._analyses[test_id].sla_data def _set_sla_data(self, test_id, metrics): for metric in metrics: self._analyses[test_id].sla_data[metric.label] = metric.sla_map return CONSTANTS.OK def get_stats_data(self, test_id): return self._analyses[test_id].stats_data def _set_stats_data(self, test_id, metrics): for metric in metrics: self._analyses[test_id].stats_data[metric.label] = metric.summary_stats return CONSTANTS.OK def _create_output_directories(self, analysis): try: os.makedirs(analysis.output_directory) except OSError as exception: if exception.errno != errno.EEXIST: raise try: resource_directory = os.path.join(analysis.output_directory, analysis.resource_path) os.makedirs(resource_directory) except OSError as exception: if exception.errno != errno.EEXIST: raise def _run_pre(self, analysis, run_steps): workload_run_steps = [] for 
run_step in sorted(run_steps, key=lambda step: step.run_rank): run_step.run() if run_step.run_type == CONSTANTS.RUN_TYPE_WORKLOAD: workload_run_steps.append(run_step) if len(workload_run_steps) > 0: analysis.ts_start, analysis.ts_end = naarad.utils.get_run_time_period(workload_run_steps) return CONSTANTS.OK def _run_post(self, run_steps): for run_step in sorted(run_steps, key=lambda step: step.run_rank): run_step.run() return CONSTANTS.OK def _process_args(self, analysis, args): if args.exit_code: self.return_exit_code = args.exit_code if args.no_plots: self.skip_plots = args.no_plots if args.start: analysis.ts_start = naarad.utils.get_standardized_timestamp(args.start, None) if args.end: analysis.ts_end = naarad.utils.get_standardized_timestamp(args.end, None) if args.variables: analysis.variables = naarad.utils.get_variables(args) return CONSTANTS.OK def analyze(self, input_directory, output_directory, **kwargs): is_api_call = True if len(self._analyses) == 0: if 'config' not in kwargs.keys(): return CONSTANTS.ERROR self.create_analysis(kwargs['config']) if 'args' in kwargs: self._process_args(self._analyses[0], kwargs['args']) is_api_call = False error_count = 0 self._input_directory = input_directory self._output_directory = output_directory for test_id in sorted(self._analyses.keys()): if not self._analyses[test_id].input_directory: self._analyses[test_id].input_directory = input_directory if not self._analyses[test_id].output_directory: if len(self._analyses) > 1: self._analyses[test_id].output_directory = os.path.join(output_directory, str(test_id)) else: self._analyses[test_id].output_directory = output_directory if('config' in kwargs.keys()) and (not self._analyses[test_id].config): self._analyses[test_id].config = kwargs['config'] self._create_output_directories(self._analyses[test_id]) self._analyses[test_id].status = self.run(self._analyses[test_id], is_api_call, **kwargs) if self._analyses[test_id].status != CONSTANTS.OK: error_count += 1 if len(self._analyses) == 1: return self._analyses[0].status elif error_count > 0: return CONSTANTS.ERROR else: return CONSTANTS.OK def run(self, analysis, is_api_call, **kwargs): threads = [] crossplots = [] report_args = {} metrics = defaultdict() run_steps = defaultdict(list) discovery_mode = False graph_timezone = None graphing_library = None if isinstance(analysis.config, str): if not naarad.utils.is_valid_file(analysis.config): return CONSTANTS.INVALID_CONFIG config_object = ConfigParser.ConfigParser(analysis.variables) config_object.optionxform = str config_object.read(analysis.config) elif isinstance(analysis.config, ConfigParser.ConfigParser): config_object = analysis.config else: if is_api_call: return CONSTANTS.INVALID_CONFIG else: metrics['metrics'] = naarad.utils.discover_by_name(analysis.input_directory, analysis.output_directory) if len(metrics['metrics']) == 0: logger.warning('Unable to auto detect metrics in the specified input directory: %s', analysis.input_directory) return CONSTANTS.ERROR else: discovery_mode = True metrics['aggregate_metrics'] = [] if not discovery_mode: metrics, run_steps, crossplots, report_args, graph_timezone, graphing_library = self._process_naarad_config(config_object, analysis) if graphing_library is None: graphing_library = CONSTANTS.DEFAULT_GRAPHING_LIBRARY if graphing_library not in self.available_graphing_modules.keys(): logger.error("Naarad cannot import graphing library %s on your system. 
Will not generate static charts", graphing_library) self.skip_plots = True if not is_api_call: self._run_pre(analysis, run_steps['pre']) for metric in metrics['metrics']: if analysis.ts_start: metric.ts_start = analysis.ts_start if analysis.ts_end: metric.ts_end = analysis.ts_end thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics, args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots)) thread.start() threads.append(thread) for t in threads: t.join() for metric in metrics['aggregate_metrics']: thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics, args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots)) thread.start() threads.append(thread) for t in threads: t.join() self._set_sla_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics']) self._set_stats_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics']) if len(crossplots) > 0 and not self.skip_plots: correlated_plots = naarad.utils.nway_plotting(crossplots, metrics['metrics'] + metrics['aggregate_metrics'], os.path.join(analysis.output_directory, analysis.resource_path), analysis.resource_path, graphing_library) else: correlated_plots = [] rpt = reporting_modules['report'](None, analysis.output_directory, os.path.join(analysis.output_directory, analysis.resource_path), analysis.resource_path, metrics['metrics'] + metrics['aggregate_metrics'], correlated_plots=correlated_plots, **report_args) rpt.generate() if not is_api_call: self._run_post(run_steps['post']) if self.return_exit_code: for metric in metrics['metrics'] + metrics['aggregate_metrics']: if metric.status == CONSTANTS.SLA_FAILED: return CONSTANTS.SLA_FAILURE return CONSTANTS.OK def diff(self, test_id_1, test_id_2, config=None, **kwargs): output_directory = os.path.join(self._output_directory, 'diff_' + str(test_id_1) + '_' + str(test_id_2)) if kwargs: if 'output_directory' in kwargs.keys(): output_directory = kwargs['output_directory'] diff_report = Diff([NaaradReport(self._analyses[test_id_1].output_directory, None), NaaradReport(self._analyses[test_id_2].output_directory, None)], 'diff', output_directory, os.path.join(output_directory, self._resource_path), self._resource_path) if config: naarad.utils.extract_diff_sla_from_config_file(diff_report, config) diff_report.generate() if diff_report.sla_failures > 0: return CONSTANTS.SLA_FAILURE if diff_report.status != 'OK': return CONSTANTS.ERROR return CONSTANTS.OK def diff_reports_by_location(self, report1_location, report2_location, output_directory, config=None, **kwargs): if kwargs: if 'output_directory' in kwargs.keys(): output_directory = kwargs['output_directory'] diff_report = Diff([NaaradReport(report1_location, None), NaaradReport(report2_location, None)], 'diff', output_directory, os.path.join(output_directory, self._resource_path), self._resource_path) if config: naarad.utils.extract_diff_sla_from_config_file(diff_report, config) diff_report.generate() if diff_report.sla_failures > 0: return CONSTANTS.SLA_FAILURE if diff_report.status != 'OK': return CONSTANTS.ERROR return CONSTANTS.OK
Apache License 2.0
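A minimal, hedged usage sketch for the Naarad API whose _process_naarad_config method is documented in the record above. It assumes the package is importable as naarad; the config file name, input directory, and output directory are placeholders, and the config is assumed to contain the GLOBAL / GRAPH / metric sections that the method parses.

from naarad import Naarad

naarad_obj = Naarad()
# 'naarad.cfg' is a hypothetical config with GLOBAL, GRAPH and metric sections;
# analyze() calls _process_naarad_config internally for each analysis.
test_id = naarad_obj.signal_start('naarad.cfg', description='example run')
# ... run the workload being measured ...
naarad_obj.signal_stop(test_id)
status = naarad_obj.analyze('/path/to/collected/logs', '/path/to/report/output')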
min-ops/cruddy
cruddy/scripts/cli.py
get
python
def get(handler, item_id, decrypt): data = {'operation': 'get', 'decrypt': decrypt, 'id': item_id} handler.invoke(data)
Get an item
https://github.com/min-ops/cruddy/blob/5ef4e9a59514d632ac685574443344056bd19880/cruddy/scripts/cli.py#L130-L135
import json import click from cruddy import CRUD from cruddy.lambdaclient import LambdaClient class CLIHandler(object): def __init__(self, profile_name, region_name, lambda_fn, config_file, debug=False): self.lambda_fn = lambda_fn self.lambda_client = None if lambda_fn: self.lambda_client = LambdaClient( profile_name=profile_name, region_name=region_name, func_name=lambda_fn, debug=debug) if config_file: config = json.load(config_file) self.crud = CRUD(**config) self.debug = debug def _handle_response(self, response): if response.status == 'success': click.echo(json.dumps(response.data, indent=4)) else: click.echo(click.style(response.status, fg='red')) click.echo(click.style(response.error_type, fg='red')) click.echo(click.style(response.error_message, fg='red')) def _invoke_lambda(self, payload, raw): response = self.lambda_client.invoke(payload) if raw: return response self._handle_response(response) def _invoke_cruddy(self, payload, raw): response = self.crud.handler(**payload) if raw: return response self._handle_response(response) def invoke(self, payload, raw=False): if self.lambda_fn: return self._invoke_lambda(payload, raw) elif self.crud: return self._invoke_cruddy(payload, raw) else: msg = 'You must specify either --lambda-fn or --config' click.echo(click.style(msg, fg='red')) pass_handler = click.make_pass_decorator(CLIHandler) @click.group() @click.option( '--profile', default=None, help='AWS credential profile') @click.option( '--region', default=None, help='AWS region') @click.option( '--lambda-fn', help='AWS Lambda controller name') @click.option( '--config', help='cruddy config file', type=click.File('rb')) @click.option( '--debug/--no-debug', default=False, help='Turn on debugging output' ) @click.version_option('0.11.1') @click.pass_context def cli(ctx, profile, region, lambda_fn, config, debug): ctx.obj = CLIHandler(profile, region, lambda_fn, config, debug) @cli.command() @pass_handler def describe(handler): data = {'operation': 'describe'} handler.invoke(data) @cli.command() @pass_handler def list(handler): data = {'operation': 'list'} handler.invoke(data) @cli.command() @click.option( '--decrypt/--no-decrypt', default=False, help='Decrypt any encrypted attributes') @click.argument('item_id', nargs=1) @pass_handler
Apache License 2.0
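A short, hedged sketch of driving the `get` command above through click's test runner. The config file name and item id are placeholders; the import path follows the function_path of this record.

from click.testing import CliRunner
from cruddy.scripts.cli import cli

runner = CliRunner()
# 'cruddy.json' is a hypothetical cruddy config file; 'item-0001' is a placeholder id.
result = runner.invoke(cli, ['--config', 'cruddy.json', 'get', '--decrypt', 'item-0001'])
print(result.output)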
matej-ulicny/harmonic-networks
imagenet/resnext/timm/models/gluon_resnet.py
gluon_seresnext101_32x4d
python
def gluon_seresnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_seresnext101_32x4d'] model = GluonResNet( BottleneckGl, [3, 4, 23, 3], cardinality=32, base_width=4, use_se=True, num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model
Constructs a SEResNeXt-101-32x4d model.
https://github.com/matej-ulicny/harmonic-networks/blob/0fccf674806a0b876e641ef5271aad520ff90739/imagenet/resnext/timm/models/gluon_resnet.py#L633-L643
import math import torch import torch.nn as nn import torch.nn.functional as F from .registry import register_model from .helpers import load_pretrained from .adaptive_avgmax_pool import SelectAdaptivePool2d from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD __all__ = ['GluonResNet'] def _cfg(url='', **kwargs): return { 'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7), 'crop_pct': 0.875, 'interpolation': 'bicubic', 'mean': IMAGENET_DEFAULT_MEAN, 'std': IMAGENET_DEFAULT_STD, 'first_conv': 'conv1', 'classifier': 'fc', **kwargs } default_cfgs = { 'gluon_resnet18_v1b': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet18_v1b-0757602b.pth'), 'gluon_resnet34_v1b': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet34_v1b-c6d82d59.pth'), 'gluon_resnet50_v1b': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1b-0ebe02e2.pth'), 'gluon_resnet101_v1b': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1b-3b017079.pth'), 'gluon_resnet152_v1b': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1b-c1edb0dd.pth'), 'gluon_resnet50_v1c': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1c-48092f55.pth'), 'gluon_resnet101_v1c': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1c-1f26822a.pth'), 'gluon_resnet152_v1c': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1c-a3bb0b98.pth'), 'gluon_resnet50_v1d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1d-818a1b1b.pth'), 'gluon_resnet101_v1d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1d-0f9c8644.pth'), 'gluon_resnet152_v1d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1d-bd354e12.pth'), 'gluon_resnet50_v1e': _cfg(url=''), 'gluon_resnet101_v1e': _cfg(url=''), 'gluon_resnet152_v1e': _cfg(url=''), 'gluon_resnet50_v1s': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet50_v1s-1762acc0.pth'), 'gluon_resnet101_v1s': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet101_v1s-60fe0cc1.pth'), 'gluon_resnet152_v1s': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnet152_v1s-dcc41b81.pth'), 'gluon_resnext50_32x4d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext50_32x4d-e6a097c1.pth'), 'gluon_resnext101_32x4d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_32x4d-b253c8c4.pth'), 'gluon_resnext101_64x4d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_resnext101_64x4d-f9a8e184.pth'), 'gluon_seresnext50_32x4d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext50_32x4d-90cf2d6e.pth'), 'gluon_seresnext101_32x4d': 
_cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_32x4d-cf52900d.pth'), 'gluon_seresnext101_64x4d': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_seresnext101_64x4d-f9926f93.pth'), 'gluon_senet154': _cfg(url='https://github.com/rwightman/pytorch-pretrained-gluonresnet/releases/download/v0.1/gluon_senet154-70a1a3c0.pth'), } def _get_padding(kernel_size, stride, dilation=1): padding = ((stride - 1) + dilation * (kernel_size - 1)) // 2 return padding class SEModule(nn.Module): def __init__(self, channels, reduction_channels): super(SEModule, self).__init__() self.fc1 = nn.Conv2d( channels, reduction_channels, kernel_size=1, padding=0, bias=True) self.relu = nn.ReLU() self.fc2 = nn.Conv2d( reduction_channels, channels, kernel_size=1, padding=0, bias=True) self.sigmoid = nn.Sigmoid() def forward(self, x): module_input = x x = x.view(x.size(0), x.size(1), -1).mean(-1).view(x.size(0), x.size(1), 1, 1) x = self.fc1(x) x = self.relu(x) x = self.fc2(x) x = self.sigmoid(x) return module_input * x class BasicBlockGl(nn.Module): expansion = 1 def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64, use_se=False, reduce_first=1, dilation=1, previous_dilation=1, norm_layer=nn.BatchNorm2d): super(BasicBlockGl, self).__init__() assert cardinality == 1, 'BasicBlock only supports cardinality of 1' assert base_width == 64, 'BasicBlock doest not support changing base width' first_planes = planes // reduce_first outplanes = planes * self.expansion self.conv1 = nn.Conv2d( inplanes, first_planes, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, bias=False) self.bn1 = norm_layer(first_planes) self.relu = nn.ReLU() self.conv2 = nn.Conv2d( first_planes, outplanes, kernel_size=3, padding=previous_dilation, dilation=previous_dilation, bias=False) self.bn2 = norm_layer(outplanes) self.se = SEModule(outplanes, planes // 4) if use_se else None self.downsample = downsample self.stride = stride self.dilation = dilation def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) if self.se is not None: out = self.se(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out class BottleneckGl(nn.Module): expansion = 4 def __init__(self, inplanes, planes, stride=1, downsample=None, cardinality=1, base_width=64, use_se=False, reduce_first=1, dilation=1, previous_dilation=1, norm_layer=nn.BatchNorm2d): super(BottleneckGl, self).__init__() width = int(math.floor(planes * (base_width / 64)) * cardinality) first_planes = width // reduce_first outplanes = planes * self.expansion self.conv1 = nn.Conv2d(inplanes, first_planes, kernel_size=1, bias=False) self.bn1 = norm_layer(first_planes) self.conv2 = nn.Conv2d( first_planes, width, kernel_size=3, stride=stride, padding=dilation, dilation=dilation, groups=cardinality, bias=False) self.bn2 = norm_layer(width) self.conv3 = nn.Conv2d(width, outplanes, kernel_size=1, bias=False) self.bn3 = norm_layer(outplanes) self.se = SEModule(outplanes, planes // 4) if use_se else None self.relu = nn.ReLU() self.downsample = downsample self.stride = stride self.dilation = dilation def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) out = self.conv3(out) out = self.bn3(out) if self.se is not None: 
out = self.se(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out class GluonResNet(nn.Module): def __init__(self, block, layers, num_classes=1000, in_chans=3, use_se=False, cardinality=1, base_width=64, stem_width=64, deep_stem=False, block_reduce_first=1, down_kernel_size=1, avg_down=False, dilated=False, norm_layer=nn.BatchNorm2d, drop_rate=0.0, global_pool='avg'): self.num_classes = num_classes self.inplanes = stem_width * 2 if deep_stem else 64 self.cardinality = cardinality self.base_width = base_width self.drop_rate = drop_rate self.expansion = block.expansion self.dilated = dilated super(GluonResNet, self).__init__() if not deep_stem: self.conv1 = nn.Conv2d(in_chans, stem_width, kernel_size=7, stride=2, padding=3, bias=False) else: conv1_modules = [ nn.Conv2d(in_chans, stem_width, 3, stride=2, padding=1, bias=False), norm_layer(stem_width), nn.ReLU(), nn.Conv2d(stem_width, stem_width, 3, stride=1, padding=1, bias=False), norm_layer(stem_width), nn.ReLU(), nn.Conv2d(stem_width, self.inplanes, 3, stride=1, padding=1, bias=False), ] self.conv1 = nn.Sequential(*conv1_modules) self.bn1 = norm_layer(self.inplanes) self.relu = nn.ReLU() self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) stride_3_4 = 1 if self.dilated else 2 dilation_3 = 2 if self.dilated else 1 dilation_4 = 4 if self.dilated else 1 self.layer1 = self._make_layer( block, 64, layers[0], stride=1, reduce_first=block_reduce_first, use_se=use_se, avg_down=avg_down, down_kernel_size=1, norm_layer=norm_layer) self.layer2 = self._make_layer( block, 128, layers[1], stride=2, reduce_first=block_reduce_first, use_se=use_se, avg_down=avg_down, down_kernel_size=down_kernel_size, norm_layer=norm_layer) self.layer3 = self._make_layer( block, 256, layers[2], stride=stride_3_4, dilation=dilation_3, reduce_first=block_reduce_first, use_se=use_se, avg_down=avg_down, down_kernel_size=down_kernel_size, norm_layer=norm_layer) self.layer4 = self._make_layer( block, 512, layers[3], stride=stride_3_4, dilation=dilation_4, reduce_first=block_reduce_first, use_se=use_se, avg_down=avg_down, down_kernel_size=down_kernel_size, norm_layer=norm_layer) self.global_pool = SelectAdaptivePool2d(pool_type=global_pool) self.num_features = 512 * block.expansion self.fc = nn.Linear(self.num_features * self.global_pool.feat_mult(), num_classes) for m in self.modules(): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1.) nn.init.constant_(m.bias, 0.) 
def _make_layer(self, block, planes, blocks, stride=1, dilation=1, reduce_first=1, use_se=False, avg_down=False, down_kernel_size=1, norm_layer=nn.BatchNorm2d): downsample = None if stride != 1 or self.inplanes != planes * block.expansion: downsample_padding = _get_padding(down_kernel_size, stride) if avg_down: avg_stride = stride if dilation == 1 else 1 downsample_layers = [ nn.AvgPool2d(avg_stride, avg_stride, ceil_mode=True, count_include_pad=False), nn.Conv2d(self.inplanes, planes * block.expansion, down_kernel_size, stride=1, padding=downsample_padding, bias=False), norm_layer(planes * block.expansion), ] else: downsample_layers = [ nn.Conv2d(self.inplanes, planes * block.expansion, down_kernel_size, stride=stride, padding=downsample_padding, bias=False), norm_layer(planes * block.expansion), ] downsample = nn.Sequential(*downsample_layers) first_dilation = 1 if dilation in (1, 2) else 2 layers = [block( self.inplanes, planes, stride, downsample, cardinality=self.cardinality, base_width=self.base_width, reduce_first=reduce_first, use_se=use_se, dilation=first_dilation, previous_dilation=dilation, norm_layer=norm_layer)] self.inplanes = planes * block.expansion for i in range(1, blocks): layers.append(block( self.inplanes, planes, cardinality=self.cardinality, base_width=self.base_width, reduce_first=reduce_first, use_se=use_se, dilation=dilation, previous_dilation=dilation, norm_layer=norm_layer)) return nn.Sequential(*layers) def get_classifier(self): return self.fc def reset_classifier(self, num_classes, global_pool='avg'): self.global_pool = SelectAdaptivePool2d(pool_type=global_pool) self.num_classes = num_classes del self.fc if num_classes: self.fc = nn.Linear(self.num_features * self.global_pool.feat_mult(), num_classes) else: self.fc = None def forward_features(self, x, pool=True): x = self.conv1(x) x = self.bn1(x) x = self.relu(x) x = self.maxpool(x) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.layer4(x) if pool: x = self.global_pool(x) x = x.view(x.size(0), -1) return x def forward(self, x): x = self.forward_features(x) if self.drop_rate > 0.: x = F.dropout(x, p=self.drop_rate, training=self.training) x = self.fc(x) return x @register_model def gluon_resnet18_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet18_v1b'] model = GluonResNet(BasicBlockGl, [2, 2, 2, 2], num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet34_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet34_v1b'] model = GluonResNet(BasicBlockGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet50_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet50_v1b'] model = GluonResNet(BottleneckGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet101_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet101_v1b'] model = GluonResNet(BottleneckGl, [3, 4, 23, 3], num_classes=num_classes, in_chans=in_chans, **kwargs) 
model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet152_v1b(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet152_v1b'] model = GluonResNet(BottleneckGl, [3, 8, 36, 3], num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet50_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet50_v1c'] model = GluonResNet(BottleneckGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet101_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet101_v1c'] model = GluonResNet(BottleneckGl, [3, 4, 23, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet152_v1c(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet152_v1c'] model = GluonResNet(BottleneckGl, [3, 8, 36, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet50_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet50_v1d'] model = GluonResNet(BottleneckGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, avg_down=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet101_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet101_v1d'] model = GluonResNet(BottleneckGl, [3, 4, 23, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, avg_down=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet152_v1d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet152_v1d'] model = GluonResNet(BottleneckGl, [3, 8, 36, 3], num_classes=num_classes, in_chans=in_chans, stem_width=32, deep_stem=True, avg_down=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet50_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet50_v1e'] model = GluonResNet(BottleneckGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, avg_down=True, **kwargs) model.default_cfg = default_cfg return model @register_model def gluon_resnet101_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet101_v1e'] model = GluonResNet(BottleneckGl, [3, 4, 23, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, 
avg_down=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet152_v1e(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet152_v1e'] model = GluonResNet(BottleneckGl, [3, 8, 36, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, avg_down=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet50_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet50_v1s'] model = GluonResNet(BottleneckGl, [3, 4, 6, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet101_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet101_v1s'] model = GluonResNet(BottleneckGl, [3, 4, 23, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnet152_v1s(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnet152_v1s'] model = GluonResNet(BottleneckGl, [3, 8, 36, 3], num_classes=num_classes, in_chans=in_chans, stem_width=64, deep_stem=True, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnext50_32x4d'] model = GluonResNet( BottleneckGl, [3, 4, 6, 3], cardinality=32, base_width=4, num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnext101_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnext101_32x4d'] model = GluonResNet( BottleneckGl, [3, 4, 23, 3], cardinality=32, base_width=4, num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_resnext101_64x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_resnext101_64x4d'] model = GluonResNet( BottleneckGl, [3, 4, 23, 3], cardinality=64, base_width=4, num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model def gluon_seresnext50_32x4d(pretrained=False, num_classes=1000, in_chans=3, **kwargs): default_cfg = default_cfgs['gluon_seresnext50_32x4d'] model = GluonResNet( BottleneckGl, [3, 4, 6, 3], cardinality=32, base_width=4, use_se=True, num_classes=num_classes, in_chans=in_chans, **kwargs) model.default_cfg = default_cfg if pretrained: load_pretrained(model, default_cfg, num_classes, in_chans) return model @register_model
BSD 3-Clause New or Revised License
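A minimal sketch of instantiating the gluon_seresnext101_32x4d constructor above and running a dummy forward pass. The import path is assumed from this repository's vendored timm layout, and the 224x224 input size comes from the model's default_cfg.

import torch
from timm.models.gluon_resnet import gluon_seresnext101_32x4d  # import path assumed

model = gluon_seresnext101_32x4d(pretrained=False, num_classes=1000)
model.eval()
x = torch.randn(1, 3, 224, 224)    # input_size from the model's default_cfg
with torch.no_grad():
    logits = model(x)
print(logits.shape)                # expected: torch.Size([1, 1000])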
django-simple-api/django-simple-api
django_simple_api/decorators.py
describe_response
python
def describe_response( status: Union[int, HTTPStatus], description: str = "", *, content: Union[Type[BaseModel], dict, type] = None, headers: dict = None, links: dict = None, ) -> Callable[[T], T]: status = int(status) if not description: try: description = HTTPStatus(status).description except ValueError: description = "User-defined status code" def decorator(func: T) -> T: if not hasattr(func, "__responses__"): responses: Dict[int, Dict[str, Any]] = {} setattr(func, "__responses__", responses) else: responses = getattr(func, "__responses__") if ( content is None or isinstance(content, dict) or ( not isinstance(content, GenericType) and isclass(content) and issubclass(content, BaseModel) ) ): real_content = content else: real_content = create_model( f"ParsingModel[{display_as_type(content)}]", __root__=(content, ...) ) response = { "description": description, "content": real_content, "headers": headers, "links": links, } responses[status] = {k: v for k, v in response.items() if v} return func return decorator
Describe a response of an HTTP view function. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md#responseObject
https://github.com/django-simple-api/django-simple-api/blob/55c037dca9623cf6a4d41bd83acf9bb5b9fb7a6b/django_simple_api/decorators.py#L41-L93
import sys from http import HTTPStatus from inspect import isclass from typing import Any, Callable, Dict, List, Type, TypeVar, Union from django.views import View from pydantic import BaseModel, create_model from pydantic.utils import display_as_type from .extras import describe_extra_docs if sys.version_info >= (3, 9): from types import GenericAlias GenericType = (GenericAlias, type(List[str])) else: GenericType = (type(List[str]),) T = TypeVar("T", bound=Callable) def allow_request_method(method: str) -> Callable[[T], T]: if method not in View.http_method_names: raise ValueError(f"`method` must in {View.http_method_names}") def wrapper(view_func: T) -> T: if isclass(view_func): raise TypeError("Can only be used for functions") setattr(view_func, "__method__", method.upper()) return view_func return wrapper
MIT License
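A hedged usage sketch for the describe_response decorator above. The view function, the UserOut model, and the import path are illustrative assumptions; the decorator only attaches __responses__ metadata used for OpenAPI generation.

from pydantic import BaseModel
from django_simple_api.decorators import describe_response  # import path assumed

class UserOut(BaseModel):
    id: int
    name: str

@describe_response(200, "User found", content=UserOut)
@describe_response(404, "User not found")
def get_user(request, id: int):
    ...  # return the user as usual; the decorators only record response metadata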
aceinna/python-openimu
src/aceinna/devices/openimu/uart_provider.py
Provider.thread_do_mag_align
python
def thread_do_mag_align(self): try: command_line = helper.build_input_packet( 'ma', self.properties, 'start') result = yield self._message_center.build(command=command_line, timeout=3) time.sleep(1) has_result = False while not has_result: command_line = helper.build_input_packet( 'ma', self.properties, 'status') result = yield self._message_center.build(command=command_line) if not self.is_mag_align: break if result['data'] == [0]: has_result = True else: time.sleep(0.5) if not has_result: return command_line = helper.build_input_packet( 'ma', self.properties, 'stored') result = yield self._message_center.build(command=command_line) mag_value = dict() if len(result['data']) > 0: decoded_status = binascii.hexlify(bytes(result['data'])) mag_value = self.decode_mag_align_output(decoded_status) else: command_line = helper.build_input_packet( 'ma', self.properties, 'abort') self.is_mag_align = False self.add_output_packet('mag_status', { 'status': 'complete', 'value': mag_value }) except Exception as ex: APP_CONTEXT.get_logger().error(ex) self.is_mag_align = False self.add_output_packet('mag_status', { 'status': 'error' })
Perform the mag align procedure (runs in a background thread started by mag_align_start)
https://github.com/aceinna/python-openimu/blob/5cb6d35fb683d5847c0d268c71cdf63c895c5d34/src/aceinna/devices/openimu/uart_provider.py#L562-L616
import os import re import sys import time import json import binascii import math import datetime import threading import struct from azure.storage.blob import BlockBlobService from ...framework.utils import helper from ...framework.utils import resource from ..base import OpenDeviceBase from ..configs.openimu_predefine import ( APP_STR, DEFAULT_PRODUCT_NAME, get_openimu_products ) from ...framework.context import APP_CONTEXT from ..decorator import with_device_message from ...framework.configuration import get_config from ..upgrade_workers import ( FirmwareUpgradeWorker, JumpBootloaderWorker, JumpApplicationWorker, UPGRADE_EVENT ) from ...framework.utils.print import print_yellow class Provider(OpenDeviceBase): def __init__(self, communicator, *args): super(Provider, self).__init__(communicator) self.type = 'IMU' self.server_update_rate = 50 self.is_logging = False self.is_mag_align = False self.bootloader_baudrate = 57600 self.device_info = None self.app_info = None self.app_config_folder = '' self.parameters = None self.enable_data_log = True self.prepare_folders() self.is_backup = False self.is_restore = False self.is_app_matched = False self.connected = True def prepare_folders(self): executor_path = resource.get_executor_path() setting_folder_name = 'setting' config_file_name = 'openimu.json' data_folder_path = os.path.join(executor_path, 'data') if not os.path.isdir(data_folder_path): os.makedirs(data_folder_path) self.setting_folder_path = os.path.join( executor_path, setting_folder_name) all_products = get_openimu_products() for product in all_products: product_folder = os.path.join(self.setting_folder_path, product) if not os.path.isdir(product_folder): os.makedirs(product_folder) for app_name in all_products[product]: app_name_path = os.path.join(product_folder, app_name) app_name_config_path = os.path.join( app_name_path, config_file_name) if not os.path.isfile(app_name_config_path): if not os.path.isdir(app_name_path): os.makedirs(app_name_path) app_config_content = resource.get_content_from_bundle( setting_folder_name, os.path.join(product, app_name, config_file_name)) if app_config_content is None: continue with open(app_name_config_path, "wb") as code: code.write(app_config_content) @property def is_in_bootloader(self): if not self.device_info or not self.device_info.__contains__('name'): return False if 'bootloader' in self.device_info['name'].lower(): return True return False def bind_device_info(self, device_access, device_info, app_info): self._build_device_info(device_info) self._build_app_info(app_info) self.connected = True return '# Connected {0} #\n\rDevice: {1} \n\rFirmware: {2}' .format('OpenIMU', device_info, app_info) def _build_device_info(self, text): split_text = [x for x in text.split(' ') if x != '' and x != '\x00'] split_len = len(split_text) if split_len < 3: self.device_info = { 'name': split_text[0], 'product_name': split_text[0], 'pn': '-', 'firmware_version': '-', 'sn': '-' } return if split_len == 3: pre_sn = split_text[2].split(':') serial_num = pre_sn[1] if len(pre_sn) == 2 else pre_sn[0] self.device_info = { 'name': split_text[0], 'product_name': split_text[0], 'pn': split_text[1], 'firmware_version': '-', 'sn': serial_num } return pre_sn = split_text[-1].split(':') serial_num = pre_sn[1] if len(pre_sn) == 2 else '' self.device_info = { 'name': ' '.join(split_text[0:-3]), 'product_name': split_text[0], 'pn': split_text[-3], 'firmware_version': split_text[-2], 'sn': serial_num } def _build_app_info(self, text): product_name = '' app_version = 
text can_indicator = '_J1939' if can_indicator in app_version: app_version = app_version.replace(can_indicator, '') split_text = [x for x in app_version.split(' ') if x != ''] app_name = next( (item for item in APP_STR if item in split_text), None) if len(split_text) == 3: product_name = split_text[0] if not app_name: app_name = 'IMU' self.is_app_matched = False else: self.is_app_matched = True self.app_info = { 'app_name': app_name, 'version': text, 'product_name': product_name } def load_properties(self): local_config_file_path = os.path.join(os.getcwd(), 'openimu.json') if os.path.isfile(local_config_file_path): with open(local_config_file_path) as json_data: self.properties = json.load(json_data) return product_name = self.app_info['product_name'] if self.app_info['product_name'] else self.device_info['product_name'] app_name = self.app_info['app_name'] app_file_path = os.path.join( self.setting_folder_path, product_name, app_name, 'openimu.json') if not os.path.isfile(app_file_path): app_file_path = os.path.join( self.setting_folder_path, DEFAULT_PRODUCT_NAME, app_name, 'openimu.json') if not self.is_app_matched: print_yellow( 'Failed to extract app version information from unit.' + '\nThe supported application list is {0}.'.format(APP_STR) + '\nTo keep runing, use IMU configuration as default.' + '\nYou can choose to place your json file under execution path if it is an unknown application.') with open(app_file_path) as json_data: self.properties = json.load(json_data) def after_setup(self): if hasattr(self.communicator, 'serial_port'): self.original_baudrate = self.communicator.serial_port.baudrate def on_read_raw(self, data): pass def on_receive_output_packet(self, packet_type, data, *args, **kwargs): self.add_output_packet(packet_type, data) def get_log_info(self): packet_rate = next( (item['value'] for item in self.parameters if item['name'] == 'Packet Rate'), '100') return { "type": self.type, "model": self.device_info['name'], "logInfo": { "pn": self.device_info['pn'], "sn": self.device_info['sn'], "sampleRate": packet_rate, "appVersion": self.app_info['version'], "imuProperties": json.dumps(self.properties) } } def before_jump_app_command(self): self.communicator.serial_port.baudrate = self.bootloader_baudrate def after_jump_app_command(self): self.communicator.serial_port.baudrate = self.original_baudrate def before_write_content(self): self.communicator.serial_port.baudrate = self.bootloader_baudrate self.communicator.serial_port.reset_input_buffer() def firmware_write_command_generator(self, data_len, current, data): command_WA = 'WA' message_bytes = [] message_bytes.extend(struct.pack('>I', current)) message_bytes.extend(struct.pack('B', data_len)) message_bytes.extend(data) return helper.build_packet(command_WA, message_bytes) def get_upgrade_workers(self, firmware_content): firmware_worker = FirmwareUpgradeWorker( self.communicator, firmware_content, self.firmware_write_command_generator) firmware_worker.on(UPGRADE_EVENT.BEFORE_WRITE, lambda: self.before_write_content()) firmware_worker.on( UPGRADE_EVENT.FIRST_PACKET, lambda: time.sleep(8)) jump_bootloader_command = helper.build_bootloader_input_packet( 'JI') jump_bootloader_worker = JumpBootloaderWorker( self.communicator, command=jump_bootloader_command, listen_packet='JI', wait_timeout_after_command=3) jump_application_command = helper.build_bootloader_input_packet('JA') jump_application_worker = JumpApplicationWorker( self.communicator, command=jump_application_command, listen_packet='JA', 
wait_timeout_after_command=3) jump_application_worker.on( UPGRADE_EVENT.BEFORE_COMMAND, self.before_jump_app_command) jump_application_worker.on( UPGRADE_EVENT.AFTER_COMMAND, self.after_jump_app_command) return [jump_bootloader_worker, firmware_worker, jump_application_worker] def get_device_connection_info(self): return { 'modelName': self.device_info['name'], 'deviceType': self.type, 'serialNumber': self.device_info['sn'], 'partNumber': self.device_info['pn'], 'firmware': self.device_info['firmware_version'] } def get_operation_status(self): if self.is_logging: return 'LOGGING' if self.is_upgrading: return 'UPGRADING' if self.is_mag_align: return 'MAG_ALIGN' if self.is_backup: return 'BACKUP' if self.is_restore: return 'RESTORE' return 'IDLE' def get_device_info(self, *args): return { 'packetType': 'deviceInfo', 'data': [ {'name': 'Product Name', 'value': self.device_info['name']}, {'name': 'PN', 'value': self.device_info['pn']}, {'name': 'Firmware Version', 'value': self.device_info['firmware_version']}, {'name': 'SN', 'value': self.device_info['sn']}, {'name': 'App Version', 'value': self.app_info['version']} ] } def get_conf(self, *args): return { 'packetType': 'conf', 'data': { 'outputs': self.properties['userMessages']['outputPackets'], 'inputParams': self.properties['userConfiguration'] } } @with_device_message def get_params(self, *args): command_line = helper.build_input_packet('gA') result = yield self._message_center.build(command=command_line, timeout=3) data = result['data'] if data: self.parameters = data yield { 'packetType': 'inputParams', 'data': data } yield { 'packetType': 'error', 'data': 'No Response' } @with_device_message def get_param(self, params, *args): command_line = helper.build_input_packet( 'gP', properties=self.properties, param=params['paramId']) result = yield self._message_center.build(command=command_line) data = result['data'] error = result['error'] if error: yield { 'packetType': 'error', 'data': 'No Response' } if data: yield { 'packetType': 'inputParam', 'data': data } yield { 'packetType': 'error', 'data': 'No Response' } @with_device_message def set_params(self, params, *args): for parameter in params: command_line = helper.build_input_packet( 'uP', properties=self.properties, param=parameter['paramId'], value=parameter['value']) result = yield self._message_center.build(command=command_line) packet_type = result['packet_type'] data = result['data'] if packet_type == 'error': yield { 'packetType': 'error', 'data': { 'error': data } } if data > 0: yield { 'packetType': 'error', 'data': { 'error': data } } yield { 'packetType': 'success', 'data': { 'error': 0 } } @with_device_message def set_param(self, params, *args): try: command_line = helper.build_input_packet( 'uP', properties=self.properties, param=params['paramId'], value=params['value']) except: yield { 'packetType': 'error', 'data': { 'error': params['paramId'] } } result = yield self._message_center.build(command=command_line) error = result['error'] data = result['data'] if error: yield { 'packetType': 'error', 'data': { 'error': data } } yield { 'packetType': 'success', 'data': { 'error': data } } @with_device_message def reset_params(self, *args): command_line = helper.build_input_packet('rD') result = yield self._message_center.build(command=command_line, timeout=2) error = result['error'] data = result['data'] if error: yield { 'packetType': 'error', 'data': { 'error': error } } yield { 'packetType': 'success', 'data': data } @with_device_message def save_config(self, *args): 
command_line = helper.build_input_packet('sC') result = yield self._message_center.build(command=command_line) data = result['data'] error = result['error'] if data: yield { 'packetType': 'success', 'data': data } yield { 'packetType': 'success', 'data': error } @with_device_message def run_command(self, params, *args): bytes_str_in_array = re.findall('([a-f|0-9|A-F]{2})', params) command_line = bytes([int(item, 16) for item in bytes_str_in_array]) result = yield self._message_center.build(command=command_line, timeout=2) error = result['error'] raw = result['raw'] if error: yield { 'packetType': 'error', 'data': { 'error': 'Runtime Error', 'message': 'The device cannot response the command' } } yield { 'packetType': 'success', 'data': raw } def mag_align_start(self, *args): if not self.is_mag_align: self.is_mag_align = True thread = threading.Thread( target=self.thread_do_mag_align, args=()) thread.start() return { 'packetType': 'success' } @with_device_message
Apache License 2.0
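A hedged sketch of how thread_do_mag_align above is normally reached: the caller invokes mag_align_start(), which spawns the worker thread. `provider` is assumed to be an already-connected Provider instance.

# `provider` is an assumed, already-connected Provider instance.
response = provider.mag_align_start()   # starts thread_do_mag_align in a background thread
print(response)                         # {'packetType': 'success'}
# Progress and results arrive asynchronously as 'mag_status' output packets,
# e.g. {'status': 'complete', 'value': {...decoded alignment values...}}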
xethorn/garcon
tests/test_activity.py
activity_run
python
def activity_run( monkeypatch, boto_client, poll=None, complete=None, fail=None, execute=None): current_activity = activity.Activity(boto_client) poll = poll or dict() monkeypatch.setattr( current_activity, 'execute_activity', execute or MagicMock(return_value=dict())) monkeypatch.setattr( boto_client, 'poll_for_activity_task', MagicMock(return_value=poll)) monkeypatch.setattr( boto_client, 'respond_activity_task_completed', complete or MagicMock()) monkeypatch.setattr( boto_client, 'respond_activity_task_failed', fail or MagicMock()) return current_activity
Create an activity.
https://github.com/xethorn/garcon/blob/6eae7e66971e6a654b178b3360fd17506e993aba/tests/test_activity.py#L17-L36
from unittest.mock import MagicMock from unittest.mock import ANY import json import sys from botocore import exceptions import pytest from garcon import activity from garcon import event from garcon import runner from garcon import task from garcon import utils from tests.fixtures import decider
MIT License
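A hedged sketch of using the activity_run helper above inside a pytest test. boto_client is assumed to be a fixture provided elsewhere in the garcon test suite; monkeypatch is the standard pytest fixture.

def test_activity_run_wires_mocks(monkeypatch, boto_client):
    poll = {'taskToken': 'token', 'input': '{}'}   # placeholder poll response
    current_activity = activity_run(monkeypatch, boto_client, poll=poll)
    # the boto client calls are now MagicMocks returning the canned poll response
    assert boto_client.poll_for_activity_task() == poll
    assert current_activity.execute_activity() == dict()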
nuagenetworks/vspk-python
vspk/v6/nuingressauditacltemplate.py
NUIngressAuditACLTemplate.__init__
python
def __init__(self, **kwargs): super(NUIngressAuditACLTemplate, self).__init__() self._name = None self._last_updated_by = None self._last_updated_date = None self._active = None self._default_allow_ip = None self._default_allow_non_ip = None self._description = None self._allow_address_spoof = None self._embedded_metadata = None self._entity_scope = None self._policy_state = None self._creation_date = None self._priority = None self._priority_type = None self._associated_live_entity_id = None self._associated_virtual_firewall_policy_id = None self._auto_generate_priority = None self._owner = None self._external_id = None self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="active", remote_name="active", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="default_allow_ip", remote_name="defaultAllowIP", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="default_allow_non_ip", remote_name="defaultAllowNonIP", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="allow_address_spoof", remote_name="allowAddressSpoof", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE']) self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=True) self.expose_attribute(local_name="priority_type", remote_name="priorityType", attribute_type=str, is_required=False, is_unique=False, choices=[u'TOP_AUDIT']) self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="associated_virtual_firewall_policy_id", remote_name="associatedVirtualFirewallPolicyID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="auto_generate_priority", remote_name="autoGeneratePriority", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child") self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas 
= NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.ingress_audit_acl_entry_templates = NUIngressAuditACLEntryTemplatesFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs)
Initializes an IngressAuditACLTemplate instance. Notes: You can specify all parameters while calling this method. A special argument named `data` will enable you to load the object from a Python dictionary. Examples: >>> ingressauditacltemplate = NUIngressAuditACLTemplate(id=u'xxxx-xxx-xxx-xxx', name=u'IngressAuditACLTemplate') >>> ingressauditacltemplate = NUIngressAuditACLTemplate(data=my_dict)
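A short usage sketch of the attribute mapping that the expose_attribute calls above set up: Python attributes use the snake_case local names, while the `data` dictionary uses the remote (API) names. The dictionary contents below are made up, and the assumption that `data` expects remote attribute names follows the usual bambou convention rather than anything shown here.

from vspk.v6 import NUIngressAuditACLTemplate

# Illustrative values only; the remote (camelCase) keys are assumed to be what `data` expects.
data = {"name": u"audit-acl",
        "defaultAllowIP": True,
        "policyState": u"DRAFT",
        "priority": 100}

template = NUIngressAuditACLTemplate(data=data)
print(template.name, template.default_allow_ip, template.policy_state, template.priority)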
https://github.com/nuagenetworks/vspk-python/blob/375cce10ae144ad6017104e57fcd3630898cc2a6/vspk/v6/nuingressauditacltemplate.py#L70-L143
from .fetchers import NUPermissionsFetcher from .fetchers import NUMetadatasFetcher from .fetchers import NUGlobalMetadatasFetcher from .fetchers import NUIngressAuditACLEntryTemplatesFetcher from bambou import NURESTObject class NUIngressAuditACLTemplate(NURESTObject): __rest_name__ = "ingressauditacltemplate" __resource_name__ = "ingressauditacltemplates" CONST_POLICY_STATE_DRAFT = "DRAFT" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_PRIORITY_TYPE_TOP_AUDIT = "TOP_AUDIT" CONST_POLICY_STATE_LIVE = "LIVE" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
BSD 3-Clause New or Revised License
jorgemf/kaggle_redefining_cancer_treatment
src/distributed_training.py
launch_train_evaluation
python
def launch_train_evaluation(model_fn, log_dir, epochs, train_batch_size, train_datasest, eval_dataset, trainer_class=DistributedTrainer, evaluator_class=DistributedEvaluator): task_spec = get_task_spec(with_evaluator=True) if task_spec.num_workers <= 1: raise ValueError('More than one worker needed in order to perform a continuos evaluation') if task_spec.join_if_ps(): return if task_spec.is_evaluator(): evaluator = evaluator_class(log_dir=log_dir, dataset=eval_dataset, model_fn=model_fn) evaluator.run() else: trainer = trainer_class(log_dir=log_dir, dataset=train_datasest, model_fn=model_fn, task_spec=task_spec) trainer.run(batch_size=train_batch_size, epochs=epochs)
Launches the training with an evaluator in the last worker. Only call this in a distributed setting or it will fail. :param model_fn: function to create the model :param log_dir: directory for the logs/checkpoints :param epochs: number of epochs to perform the training :param train_batch_size: batch size of the trainer :param train_datasest: dataset for training :param eval_dataset: dataset for evaluation :param trainer_class: custom trainer, defaults to DistributedTrainer :param evaluator_class: custom evaluator, defaults to DistributedEvaluator
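A hedged invocation sketch for the function above; the dataset objects and the model function body are placeholders (the real ones come from the rest of this repository), and the call only succeeds when the task spec reports more than one worker.

from src.distributed_training import launch_train_evaluation

def my_model_fn(dataset_tensor, batch_size, evaluation):
    # Build the TensorFlow graph for one batch and return the graph data used by step().
    graph_data = None  # placeholder
    return graph_data

train_dataset = None  # placeholder: dataset object expected by the Trainer base class
eval_dataset = None   # placeholder: dataset object expected by the Evaluator base class

launch_train_evaluation(model_fn=my_model_fn,
                        log_dir="/tmp/train_logs",       # assumed log directory
                        epochs=10,
                        train_batch_size=32,
                        train_datasest=train_dataset,    # parameter name as spelled in the source
                        eval_dataset=eval_dataset)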
https://github.com/jorgemf/kaggle_redefining_cancer_treatment/blob/d9e6b37231c5b3706f94de4d71bd9d9358e147a0/src/distributed_training.py#L80-L111
from .task_spec import get_task_spec from .trainer import Trainer from .evaluator import Evaluator import tensorflow as tf from tensorflow.python.training import training_util class DistributedTrainer(Trainer): def __init__(self, log_dir, dataset, model_fn, task_spec, **kwargs): self.model_fn = model_fn super(DistributedTrainer, self).__init__(log_dir=log_dir, dataset=dataset, task_spec=task_spec, **kwargs) def create_graph(self, dataset_tensor, batch_size): return self.model_fn(dataset_tensor=dataset_tensor, batch_size=batch_size, evaluation=False) def step(self, session, graph_data): session.run(graph_data) class DistributedEvaluator(Evaluator): def __init__(self, log_dir, dataset, model_fn, **kwargs): self.model_fn = model_fn super(DistributedEvaluator, self).__init__(checkpoints_dir=log_dir, dataset=dataset, **kwargs) self.eval_writer = tf.summary.FileWriter(log_dir) def create_graph(self, dataset_tensor, batch_size): self.graph_data = self.model_fn(dataset_tensor=dataset_tensor, batch_size=batch_size, evaluation=False) self.global_step = training_util.get_global_step() self.summary_op = tf.summary.merge_all() return self.graph_data def after_create_session(self, session, coord): super(DistributedEvaluator, self).after_create_session(session, coord) self.summary_file = tf.summary.FileWriter(self.checkpoints_dir + '/eval') def end(self, session): super(DistributedEvaluator, self).end(session) step = int(self.lastest_checkpoint.split('-')[-1]) self.summary_file.add_summary(self.summary, step) def step(self, session, graph_data): self.summary = session.run(self.summary_op) def model_fn_example(dataset_tensor, batch_size, evaluation): graph_data = None return graph_data
MIT License
interactive-sonification/sc3nb
src/sc3nb/sc_objects/server.py
SCServer.unresponsive
python
def unresponsive(self) -> bool: try: self.status() except OSCCommunicationError: return True else: return False
Whether the server process is unresponsive: True if a status request raises OSCCommunicationError, False otherwise.
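A small sketch of how this can be used to recover a wedged local server; it assumes a locally booted server, and the surrounding context suggests unresponsive is exposed as a property.

from sc3nb.sc_objects.server import SCServer

server = SCServer()
server.boot()
# ... later, if OSC communication stops working:
if server.unresponsive:
    # status() raised OSCCommunicationError, so restart the local scsynth process.
    server.reboot()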
https://github.com/interactive-sonification/sc3nb/blob/7d7fbd9178fe804c5c8ddd0ddd4075579221b7c4/src/sc3nb/sc_objects/server.py#L991-L998
import atexit import logging import warnings from enum import Enum, unique from queue import Empty from random import randint from typing import Any, Callable, Dict, List, NamedTuple, Optional, Sequence, Tuple from weakref import WeakValueDictionary from sc3nb.osc.osc_communication import ( MessageQueue, MessageQueueCollection, OSCCommunication, OSCCommunicationError, OSCMessage, ) from sc3nb.osc.parsing import preprocess_return from sc3nb.process_handling import ALLOWED_PARENTS, Process, ProcessTimeout from sc3nb.sc_objects.allocators import Allocator, BlockAllocator, NodeAllocator from sc3nb.sc_objects.buffer import BufferCommand, BufferReply from sc3nb.sc_objects.bus import Bus, BusRate, ControlBusCommand from sc3nb.sc_objects.node import ( Group, GroupCommand, GroupReply, Node, NodeCommand, NodeReply, SynthCommand, ) from sc3nb.sc_objects.synthdef import SynthDef, SynthDefinitionCommand from sc3nb.sc_objects.volume import Volume _LOGGER = logging.getLogger(__name__) @unique class MasterControlReply(str, Enum): VERSION_REPLY = "/version.reply" SYNCED = "/synced" STATUS_REPLY = "/status.reply" @unique class MasterControlCommand(str, Enum): DUMP_OSC = "/dumpOSC" STATUS = "/status" VERSION = "/version" CLEAR_SCHED = "/clearSched" NOTIFY = "/notify" QUIT = "/quit" SYNC = "/sync" @unique class ReplyAddress(str, Enum): WILDCARD_ADDR = "/*" FAIL_ADDR = "/fail" DONE_ADDR = "/done" RETURN_ADDR = "/return" ASYNC_CMDS = [ MasterControlCommand.QUIT, MasterControlCommand.NOTIFY, SynthDefinitionCommand.RECV, SynthDefinitionCommand.LOAD, SynthDefinitionCommand.LOAD_DIR, BufferCommand.ALLOC, BufferCommand.ALLOC_READ, BufferCommand.ALLOC_READ_CHANNEL, BufferCommand.READ, BufferCommand.READ_CHANNEL, BufferCommand.WRITE, BufferCommand.FREE, BufferCommand.ZERO, BufferCommand.GEN, BufferCommand.CLOSE, ] CMD_PAIRS = { MasterControlCommand.STATUS: MasterControlReply.STATUS_REPLY, MasterControlCommand.SYNC: MasterControlReply.SYNCED, MasterControlCommand.VERSION: MasterControlReply.VERSION_REPLY, SynthCommand.S_GET: NodeCommand.SET, SynthCommand.S_GETN: NodeCommand.SETN, GroupCommand.QUERY_TREE: GroupReply.QUERY_TREE_REPLY, NodeCommand.QUERY: NodeReply.INFO, BufferCommand.QUERY: BufferReply.INFO, BufferCommand.GET: BufferCommand.SET, BufferCommand.GETN: BufferCommand.SETN, ControlBusCommand.GET: ControlBusCommand.SET, ControlBusCommand.GETN: ControlBusCommand.SETN, } LOCALHOST = "127.0.0.1" SC3NB_SERVER_CLIENT_ID = 1 SC3NB_DEFAULT_PORT = 57130 SCSYNTH_DEFAULT_PORT = 57110 SC3_SERVER_NAME = "scsynth" class ServerStatus(NamedTuple): num_ugens: int num_synths: int num_groups: int num_synthdefs: int avg_cpu: float peak_cpu: float nominal_sr: float actual_sr: float class ServerVersion(NamedTuple): name: str major_version: int minor_version: int patch_version: str git_branch: str commit: str class ServerOptions: def __init__( self, udp_port: int = SCSYNTH_DEFAULT_PORT, max_logins: int = 6, num_input_buses: int = 2, num_output_buses: int = 2, num_audio_buses: int = 1024, num_control_buses: int = 4096, num_sample_buffers: int = 1024, publish_rendezvous: bool = False, block_size: Optional[int] = None, hardware_buffer_size: Optional[int] = None, hardware_sample_size: Optional[int] = None, hardware_input_device: Optional[str] = None, hardware_output_device: Optional[str] = None, other_options: Optional[Sequence[str]] = None, ): self.options = [] self.udp_port = udp_port self.options += ["-u", f"{self.udp_port}"] if 3 <= max_logins <= 32: self.max_logins = max_logins else: raise ValueError("max logins must be between 3 
and 32") self.options += ["-l", f"{self.max_logins}"] self.num_input_buses = num_input_buses self.options += ["-i", f"{self.num_input_buses}"] self.num_output_buses = num_output_buses self.options += ["-o", f"{self.num_output_buses}"] if num_audio_buses < num_input_buses + num_output_buses: raise ValueError( f"You need at least {num_input_buses + num_output_buses} audio buses" ) self.num_audio_buses = num_audio_buses self.options += ["-a", f"{self.num_audio_buses}"] self.num_control_buses = num_control_buses self.options += ["-c", f"{self.num_control_buses}"] self.num_sample_buffers = num_sample_buffers self.options += ["-b", f"{self.num_sample_buffers}"] self.publish_rendezvous = 1 if publish_rendezvous else 0 self.options += ["-R", f"{self.publish_rendezvous}"] if block_size is not None: self.block_size = block_size self.options += ["-z", f"{self.block_size}"] if hardware_buffer_size is not None: self.hardware_buffer_size = hardware_buffer_size self.options += ["-Z", f"{self.hardware_buffer_size}"] if hardware_sample_size is not None: self.hardware_sample_size = hardware_sample_size self.options += ["-S", f"{self.hardware_sample_size}"] if not hardware_input_device: self.hardware_input_device = "" else: self.hardware_input_device = hardware_input_device if not hardware_output_device: self.hardware_output_device = "" else: self.hardware_output_device = hardware_output_device if hardware_input_device or hardware_output_device: self.options += [ "-H", f"{self.hardware_input_device} {self.hardware_output_device}".strip(), ] self.other_options = other_options if self.other_options: self.options += self.other_options @property def first_private_bus(self) -> int: return self.num_output_buses + self.num_input_buses @property def num_private_buses(self) -> int: return self.num_audio_buses - (self.num_output_buses + self.num_input_buses) def __repr__(self): return f"<ServerOptions {self.options}>" class NodeWatcher: def __init__(self, server: "SCServer"): self._server = server self.notification_addresses = ["/n_go", "/n_end", "/n_off", "/n_on", "/n_move"] def handle_notification(self, *args): kind, *info = args nodeid, _, _, _, *rest = info _LOGGER.debug("Handling %s notification for %s: %s", kind, nodeid, info) try: node = self._server.nodes[nodeid] if node is None: raise KeyError("weakref is None") except KeyError: pass else: node._handle_notification(kind, info) class SCServer(OSCCommunication): def __init__(self, options: Optional[ServerOptions] = None): if options is None: self.options = ServerOptions() _LOGGER.debug("Using default server options %s", self.options) else: self.options = options _LOGGER.debug("Using custom server options %s", self.options) self._num_node_ids: int = 0 self.process: Optional[Process] = None self._programm_name = SC3_SERVER_NAME self._client_id: int = SC3NB_SERVER_CLIENT_ID self._scsynth_address = LOCALHOST self._scsynth_port = self.options.udp_port self._max_logins = self.options.max_logins self._server_running: bool = False self._has_booted: bool = False self.latency: float = 0.0 self._init_osc_communication() self.nodes: WeakValueDictionary[int, Node] = WeakValueDictionary() self.node_watcher = NodeWatcher(self) for address in self.node_watcher.notification_addresses: self._osc_server.dispatcher.map( address, self.node_watcher.handle_notification ) self.node_ids: Optional[Allocator] = None self.buffer_ids: Optional[BlockAllocator] = None self.control_bus_ids: Optional[BlockAllocator] = None self.audio_bus_ids: Optional[BlockAllocator] = None self._root_node = 
Group(nodeid=0, new=False, target=0, server=self) self._default_groups: Dict[int, Group] = {} self._is_local: bool = False self._output_bus = Bus( rate=BusRate.AUDIO, num_channels=self.options.num_output_buses, index=0, server=self, ) self._input_bus = Bus( rate=BusRate.AUDIO, num_channels=self.options.num_input_buses, index=self.options.num_output_buses, server=self, ) self._server_init_hooks: List[Tuple[Callable[..., None], Any, Any]] = [] self._volume = Volume(self) atexit.register(self.quit) def boot( self, scsynth_path: Optional[str] = None, timeout: float = 5, console_logging: bool = True, with_blip: bool = True, kill_others: bool = True, allowed_parents: Sequence[str] = ALLOWED_PARENTS, ): if self._has_booted: warnings.warn("already booted") return print("Booting SuperCollider Server... ", end="") self._is_local = True self._scsynth_address = LOCALHOST self._scsynth_port = self.options.udp_port self._max_logins = self.options.max_logins self.process = Process( executable=self._programm_name, programm_args=self.options.options, executable_path=scsynth_path, console_logging=console_logging, kill_others=kill_others, allowed_parents=allowed_parents, ) try: self.process.read(expect="SuperCollider 3 server ready.", timeout=timeout) except ProcessTimeout as process_timeout: if ("Exception in World_OpenUDP" in process_timeout.output) or ( "ERROR: failed to open UDP socket" in process_timeout.output ): self.process.kill() self.process = None print( f"\nSuperCollider Server port {self.options.udp_port} already used." ) if self.options.udp_port != SCSYNTH_DEFAULT_PORT: raise ValueError( f"The specified UDP port {self.options.udp_port} is already used" ) from process_timeout print("Trying to connect.") self.remote( self._scsynth_address, self._scsynth_port, with_blip=with_blip ) else: print("Failed booting SuperCollider Server.") raise process_timeout else: self.init(with_blip) self._has_booted = True def init(self, with_blip: bool = True): self._init_osc_communication() self.add_receiver( self._programm_name, self._scsynth_address, self._scsynth_port ) self.notify() self.node_ids = NodeAllocator(self.client_id) buffers_per_user = int(self.options.num_sample_buffers / self.max_logins) buffer_id_offset = self.client_id * buffers_per_user self.buffer_ids = BlockAllocator(buffers_per_user, buffer_id_offset) audio_buses_per_user = int(self.options.num_private_buses / self.max_logins) audio_bus_id_offset = ( self.client_id * audio_buses_per_user + self.options.first_private_bus ) self.audio_bus_ids = BlockAllocator(audio_buses_per_user, audio_bus_id_offset) control_buses_per_user = int(self.options.num_control_buses / self.max_logins) control_bus_id_offset = self.client_id * control_buses_per_user self.control_bus_ids = BlockAllocator( control_buses_per_user, control_bus_id_offset ) self._output_bus = Bus( rate=BusRate.AUDIO, num_channels=self.options.num_output_buses, index=0, server=self, ) self._input_bus = Bus( rate=BusRate.AUDIO, num_channels=self.options.num_input_buses, index=self.options.num_output_buses, server=self, ) self.load_synthdefs() self.send_default_groups() self.sync() self._server_running = True for address in self.node_watcher.notification_addresses: self._osc_server.dispatcher.map( address, self.node_watcher.handle_notification ) self.execute_init_hooks() self.sync() if with_blip: self.blip() print("Done.") def execute_init_hooks(self) -> None: _LOGGER.debug("Executing init hooks %s", self._server_init_hooks) for hook, args, kwargs in self._server_init_hooks: if args and 
kwargs: hook(*args, **kwargs) elif args: hook(*args) elif kwargs: hook(**kwargs) else: hook() def connect_sclang(self, port: int) -> None: self.add_receiver(name="sclang", ip_address="127.0.0.1", port=port) self.execute_init_hooks() def add_init_hook( self, hook: Callable[..., None], *args: Any, **kwargs: Any ) -> None: self._server_init_hooks.append((hook, args, kwargs)) def bundler(self, timetag=0, msg=None, msg_params=None, send_on_exit=True): return super().bundler( timetag=timetag + self.latency, msg=msg, msg_params=msg_params, send_on_exit=send_on_exit, ) def blip(self) -> None: if self._volume.muted: warnings.warn("SCServer is muted. Blip will also be silent!") with self.bundler(0.15) as bundler: bundler.add(0.0, "/error", [0]) bundler.add( 0.0, "/s_new", ["s1", -1, 0, 0, "freq", 500, "dur", 0.1, "num", 1] ) bundler.add( 0.2, "/s_new", ["s2", -1, 0, 0, "freq", 1000, "amp", 0.05, "num", 2] ) bundler.add(0.3, "/n_free", [-1]) bundler.add(0.3, "/error", [1]) def remote(self, address: str, port: int, with_blip: bool = True) -> None: self._is_local = False self._scsynth_address = address self._scsynth_port = port self.init(with_blip=with_blip) self._has_booted = True def reboot(self) -> None: if not self.is_local: raise RuntimeError("Can't reboot a remote Server") receivers = self._receivers self.quit() self.boot() receivers.update( self._receivers ) self._receivers = receivers self.execute_init_hooks() self.sync() def ping(self): raise NotImplementedError def quit(self) -> None: print("Quitting SCServer... ", end="") try: self.msg(MasterControlCommand.QUIT, bundle=False) except OSCCommunicationError: pass finally: super().quit() self._server_running = False if self._is_local: self._has_booted = False self.process.kill() print("Done.") def sync(self, timeout=5) -> bool: sync_id = randint(1000, 9999) msg = OSCMessage(MasterControlCommand.SYNC, sync_id) return sync_id == self.send(msg, timeout=timeout, bundle=False) def send_synthdef(self, synthdef_bytes: bytes): SynthDef.send(synthdef_bytes=synthdef_bytes, server=self) def load_synthdef(self, synthdef_path: str): SynthDef.load(synthdef_path=synthdef_path, server=self) def load_synthdefs( self, synthdef_dir: Optional[str] = None, completion_msg: Optional[bytes] = None, ) -> None: SynthDef.load_dir( synthdef_dir=synthdef_dir, completion_msg=completion_msg, server=self, ) def notify( self, receive_notifications: bool = True, client_id: Optional[int] = None, timeout: float = 1, ) -> None: flag = 1 if receive_notifications else 0 client_id = client_id if client_id is not None else self._client_id msg = OSCMessage( MasterControlCommand.NOTIFY, [flag, client_id] ) try: return_val = self.send(msg, timeout=timeout, bundle=False) except OSCCommunicationError as error: errors = self._get_errors_for_address(msg.address) if len(errors) > 0: last_error_value = errors[-1] if isinstance(last_error_value, tuple): message, *rest = last_error_value else: message = last_error_value rest = None if "already registered" in message: self._client_id = rest[0] return elif "too many users" in message: raise RuntimeError( "scsynth has too many users. Can't notify." 
) from error elif "not registered" in message: return raise error else: if receive_notifications: self._client_id, self._max_logins = return_val def free_all(self, root: bool = True) -> None: group = self._root_node if root else self.default_group group.free_all() self.clear_schedule() if root: self.send_default_groups() else: self.default_group.new() self.execute_init_hooks() self.sync() def clear_schedule(self): self.msg(MasterControlCommand.CLEAR_SCHED) def send_default_groups(self) -> None: client_ids = range(self._max_logins) def create_default_group(client_id) -> Group: return Group( nodeid=2 ** 26 * client_id + 1, target=0, server=self, new=True ) self._default_groups = { client: create_default_group(client) for client in client_ids } @property def client_id(self): return self._client_id @property def max_logins(self): return self._max_logins @property def default_group(self): return self._default_groups[self._client_id] @property def input_bus(self) -> Bus: return self._input_bus @property def output_bus(self) -> Bus: return self._output_bus @property def volume(self) -> float: return self._volume.volume @volume.setter def volume(self, volume): self._volume.volume = volume @property def muted(self) -> bool: return self._volume.muted @muted.setter def muted(self, muted): self._volume.muted = muted def mute(self) -> None: self._volume.mute() def unmute(self) -> None: self._volume.unmute() def version(self) -> ServerVersion: msg = OSCMessage(MasterControlCommand.VERSION) return ServerVersion._make(self.send(msg, bundle=False)) def status(self) -> ServerStatus: msg = OSCMessage(MasterControlCommand.STATUS) return ServerStatus._make(self.send(msg, bundle=False)[1:]) def dump_osc(self, level: int = 1) -> None: msg = OSCMessage(MasterControlCommand.DUMP_OSC, [level]) self.send(msg) def dump_tree(self, controls: bool = True, return_tree=False) -> Optional[str]: if not self.is_local or self.process is None: warnings.warn("Server is not local or not booted. Use query_tree") return self.process.read() msg = OSCMessage(GroupCommand.DUMP_TREE, [0, 1 if controls else 0]) self.send(msg, bundle=False) node_tree = self.process.read(expect="NODE TREE") print(node_tree) if return_tree: return node_tree def query_tree(self, include_controls: bool = True) -> Group: return self._root_node.query_tree(include_controls=include_controls) @property def peak_cpu(self) -> float: return self.status().peak_cpu @property def avg_cpu(self) -> float: return self.status().peak_cpu @property def nominal_sr(self) -> float: return self.status().nominal_sr @property def actual_sr(self) -> float: return self.status().actual_sr @property def num_synths(self) -> int: return self.status().num_synths @property def num_groups(self) -> int: return self.status().num_groups @property def num_ugens(self) -> int: return self.status().num_ugens @property def num_synthdefs(self) -> int: return self.status().num_synthdefs @property def addr(self) -> Tuple[str, int]: return (self._scsynth_address, self._scsynth_port) @property def has_booted(self) -> bool: return self._has_booted @property def is_running(self) -> bool: return self._server_running @property
MIT License
elcorto/pwtools
pwtools/crys.py
scell
python
def scell(obj, dims, method=1, **kwds): if 'direc' not in kwds: kwds['direc'] = 1 mask = scell_mask(*tuple(dims), **kwds) nmask = mask.shape[0] if obj.is_struct: sc_cell = obj.cell * np.asarray(dims)[:,None] container = Structure elif obj.is_traj: sc_cell = obj.cell * np.asarray(dims)[None,:,None] container = Trajectory else: raise Exception("unknown input type") if method == 1: sc_symbols = np.array(obj.symbols).repeat(nmask).tolist() if (obj.symbols is not None) else None if obj.is_struct: sc_coords_frac = (obj.coords_frac[:,None,:] + mask[None,...]).reshape(obj.natoms*nmask,3) elif obj.is_traj: sc_coords_frac = (obj.coords_frac[...,None,:] + mask[None,None,...]).reshape(obj.nstep,obj.natoms*nmask,3) else: raise Exception("huh!?") elif method == 2: if obj.is_struct: sc_symbols = [] sc_coords_frac = np.empty((nmask*obj.natoms, 3), dtype=float) k = 0 for iatom in range(obj.natoms): for j in range(nmask): if obj.symbols is not None: sc_symbols.append(obj.symbols[iatom]) sc_coords_frac[k,:] = obj.coords_frac[iatom,:] + mask[j,:] k += 1 else: raise Exception("method=2 only implemented for Structure") else: raise Exception("unknown method: %s" %repr(method)) sc_coords_frac[...,0] /= dims[0] sc_coords_frac[...,1] /= dims[1] sc_coords_frac[...,2] /= dims[2] return container(coords_frac=sc_coords_frac, cell=sc_cell, symbols=sc_symbols)
Build supercell based on `dims`. Uses coords_frac and cell. Parameters ---------- obj : Structure or Trajectory dims : tuple (nx, ny, nz) for a N = nx * ny * nz supercell method : int, optional Switch between numpy-ish (1) or loop (2) implementation. (2) should always produce correct results but is subtly slower. Only for Structure. **kwds : see :func:`scell_mask` Notes ----- The mask for the supercell is created by :func:`scell_mask` and applied to each atom in `obj` one after another, i.e. each atom is repeated nx*ny*nz times according to the mask pattern, regardless of what the pattern looks like (e.g. the `direc` parameter in :func:`scell_mask`). So, just as rows in np.repeat(), we have: | original: symbols=[A,B,C,D] | 2 x 1 x 1: symbols=[A,A,B,B,C,C,D,D] | nx x ny x nz: symbols=[(nx*ny*nz) x A, (nx*ny*nz) x B, ...] Returns ------- scell : Structure or Trajectory
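A minimal usage sketch, assuming pwtools is importable; the rock-salt-like coordinates and symbols are made up, and the attribute access on the result follows what the function body itself uses (coords_frac, cell, symbols).

import numpy as np
from pwtools.crys import Structure, scell

st = Structure(coords_frac=np.array([[0.0, 0.0, 0.0],
                                     [0.5, 0.5, 0.5]]),
               cell=np.identity(3) * 3.0,
               symbols=['Na', 'Cl'])

# 2 x 2 x 2 supercell: each atom is repeated 8 times, cell vectors are scaled by dims.
sc = scell(st, (2, 2, 2))
print(sc.symbols[:8])        # eight 'Na' entries first, matching the np.repeat-like pattern
print(sc.coords_frac.shape)  # (16, 3)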
https://github.com/elcorto/pwtools/blob/99831540c6eb1fc7e8bd9b1ce61375b330f4f43e/pwtools/crys.py#L558-L658
from math import acos, pi, sin, cos, sqrt import textwrap import time import tempfile import copy import itertools import numpy as np from scipy.linalg import inv from pwtools import common, signal, num, atomic_data, constants, _flib from pwtools.common import assert_cond from pwtools.decorators import crys_add_doc from pwtools.base import FlexibleGetters from pwtools.constants import Angstrom from pwtools.num import fempty, rms3d, match_mask, norm import warnings def angle(x,y): return acos(np.dot(x,y)/norm(x)/norm(y))*180.0/pi @crys_add_doc def volume_cell(cell): assert_cond(cell.shape == (3,3), "input must be (3,3) array") return abs(np.linalg.det(cell)) def volume_cell3d(cell, axis=0): assert cell.ndim == 3 sl = [slice(None)]*cell.ndim ret = [] for ii in range(cell.shape[axis]): sl[axis] = ii ret.append(volume_cell(cell[tuple(sl)])) return np.array(ret) @crys_add_doc def volume_cc(cryst_const): assert len(cryst_const) == 6, "shape must be (6,)" a = cryst_const[0] b = cryst_const[1] c = cryst_const[2] alpha = cryst_const[3]*pi/180 beta = cryst_const[4]*pi/180 gamma = cryst_const[5]*pi/180 return a*b*c*sqrt(1+ 2*cos(alpha)*cos(beta)*cos(gamma) - cos(alpha)**2 - cos(beta)**2 - cos(gamma)**2) def volume_cc3d(cryst_const, axis=0): assert cryst_const.ndim == 2 sl = [slice(None)]*cryst_const.ndim ret = [] for ii in range(cryst_const.shape[axis]): sl[axis] = ii ret.append(volume_cc(cryst_const[tuple(sl)])) return np.array(ret) @crys_add_doc def cell2cc(cell): cell = np.asarray(cell) assert_cond(cell.shape == (3,3), "cell must be (3,3) array") cryst_const = np.empty((6,), dtype=float) cryst_const[:3] = np.sqrt((cell**2.0).sum(axis=1)) va = cell[0,:] vb = cell[1,:] vc = cell[2,:] cryst_const[3] = angle(vb,vc) cryst_const[4] = angle(va,vc) cryst_const[5] = angle(va,vb) return cryst_const def cell2cc3d(cell, axis=0): assert cell.ndim == 3 sl = [slice(None)]*cell.ndim ret = [] for ii in range(cell.shape[axis]): sl[axis] = ii ret.append(cell2cc(cell[tuple(sl)])) return np.array(ret) @crys_add_doc def cc2cell(cryst_const): a = cryst_const[0] b = cryst_const[1] c = cryst_const[2] alpha = cryst_const[3]*pi/180 beta = cryst_const[4]*pi/180 gamma = cryst_const[5]*pi/180 va = np.array([a,0,0]) vb = np.array([b*cos(gamma), b*sin(gamma), 0]) cx = c*cos(beta) cy = c*(cos(alpha) - cos(beta)*cos(gamma))/sin(gamma) cz = sqrt(c**2 - cy**2 - cx**2) vc = np.array([cx, cy, cz]) return np.array([va, vb, vc]) def cc2cell3d(cryst_const, axis=0): assert cryst_const.ndim == 2 sl = [slice(None)]*cryst_const.ndim ret = [] for ii in range(cryst_const.shape[axis]): sl[axis] = ii ret.append(cc2cell(cryst_const[tuple(sl)])) return np.array(ret) @crys_add_doc def recip_cell(cell): cell = np.asarray(cell, dtype=float) assert_cond(cell.shape == (3,3), "cell must be (3,3) array") rcell = np.empty_like(cell) vol = volume_cell(cell) a = cell[0,:] b = cell[1,:] c = cell[2,:] rcell[0,:] = 2*pi/vol * np.cross(b,c) rcell[1,:] = 2*pi/vol * np.cross(c,a) rcell[2,:] = 2*pi/vol * np.cross(a,b) return rcell def grid_in_cell(cell, h=None, size=None, minpoints=1, even=False, fullout=False): spacing = h assert None in [spacing, size], "use either `h` or `size`" assert minpoints >= 0 cell = np.asarray(cell, dtype=float) norms = np.sqrt((cell**2.0).sum(axis=1)) if size is None: size = np.round(norms / spacing) if even: size += (size % 2.0) size = size.astype(int) mask = size < minpoints if mask.any(): size[mask] = minpoints if (size == 0).any(): raise Exception("at least one point count is zero, decrease `spacing`, " "size=%s" %str(size)) if 
fullout: return size, norms * 1.0 / size else: return size.astype(int) else: size = np.array(size) return norms * 1.0 / size def kgrid(cell, **kwds): if 'dk' in kwds: warnings.warn("`dk` is deprecated, use `h` instead", DeprecationWarning) kwds['h'] = kwds['dk'] kwds.pop('dk') return grid_in_cell(recip_cell(cell), **kwds) @crys_add_doc def cc2celldm(cryst_const, fac=1.0): assert len(cryst_const) == 6, ("cryst_const has length != 6") celldm = np.empty((6,), dtype=np.float) a,b,c,alpha,beta,gamma = np.asarray(cryst_const, dtype=np.float) celldm[0] = a*fac celldm[1] = b/a celldm[2] = c/a celldm[3] = cos(alpha*pi/180.0) celldm[4] = cos(beta*pi/180.0) celldm[5] = cos(gamma*pi/180.0) return celldm @crys_add_doc def celldm2cc(celldm, fac=1.0): assert len(celldm) == 6, ("celldm has length != 6") cryst_const = np.empty((6,), dtype=np.float) a,ba,ca,cos_alpha,cos_beta,cos_gamma = np.asarray(celldm, dtype=np.float) a = a*fac cryst_const[0] = a cryst_const[1] = ba * a cryst_const[2] = ca * a cryst_const[3] = acos(cos_alpha) / pi * 180.0 cryst_const[4] = acos(cos_beta) / pi * 180.0 cryst_const[5] = acos(cos_gamma) / pi * 180.0 return cryst_const def scell_mask(nx, ny, nz, direc=1): if direc == 1: mkrange = lambda x: range(0,x) if x >= 0 else range(0,x,-1) elif direc == -1: mkrange = lambda x: range(x-1,-1,-1) if x >= 0 else range(x+1,1) return np.array([k for k in itertools.product(mkrange(nx), mkrange(ny), mkrange(nz))], dtype=float)
BSD 3-Clause New or Revised License
pythoncloudframeworks/bunny-storm
bunny_storm/async_adapter.py
AsyncAdapter._wait_result
python
async def _wait_result(self, corr_id: str, timeout: Union[int, float, None] = None) -> asyncio.Future: self.logger.info(f"Starting to wait for result. {corr_id}") try: future = self._rpc_corr_id_dict[corr_id] except KeyError: future = asyncio.Future() future.set_exception(KeyError(f"No future exists for correlation ID {corr_id}")) return future try: await asyncio.wait_for(future, timeout=timeout) except asyncio.TimeoutError: self.logger.error(f"RPC timeout. Correlation id: {corr_id}") del self._rpc_corr_id_dict[corr_id] future = asyncio.Future() future.set_exception(TimeoutError(f'RPC timeout. Correlation id: {corr_id}')) return future
Wait for the RPC result for the given correlation ID until the given timeout expires. :param corr_id: Correlation ID to wait for :param timeout: Timeout in seconds :return: Future holding the result, or carrying an exception if the correlation ID is unknown or the timeout fires
https://github.com/pythoncloudframeworks/bunny-storm/blob/2bec24bc475dc84ff83f8415de4a836a9356c949/bunny_storm/async_adapter.py#L268-L291
import asyncio import logging from logging import Logger import uuid import sys from types import FunctionType from typing import Union, Dict from aio_pika import Message, DeliveryMode, IncomingMessage from . import RabbitMQConnectionData, AsyncConnection, Consumer, Publisher class AsyncAdapter: _rabbitmq_connection_data: RabbitMQConnectionData _logger: Logger _loop: asyncio.AbstractEventLoop _configuration: dict _publishers: Dict[str, Publisher] _consumers: Dict[str, Consumer] def __init__(self, rabbitmq_connection_data: RabbitMQConnectionData, configuration: dict, logger: Logger = None, loop: asyncio.AbstractEventLoop = None, connection_properties: dict = None, connection_timeout: Union[int, float] = 10, connection_attempts: int = 5, attempt_backoff: int = 5): self._rabbitmq_connection_data = rabbitmq_connection_data self._logger = logger or self._create_logger() self._loop = loop or asyncio.get_event_loop() self._configuration = configuration self._connection = AsyncConnection( rabbitmq_connection_data, self.logger, self._loop, connection_properties, connection_timeout, connection_attempts, attempt_backoff ) self._rpc_corr_id_dict = dict() self._default_publisher = Publisher(connection=self._connection, logger=self.logger, exchange_name="", loop=self._loop) self._publishers = dict() for publish_configuration in self._configuration.get("publish", dict()).values(): self.add_publisher(publish_configuration) self._consumers = dict() for receive_configuration in self._configuration.get("receive", dict()).values(): self.add_consumer(receive_configuration) @staticmethod def _create_logger() -> Logger: logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) sh = logging.StreamHandler(sys.stdout) formatter = logging.Formatter('[%(asctime)s] [%(filename)-25s] [%(levelname)-8s] %(message)s') sh.setFormatter(formatter) sh.setLevel(logging.DEBUG) logger.addHandler(sh) logger.propagate = False return logger @property def logger(self) -> Logger: return self._logger def add_publisher(self, configuration: dict) -> Publisher: publisher = Publisher(connection=self._connection, logger=self.logger, loop=self._loop, **configuration) self._publishers[configuration["exchange_name"]] = publisher return publisher def add_consumer(self, configuration: dict) -> Consumer: consumer = Consumer(connection=self._connection, logger=self.logger, loop=self._loop, **configuration) self._consumers[configuration["queue_name"]] = consumer return consumer async def publish(self, body: bytes, exchange: str, routing_key: str = None, properties: dict = None, mandatory: bool = True, immediate: bool = False, timeout: Union[int, float, None] = None): self.logger.info("Trying to publish message") if properties is None: properties = dict(delivery_mode=DeliveryMode.PERSISTENT) try: message = Message(body, **properties) if exchange == "": await self._default_publisher.default_exchange_publish( message, routing_key=routing_key, mandatory=mandatory, immediate=immediate, timeout=timeout) else: publisher = self._publishers.get(exchange) if publisher is None: raise KeyError(f"There is no publisher for the given exchange: {exchange}") await publisher.publish( message, routing_key=routing_key, mandatory=mandatory, immediate=immediate, timeout=timeout) except Exception: self.logger.exception("Failed to publish message") raise async def receive(self, handler, queue: str, no_ack: bool = False, exclusive: bool = False) -> None: consumer = self._consumers.get(queue) if consumer is None: raise KeyError(f"There is no consumer for the 
given queue: {queue}") try: await consumer.consume(self._on_message, handler=handler, no_ack=no_ack, exclusive=exclusive) except Exception: self.logger.exception("Failed to receive message.") raise async def _on_message(self, message: IncomingMessage, handler: FunctionType) -> None: self.logger.info("Received a new message") try: result = await handler(self.logger, message) self.logger.info("Message has been processed successfully") if message.reply_to is not None: self.logger.info(f"Sending result back to " f"queue: {message.reply_to}, correlation id: {message.correlation_id}") response_message = Message(body=result, correlation_id=message.correlation_id, reply_to=message.reply_to) await self._default_publisher.default_exchange_publish(message=response_message, routing_key=message.reply_to, mandatory=False) self.logger.info(f"Sent result back to caller. " f"Queue: {message.reply_to}, correlation id: {message.correlation_id}") except Exception: self.logger.exception("Failed to handle received message.") raise finally: message.ack() async def rpc(self, body: bytes, receive_queue: str, publish_exchange: str, timeout: Union[int, float], ttl: int) -> bytes: consumer = self._consumers.get(receive_queue) if consumer is None: raise KeyError(f"There is no consumer for the given queue: {receive_queue}") self.logger.info(f"Preparing to rpc call. Publish exchange: {publish_exchange}; Receive queue: {receive_queue}") await consumer.consume(self._rpc_response_callback) correlation_id = self._prepare_rpc_correlation_id() properties = dict(correlation_id=correlation_id, reply_to=receive_queue, expiration=ttl * 1000) await self.publish(body, publish_exchange, properties=properties, mandatory=True) self.logger.info(f"RPC message has been sent. {correlation_id}") future = await self._wait_result(correlation_id, timeout) self.logger.info(f"RPC message gets response. {correlation_id}") if future.exception(): self.logger.error(f"RPC future returned exception: {future.exception()}") raise future.exception() if correlation_id in self._rpc_corr_id_dict: del self._rpc_corr_id_dict[correlation_id] return future.result() def _prepare_rpc_correlation_id(self) -> str: correlation_id = str(uuid.uuid1()) while correlation_id in self._rpc_corr_id_dict: correlation_id = str(uuid.uuid1()) self.logger.info(f"Starting rpc calling correlation id: {correlation_id}") future = self._loop.create_future() self._rpc_corr_id_dict[correlation_id] = future return correlation_id def _rpc_response_callback(self, message: IncomingMessage) -> None: self.logger.info(f"Received RPC response. Correlation id: {message.correlation_id}") if message.correlation_id in self._rpc_corr_id_dict: self.logger.info(f"Received RPC response. Correlation id: {message.correlation_id} was found") self._rpc_corr_id_dict[message.correlation_id].set_result(message.body) else: self.logger.warning(f"Received unexpected RPC response. Correlation id: {message.correlation_id}") message.ack()
MIT License
jgurtowski/ectools
stats.py
nstarsToString
python
def nstarsToString(nstars): s = "N{star}={length} N{star}cnt={count}" return "\n".join(map(lambda x: s.format(**dict(x._asdict())), nstars))
Convert a list of NStar tuples into a printable string, one 'N<star>=<length> N<star>cnt=<count>' line per entry.
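A small worked example, assuming this module is importable as stats; the read lengths are made up and must be sorted in descending order, as the other helpers in this file expect.

from stats import NStar, nstarsToString

lengths = [5000, 4000, 3000, 2000, 1000]   # descending order
genome_size = 10000

# NStar returns one callable per increment; apply each to the length list.
nstars = [fn(lengths) for fn in NStar([50, 90], genome_size)]
print(nstarsToString(nstars))
# N50=5000 N50cnt=1
# N90=4000 N90cnt=2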
https://github.com/jgurtowski/ectools/blob/031eb0300c82392915d8393a5fedb4d3452b15bf/stats.py#L148-L152
from itertools import imap from collections import namedtuple from operator import add, attrgetter, lt from functools import partial from math import sqrt, ceil from string import ljust from strutil import strAppend BasicStats = namedtuple('BasicStats', ["n","min","max", "mean","stdev", "sum","cov"]) NStarType = namedtuple('NStar', ['star','count', 'length']) BigIncrement = namedtuple('BigIncrement', ['increment', 'count', 'bases', 'coverage']) SpanCov = namedtuple('SpanCov', ['increment', 'count', 'bases', 'coverage']) HistBin = namedtuple('HistBin', ['bin', 'count']) ExtendedStats = namedtuple('ExtendedStats', ['basic', 'nstar', 'bigs', 'hist', 'spancovs', 'genome_size']) def SpanningCoverage(increments,genome_size =None): def _SC(inc): def _N(lens): cnt = 0 bases_greater = 0 for l in lens: if l > inc: bases_greater += l-inc cnt += 1 cov = bases_greater / float(genome_size) if genome_size else None return SpanCov(inc, cnt, bases_greater, cov) return _N return map(_SC, increments) def NStar(increments, genome_size): def _Nstar(inc): def _N(data): cutoff = genome_size * (inc/100.0) cumsum = 0 cnt = 0 for l in data: cnt += 1 cumsum += l if cumsum >= cutoff: return NStarType(inc,cnt,l) return NStarType(inc,float('NaN'),float('NaN')) return _N return map(_Nstar, increments) def LBig(length_increments,genome_size=None): def _LBig(inc): def _L(data): cnt = 0 bases = 0 cov = None if genome_size: cov = 0 for l in data: if l <= inc: break cnt += 1 bases += l if genome_size: cov = bases / float(genome_size) return BigIncrement(inc,cnt,bases,cov) return _L return map(_LBig, length_increments) def Hist(increments, bin_size): def _Hist(inc): def _H(lens): cond = lambda x: x >= inc and x < inc+bin_size return HistBin(inc,len(filter(cond, lens))) return _H return map(_Hist, increments) def getBasicStats(lengths, genome_size = None): num = len(lengths) total = sum(lengths) mean = total / float(num) stdev = sqrt(reduce(add,imap( lambda y: y*y, imap( lambda x : x-mean, lengths))) / float(num)) cov = None if genome_size: cov = total / float(genome_size) minimum = lengths[-1] maximum = lengths[0] return BasicStats(num, minimum, maximum, mean, stdev, total,cov) def basicStatsToString(basic_stats): s = "n={n} [{min}, {max}] {mean:.2f} +/- {stdev:.2f} sum={sum}" fmtstr = s.format(**dict(basic_stats._asdict())) if basic_stats.cov: fmtstr += " cov={0:.2f}".format(basic_stats.cov) return fmtstr
BSD 3-Clause New or Revised License
mirantis/ceph-lcm
decapodcli/decapodcli/decorators.py
filtered_output
python
def filtered_output(func): if not utils.JSON_FILTERS: return func func = click.option( "--filtered", type=param_types.FILTERED_OUTPUT, default="", help=( "Filter output using expression engines. Valid options are {0}." "To setup, you first need to put engine type upfront semicolon " "and expression after. Example: 'jq:.[]|.id'. Please use " "correspond documentation on engines." ).format(sorted(utils.JSON_FILTERS)))(func) @six.wraps(func) def decorator(filtered, *args, **kwargs): response = func(*args, **kwargs) if not filtered: return response expression, converter = filtered return converter(expression, response) return decorator
Decorator to support filtered output.
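A hedged sketch of applying the decorator to a click command callback, mirroring how the command() helper below wires it in; the command name and the static return value are made up. Note that filtered_output is a no-op when utils.JSON_FILTERS is empty (i.e. no filter engine is installed).

import click
from decapodcli.decorators import filtered_output

@click.group()
def cli():
    pass

@cli.command(name="user-list")
@filtered_output            # adds the --filtered option and post-processes the return value
def user_list():
    # A real command would call the Decapod API; static data keeps the sketch self-contained.
    return [{"id": 1, "name": "admin"}, {"id": 2, "name": "operator"}]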
https://github.com/mirantis/ceph-lcm/blob/fad9bad0b94f2ef608362953583b10a54a841d24/decapodcli/decapodcli/decorators.py#L305-L331
from __future__ import absolute_import from __future__ import unicode_literals import os import click import six from decapodcli import param_types from decapodcli import utils from decapodlib import exceptions try: import pygments except ImportError: pygments = None def catch_errors(func): @six.wraps(func) @click.pass_context def decorator(ctx, *args, **kwargs): try: return func(*args, **kwargs) except exceptions.DecapodAPIError as exc: utils.format_output_json(ctx, exc.json, True) except exceptions.DecapodError as exc: click.echo(six.text_type(exc), err=True) finally: ctx.close() ctx.exit(os.EX_SOFTWARE) return decorator def with_client(func): @six.wraps(func) @click.pass_context def decorator(ctx, *args, **kwargs): kwargs["client"] = ctx.obj["client"] return func(*args, **kwargs) return decorator def format_output(func): @six.wraps(func) @click.pass_context def decorator(ctx, *args, **kwargs): response = func(*args, **kwargs) if not response: return if ctx.obj["format"] == "json": utils.format_output_json(ctx, response) return decorator def with_color(func): if pygments is None: def decorator(*args, **kwargs): kwargs["color"] = None return func(*args, **kwargs) else: decorator = click.option( "--color", default=None, type=click.Choice(["light", "dark"]), help=( "Colorize output. By default no color is used. " "Parameter means colorscheme of the terminal") )(func) decorator = six.wraps(func)(decorator) return decorator def with_pagination(func): @six.wraps(func) @click.option( "--page", "-p", type=int, default=None, help="Page to request." ) @click.option( "--per_page", "-r", type=int, default=None, help="How many items should be displayed per page." ) @click.option( "--all", "-a", is_flag=True, help=( "Show all items, without pagination. " "Default behavior, 'page' and 'per_page' options disable this " "option." ) ) @click.option( "--list", "-l", type=click.Choice(["active", "archived", "all"]), show_default=True, default="active", help="List only certain class of elements. 'active' is default." ) @click.option( "--sort-by", "-s", default="", type=param_types.SORT_BY, help=( "Comma-separated list of fieldnames for sorting. To define " "direction, please put '-' or '+' before name ('+' explicitly " "means). For example: 'time_deleted,-name,+version' means " "that sorting will be done by tuple (time_deleted ASC, " "name DESC, version ASC)" ) ) @click.option( "--no-envelope", "-n", is_flag=True, help=( "Remove pagination envelope, just list items. If all items " "requested, this implicitly meant." 
) ) @click.pass_context def decorator(ctx, *args, **kwargs): all_items = kwargs.pop("all", None) page = kwargs.pop("page", None) per_page = kwargs.pop("per_page", None) no_envelope = kwargs.pop("no_envelope", None) list_elements = kwargs.pop("list", "active") sort_by = kwargs.pop("sort_by", {}) all_items = all_items or not (page or per_page) no_envelope = all_items or no_envelope if all_items: query_params = {"all_items": True} else: query_params = { "page": page, "per_page": per_page } query_params["filter"] = {} if list_elements == "all": query_params["filter"]["time_deleted"] = { "ne": "unreal_value" } elif list_elements == "archived": query_params["filter"]["time_deleted"] = { "ne": 0 } else: del query_params["filter"] if sort_by: query_params["sort_by"] = sort_by kwargs["query_params"] = query_params response = func(*args, **kwargs) if no_envelope and response: response = response["items"] return response return decorator def model_edit(item_id, fetch_method_name, parse_json=True): def outer_decorator(func): @six.wraps(func) @click.option( "--model-editor", is_flag=True, help=( "Fetch model and launch editor to fix stuff. Please pay " "attention that only 'data' field will be available for " "editing." ) ) @click.option( "--model", default=None, type=param_types.JSON, help=( "Full model data. If this parameter is set, other options " "won't be used. This parameter is JSON dump of the model." ) ) @click.option( "--model-stdin", is_flag=True, help="Slurp model from stdin." ) @click.pass_context def inner_decorator(ctx, model_stdin, model_editor, model, *args, **kwargs): if not model: if model_stdin: stream = click.get_text_stream("stdin") model = "".join(stream) elif model_editor: fetch_function = getattr( ctx.obj["client"], fetch_method_name) model = fetch_function(kwargs[item_id]) if "data" in model: updated_data = utils.json_dumps(model["data"]) updated_data = click.edit(updated_data) if not updated_data: return updated_data = utils.json_loads(updated_data) model = fetch_function(kwargs[item_id]) model["data"] = updated_data model = utils.json_dumps(model) else: model = utils.json_dumps(model) model = click.edit(model) if (model_stdin or model_editor) and not model: return if model and parse_json and not isinstance(model, dict): if isinstance(model, bytes): model = model.decode("utf-8") model = utils.json_loads(model) kwargs["model"] = model return func(*args, **kwargs) return inner_decorator return outer_decorator def command(command_class, paginate=False, filtered=False): def decorator(func): func = with_client(func) if paginate: func = with_pagination(func) if filtered: func = filtered_output(func) func = format_output(func) func = catch_errors(func) name = utils.parameter_name(func.__name__) func = command_class.command(name=name)(func) return func return decorator
Apache License 2.0
shannonai/fast-knn-nmt
thirdparty/fairseq/fairseq/data/audio/speech_to_text_dataset.py
S2TDataConfig.sampling_alpha
python
def sampling_alpha(self): return self.config.get("sampling_alpha", 1.0)
Hyper-parameter alpha = 1/T for temperature-based resampling. (alpha = 1 for no resampling)
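A short sketch of the usual temperature-based resampling rule that this hyper-parameter feeds; the formula below reflects the standard recipe (size ratios proportional to smoothed dataset proportions), not necessarily the exact code elsewhere in this file.

import numpy as np

def size_ratios(sizes, alpha=1.0):
    # alpha = 1/T; alpha = 1.0 keeps the original proportions (no resampling).
    sizes = np.asarray(sizes, dtype=float)
    prob = sizes / sizes.sum()             # original per-dataset proportions
    smoothed = prob ** alpha               # temperature smoothing
    smoothed = smoothed / smoothed.sum()
    return smoothed * sizes.sum() / sizes  # up-/down-sampling ratio per dataset

print(size_ratios([900, 100], alpha=1.0))   # [1. 1.]
print(size_ratios([900, 100], alpha=0.5))   # up-samples the smaller dataset (~[0.83, 2.5])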
https://github.com/shannonai/fast-knn-nmt/blob/27bbdd967befe06bfbfde11ab9cfa34b4aa46482/thirdparty/fairseq/fairseq/data/audio/speech_to_text_dataset.py#L91-L94
import csv import io import logging import os.path as op import re from typing import Dict, List, Optional, Tuple import numpy as np import torch from fairseq.data import ( ConcatDataset, Dictionary, FairseqDataset, ResamplingDataset, data_utils as fairseq_data_utils, ) from fairseq.data.audio.audio_utils import get_fbank, get_waveform from fairseq.data.audio.feature_transforms import CompositeAudioFeatureTransform logger = logging.getLogger(__name__) class S2TDataConfig(object): def __init__(self, yaml_path): try: import yaml except ImportError: print("Please install PyYAML to load YAML files for " "S2T data config") self.config = {} if op.isfile(yaml_path): try: with open(yaml_path) as f: self.config = yaml.load(f, Loader=yaml.FullLoader) except Exception as e: logger.info(f"Failed to load config from {yaml_path}: {e}") else: logger.info(f"Cannot find {yaml_path}") @property def vocab_filename(self): return self.config.get("vocab_filename", "dict.txt") @property def shuffle(self) -> bool: return self.config.get("shuffle", False) @property def pre_tokenizer(self) -> Dict: return self.config.get("pre_tokenizer", {"tokenizer": None}) @property def bpe_tokenizer(self) -> Dict: return self.config.get("bpe_tokenizer", None) @property def prepend_tgt_lang_tag(self) -> bool: return self.config.get("prepend_tgt_lang_tag", False) @property def input_feat_per_channel(self): return self.config.get("input_feat_per_channel", 80) @property def input_channels(self): return self.config.get("input_channels", 1) @property
Apache License 2.0
donghun2018/seqdecisionlib-release
ClinicalTrials/ClinicalTrialsPolicySolutionQ6.py
ClinicalTrialsPolicy.model_A_policy
python
def model_A_policy(self, state, info_tuple): success_A = info_tuple[0] stop_A = info_tuple[1] sim_model = ClinicalTrialsModel(self.model.state_variables, self.model.decision_variables, self.model.initial_state, True) sim_model.state = copy.deepcopy(state) if stop_A==False: value_dict={} sol_dict,value_dict = model_A_value_fn(sim_model, 0, success_A,value_dict) new_decision = sol_dict['optimal_enroll'] else: new_decision=0 return new_decision
Implements the deterministic lookahead policy based on Model A. :param state: namedtuple - the state of the model at a given time :param info_tuple: tuple - contains the parameters needed to run the policy :return: a decision made based on the policy
https://github.com/donghun2018/seqdecisionlib-release/blob/a01e95e9168dd8f87751c29f94bb382f83567e71/ClinicalTrials/ClinicalTrialsPolicySolutionQ6.py#L59-L80
from collections import namedtuple import numpy as np from scipy.stats import binom import scipy import math import pandas as pd import copy from ClinicalTrialsModel import ClinicalTrialsModel import time def trunc_poisson_fn(count, mean): trunc_probs = [] sum = 0.0 for r in range(0, count): trunc_probs.insert(r, 1/math.factorial(r)*(mean**r)*np.exp(-mean)) sum += trunc_probs[r] trunc_probs.insert(count, 1-sum) return trunc_probs class ClinicalTrialsPolicy(): def __init__(self, model, policy_names): self.model = model self.policy_names = policy_names self.Policy = namedtuple('Policy', policy_names) def build_policy(self, info): return self.Policy(*[info[k] for k in self.policy_names])
MIT License
qutip/qutip
qutip/control/dynamics.py
_func_deprecation
python
def _func_deprecation(message, stacklevel=3): warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)
Issue a deprecation warning. Using stacklevel=3 ensures the message refers to the function that called with the deprecated parameter.
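A standalone illustration of the stacklevel choice, using a copy of the helper so the snippet runs on its own: with stacklevel=3 the warning is attributed to the code that called the public function with the deprecated argument, not to the helper or the public function itself.

import warnings

def _func_deprecation(message, stacklevel=3):
    warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)

def public_api(old_arg=None):
    if old_arg is not None:
        # stacklevel counts outward from warnings.warn: 1 = _func_deprecation,
        # 2 = public_api, 3 = public_api's caller.
        _func_deprecation("'old_arg' is deprecated")

warnings.simplefilter("always", DeprecationWarning)
public_api(old_arg=1)   # the emitted warning points at this line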
https://github.com/qutip/qutip/blob/5060a7d768fff05f8e016f3b19ab9e48cb656378/qutip/control/dynamics.py#L116-L122
import warnings import numpy as np import scipy.linalg as la import scipy.sparse as sp from qutip.qobj import Qobj from qutip.sparse import sp_eigs, eigh import qutip.settings as settings import qutip.logging_utils as logging logger = logging.get_logger() import qutip.control.errors as errors import qutip.control.tslotcomp as tslotcomp import qutip.control.fidcomp as fidcomp import qutip.control.propcomp as propcomp import qutip.control.symplectic as sympl import qutip.control.dump as qtrldump DEF_NUM_TSLOTS = 10 DEF_EVO_TIME = 1.0 def _is_string(var): try: if isinstance(var, basestring): return True except NameError: try: if isinstance(var, str): return True except: return False except: return False return False def _check_ctrls_container(ctrls): if isinstance(ctrls, (list, tuple)): try: if isinstance(ctrls[0], (list, tuple)): ctrls_ = np.empty((len(ctrls), len(ctrls[0])), dtype=object) for i, ctrl in enumerate(ctrls): ctrls_[i, :] = ctrl ctrls = ctrls_ except: pass if isinstance(ctrls, np.ndarray): if len(ctrls.shape) != 2: raise TypeError("Incorrect shape for ctrl dyn gen array") for k in range(ctrls.shape[0]): for j in range(ctrls.shape[1]): if not isinstance(ctrls[k, j], Qobj): raise TypeError("All control dyn gen must be Qobj") elif isinstance(ctrls, (list, tuple)): for ctrl in ctrls: if not isinstance(ctrl, Qobj): raise TypeError("All control dyn gen must be Qobj") else: raise TypeError("Controls list or array not set correctly") return ctrls def _check_drift_dyn_gen(drift): if not isinstance(drift, Qobj): if not isinstance(drift, (list, tuple)): raise TypeError("drift should be a Qobj or a list of Qobj") else: for d in drift: if not isinstance(d, Qobj): raise TypeError( "drift should be a Qobj or a list of Qobj") warnings.simplefilter('always', DeprecationWarning) def _attrib_deprecation(message, stacklevel=3): warnings.warn(message, DeprecationWarning, stacklevel=stacklevel)
BSD 3-Clause New or Revised License
kuri65536/python-for-android
python-modules/twisted/twisted/web/sux.py
XMLParser.gotDoctype
python
def gotDoctype(self, doctype): print '!DOCTYPE', repr(doctype)
Encountered a DOCTYPE. This is really grotty: it basically just gives you everything between '<!DOCTYPE' and '>' as an argument.
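A small Python 2 sketch (matching the module) of when this callback fires; the subclass and the sample document are illustrative only.

from twisted.web.sux import XMLParser

class DoctypePrinter(XMLParser):
    def gotDoctype(self, doctype):
        print 'doctype seen:', repr(doctype)

p = DoctypePrinter()
p.connectionMade()
p.dataReceived('<!DOCTYPE html><html></html>')
# prints: doctype seen: 'html'  (plus the default tag/text callbacks for the rest)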
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/web/sux.py#L625-L631
from twisted.internet.protocol import Protocol, FileWrapper from twisted.python.reflect import prefixedMethodNames BEGIN_HANDLER = 0 DO_HANDLER = 1 END_HANDLER = 2 identChars = '.-_:' lenientIdentChars = identChars + ';+#/%~' def nop(*args, **kw): def unionlist(*args): l = [] for x in args: l.extend(x) d = dict([(x, 1) for x in l]) return d.keys() def zipfndict(*args, **kw): default = kw.get('default', nop) d = {} for key in unionlist(*[fndict.keys() for fndict in args]): d[key] = tuple([x.get(key, default) for x in args]) return d def prefixedMethodClassDict(clazz, prefix): return dict([(name, getattr(clazz, prefix + name)) for name in prefixedMethodNames(clazz, prefix)]) def prefixedMethodObjDict(obj, prefix): return dict([(name, getattr(obj, prefix + name)) for name in prefixedMethodNames(obj.__class__, prefix)]) class ParseError(Exception): def __init__(self, filename, line, col, message): self.filename = filename self.line = line self.col = col self.message = message def __str__(self): return "%s:%s:%s: %s" % (self.filename, self.line, self.col, self.message) class XMLParser(Protocol): state = None encodings = None filename = "<xml />" beExtremelyLenient = 0 _prepend = None _leadingBodyData = None def connectionMade(self): self.lineno = 1 self.colno = 0 self.encodings = [] def saveMark(self): return (self.lineno, self.colno) def _parseError(self, message): raise ParseError(*((self.filename,)+self.saveMark()+(message,))) def _buildStateTable(self): stateTable = getattr(self.__class__, '__stateTable', None) if stateTable is None: stateTable = self.__class__.__stateTable = zipfndict( *[prefixedMethodObjDict(self, prefix) for prefix in ('begin_', 'do_', 'end_')]) return stateTable def _decode(self, data): if 'UTF-16' in self.encodings or 'UCS-2' in self.encodings: assert not len(data) & 1, 'UTF-16 must come in pairs for now' if self._prepend: data = self._prepend + data for encoding in self.encodings: data = unicode(data, encoding) return data def maybeBodyData(self): if self.endtag: return 'bodydata' if (self.tagName == 'script' and not self.tagAttributes.has_key('src')): self.begin_bodydata(None) return 'waitforendscript' return 'bodydata' def dataReceived(self, data): stateTable = self._buildStateTable() if not self.state: if data.startswith('\xff\xfe'): self._prepend = '\xff\xfe' self.encodings.append('UTF-16') data = data[2:] elif data.startswith('\xfe\xff'): self._prepend = '\xfe\xff' self.encodings.append('UTF-16') data = data[2:] self.state = 'begin' if self.encodings: data = self._decode(data) lineno, colno = self.lineno, self.colno curState = self.state _saveMark = self.saveMark def saveMark(): return (lineno, colno) self.saveMark = saveMark beginFn, doFn, endFn = stateTable[curState] try: for byte in data: if byte == '\n': lineno += 1 colno = 0 else: colno += 1 newState = doFn(byte) if newState is not None and newState != curState: endFn() curState = newState beginFn, doFn, endFn = stateTable[curState] beginFn(byte) finally: self.saveMark = _saveMark self.lineno, self.colno = lineno, colno self.state = curState def connectionLost(self, reason): stateTable = self._buildStateTable() stateTable[self.state][END_HANDLER]() def do_begin(self, byte): if byte.isspace(): return if byte != '<': if self.beExtremelyLenient: self._leadingBodyData = byte return 'bodydata' self._parseError("First char of document [%r] wasn't <" % (byte,)) return 'tagstart' def begin_comment(self, byte): self.commentbuf = '' def do_comment(self, byte): self.commentbuf += byte if 
self.commentbuf.endswith('-->'): self.gotComment(self.commentbuf[:-3]) return 'bodydata' def begin_tagstart(self, byte): self.tagName = '' self.tagAttributes = {} self.termtag = 0 self.endtag = 0 def do_tagstart(self, byte): if byte.isalnum() or byte in identChars: self.tagName += byte if self.tagName == '!--': return 'comment' elif byte.isspace(): if self.tagName: if self.endtag: return 'waitforgt' return 'attrs' else: self._parseError("Whitespace before tag-name") elif byte == '>': if self.endtag: self.gotTagEnd(self.tagName) return 'bodydata' else: self.gotTagStart(self.tagName, {}) return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData() elif byte == '/': if self.tagName: return 'afterslash' else: self.endtag = 1 elif byte in '!?': if self.tagName: if not self.beExtremelyLenient: self._parseError("Invalid character in tag-name") else: self.tagName += byte self.termtag = 1 elif byte == '[': if self.tagName == '!': return 'expectcdata' else: self._parseError("Invalid '[' in tag-name") else: if self.beExtremelyLenient: self.bodydata = '<' return 'unentity' self._parseError('Invalid tag character: %r'% byte) def begin_unentity(self, byte): self.bodydata += byte def do_unentity(self, byte): self.bodydata += byte return 'bodydata' def end_unentity(self): self.gotText(self.bodydata) def begin_expectcdata(self, byte): self.cdatabuf = byte def do_expectcdata(self, byte): self.cdatabuf += byte cdb = self.cdatabuf cd = '[CDATA[' if len(cd) > len(cdb): if cd.startswith(cdb): return elif self.beExtremelyLenient: return 'waitforgt' else: self._parseError("Mal-formed CDATA header") if cd == cdb: self.cdatabuf = '' return 'cdata' self._parseError("Mal-formed CDATA header") def do_cdata(self, byte): self.cdatabuf += byte if self.cdatabuf.endswith("]]>"): self.cdatabuf = self.cdatabuf[:-3] return 'bodydata' def end_cdata(self): self.gotCData(self.cdatabuf) self.cdatabuf = '' def do_attrs(self, byte): if byte.isalnum() or byte in identChars: if self.tagName == '!DOCTYPE': return 'doctype' if self.tagName[0] in '!?': return 'waitforgt' return 'attrname' elif byte.isspace(): return elif byte == '>': self.gotTagStart(self.tagName, self.tagAttributes) return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData() elif byte == '/': return 'afterslash' elif self.beExtremelyLenient: return self._parseError("Unexpected character: %r" % byte) def begin_doctype(self, byte): self.doctype = byte def do_doctype(self, byte): if byte == '>': return 'bodydata' self.doctype += byte def end_doctype(self): self.gotDoctype(self.doctype) self.doctype = None def do_waitforgt(self, byte): if byte == '>': if self.endtag or not self.beExtremelyLenient: return 'bodydata' return self.maybeBodyData() def begin_attrname(self, byte): self.attrname = byte self._attrname_termtag = 0 def do_attrname(self, byte): if byte.isalnum() or byte in identChars: self.attrname += byte return elif byte == '=': return 'beforeattrval' elif byte.isspace(): return 'beforeeq' elif self.beExtremelyLenient: if byte in '"\'': return 'attrval' if byte in lenientIdentChars or byte.isalnum(): self.attrname += byte return if byte == '/': self._attrname_termtag = 1 return if byte == '>': self.attrval = 'True' self.tagAttributes[self.attrname] = self.attrval self.gotTagStart(self.tagName, self.tagAttributes) if self._attrname_termtag: self.gotTagEnd(self.tagName) return 'bodydata' return self.maybeBodyData() return self._parseError("Invalid attribute name: %r %r" % (self.attrname, byte)) def do_beforeattrval(self, byte): if byte 
in '"\'': return 'attrval' elif byte.isspace(): return elif self.beExtremelyLenient: if byte in lenientIdentChars or byte.isalnum(): return 'messyattr' if byte == '>': self.attrval = 'True' self.tagAttributes[self.attrname] = self.attrval self.gotTagStart(self.tagName, self.tagAttributes) return self.maybeBodyData() if byte == '\\': return self._parseError("Invalid initial attribute value: %r; Attribute values must be quoted." % byte) attrname = '' attrval = '' def begin_beforeeq(self,byte): self._beforeeq_termtag = 0 def do_beforeeq(self, byte): if byte == '=': return 'beforeattrval' elif byte.isspace(): return elif self.beExtremelyLenient: if byte.isalnum() or byte in identChars: self.attrval = 'True' self.tagAttributes[self.attrname] = self.attrval return 'attrname' elif byte == '>': self.attrval = 'True' self.tagAttributes[self.attrname] = self.attrval self.gotTagStart(self.tagName, self.tagAttributes) if self._beforeeq_termtag: self.gotTagEnd(self.tagName) return 'bodydata' return self.maybeBodyData() elif byte == '/': self._beforeeq_termtag = 1 return self._parseError("Invalid attribute") def begin_attrval(self, byte): self.quotetype = byte self.attrval = '' def do_attrval(self, byte): if byte == self.quotetype: return 'attrs' self.attrval += byte def end_attrval(self): self.tagAttributes[self.attrname] = self.attrval self.attrname = self.attrval = '' def begin_messyattr(self, byte): self.attrval = byte def do_messyattr(self, byte): if byte.isspace(): return 'attrs' elif byte == '>': endTag = 0 if self.attrval.endswith('/'): endTag = 1 self.attrval = self.attrval[:-1] self.tagAttributes[self.attrname] = self.attrval self.gotTagStart(self.tagName, self.tagAttributes) if endTag: self.gotTagEnd(self.tagName) return 'bodydata' return self.maybeBodyData() else: self.attrval += byte def end_messyattr(self): if self.attrval: self.tagAttributes[self.attrname] = self.attrval def begin_afterslash(self, byte): self._after_slash_closed = 0 def do_afterslash(self, byte): if self._after_slash_closed: self._parseError("Mal-formed") if byte != '>': if self.beExtremelyLenient: return else: self._parseError("No data allowed after '/'") self._after_slash_closed = 1 self.gotTagStart(self.tagName, self.tagAttributes) self.gotTagEnd(self.tagName) return 'bodydata' def begin_bodydata(self, byte): if self._leadingBodyData: self.bodydata = self._leadingBodyData del self._leadingBodyData else: self.bodydata = '' def do_bodydata(self, byte): if byte == '<': return 'tagstart' if byte == '&': return 'entityref' self.bodydata += byte def end_bodydata(self): self.gotText(self.bodydata) self.bodydata = '' def do_waitforendscript(self, byte): if byte == '<': return 'waitscriptendtag' self.bodydata += byte def begin_waitscriptendtag(self, byte): self.temptagdata = '' self.tagName = '' self.endtag = 0 def do_waitscriptendtag(self, byte): self.temptagdata += byte if byte == '/': self.endtag = True elif not self.endtag: self.bodydata += "<" + self.temptagdata return 'waitforendscript' elif byte.isalnum() or byte in identChars: self.tagName += byte if not 'script'.startswith(self.tagName): self.bodydata += "<" + self.temptagdata return 'waitforendscript' elif self.tagName == 'script': self.gotText(self.bodydata) self.gotTagEnd(self.tagName) return 'waitforgt' elif byte.isspace(): return 'waitscriptendtag' else: self.bodydata += "<" + self.temptagdata return 'waitforendscript' def begin_entityref(self, byte): self.erefbuf = '' self.erefextra = '' def do_entityref(self, byte): if byte.isspace() or byte == "<": if 
self.beExtremelyLenient: if self.erefbuf and self.erefbuf != "amp": self.erefextra = self.erefbuf self.erefbuf = "amp" if byte == "<": return "tagstart" else: self.erefextra += byte return 'spacebodydata' self._parseError("Bad entity reference") elif byte != ';': self.erefbuf += byte else: return 'bodydata' def end_entityref(self): self.gotEntityReference(self.erefbuf) def begin_spacebodydata(self, byte): self.bodydata = self.erefextra self.erefextra = None do_spacebodydata = do_bodydata end_spacebodydata = end_bodydata def gotTagStart(self, name, attributes): print 'begin', name, attributes def gotText(self, data): print 'text:', repr(data) def gotEntityReference(self, entityRef): print 'entityRef: &%s;' % entityRef def gotComment(self, comment): pass def gotCData(self, cdata): self.gotText(cdata)
Apache License 2.0
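A minimal usage sketch for the lenient XMLParser shown in the context above: subclass it, override the got* callbacks, and drive it by hand with connectionMade / dataReceived / connectionLost. The TagCollector name and the sample markup are invented for illustration, and gotTagEnd is overridden defensively since the base class's default handler does not appear in this excerpt.

```python
# Hypothetical subclass of the XMLParser from the context above; it records
# start tags instead of printing them.
class TagCollector(XMLParser):
    def connectionMade(self):
        XMLParser.connectionMade(self)  # initialise lineno, colno and encodings
        self.tags = []

    def gotTagStart(self, name, attributes):
        self.tags.append((name, dict(attributes)))

    def gotTagEnd(self, name):
        pass  # end tags are not interesting for this sketch

    def gotText(self, data):
        pass  # nor is body text


p = TagCollector()
p.beExtremelyLenient = 1  # tolerate sloppy, HTML-ish markup
p.connectionMade()
p.dataReceived("<html><body><p class='x'>hi</p></body></html>")
p.connectionLost(None)
print(p.tags)  # [('html', {}), ('body', {}), ('p', {'class': 'x'})]
```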
phenom4n4n/phen-cogs
baron/baron.py
Baron.baron_leave_commands
python
async def baron_leave_commands( self, ctx: commands.Context, commands: int, confirm: bool = False ): cog = self.bot.get_cog("CommandStats") data = await cog.config.guilddata() guilds = [] async for guild in AsyncIter(self.bot.guilds, steps=100): guild_data = data.get(str(guild.id), {}) total_commands = sum(guild_data.values()) if total_commands < commands: guilds.append(guild) if not guilds: await ctx.send( f"There are no servers with a command usage count less than {commands}." ) await self.leave_guilds( ctx, guilds, f"I have automatically left this server since it has used less than {commands} commands.", confirmed=confirm, )
Leave all servers that have used fewer commands than the given number.
https://github.com/phenom4n4n/phen-cogs/blob/ab86b6ee787378ba84e9e85991482ebf21dec75a/baron/baron.py#L535-L557
import asyncio import functools import time from io import BytesIO from typing import List, Literal, Optional, Tuple import discord from matplotlib import pyplot as plt from redbot.core import commands from redbot.core.bot import Red from redbot.core.commands import GuildConverter, TimedeltaConverter from redbot.core.config import Config from redbot.core.utils import AsyncIter from redbot.core.utils.chat_formatting import ( box, humanize_list, humanize_number, humanize_timedelta, pagify, ) from redbot.core.utils.menus import DEFAULT_CONTROLS, menu, start_adding_reactions from redbot.core.utils.predicates import ReactionPredicate RequestType = Literal["discord_deleted_user", "owner", "user", "user_strict"] def comstats_cog(ctx: commands.Context): return ctx.bot.get_cog("CommandStats") is not None def disabled_or_data(data): return data or "Disabled" class Baron(commands.Cog): __version__ = "1.2.3" def format_help_for_context(self, ctx): pre_processed = super().format_help_for_context(ctx) n = "\n" if "\n\n" not in pre_processed else "" return f"{pre_processed}{n}\nCog Version: {self.__version__}" default_global = { "limit": 0, "log_channel": None, "log_guild": None, "min_members": 0, "bot_ratio": 0, "whitelist": [], "blacklist": [], } def __init__(self, bot: Red) -> None: self.bot = bot self.config = Config.get_conf( self, identifier=325236743863625234572, force_registration=True, ) self.settings_cache = {} self.config.register_global(**self.default_global) async def red_delete_data_for_user(self, *, requester: RequestType, user_id: int) -> None: return async def build_cache(self): self.settings_cache = await self.config.all() @commands.is_owner() @commands.command(aliases=["guildsgrowth", "guildgraph", "guildsgraph"]) async def guildgrowth( self, ctx: commands.Context, *, time: TimedeltaConverter( allowed_units=["weeks", "days", "hours"], default_unit="weeks" ) = None, ): await ctx.trigger_typing() date = ctx.message.created_at - time if time else self.bot.user.created_at guilds = [ guild.me.joined_at async for guild in AsyncIter(self.bot.guilds, steps=100) if guild.me.joined_at.timestamp() > date.timestamp() ] if len(guilds) <= 1: return await ctx.send("There aren't enough server joins during that time.") task = functools.partial(self.create_graph, guilds) task = self.bot.loop.run_in_executor(None, task) try: buf = await asyncio.wait_for(task, timeout=60) except asyncio.TimeoutError: return await ctx.send( "An error occurred while generating this image. Try again later." 
) e = discord.Embed(color=await ctx.embed_color(), title="Guilds Growth") e.set_image(url="attachment://attachment.png") await ctx.send(embed=e, file=discord.File(buf, "attachment.png")) buf.close() def create_graph(self, guilds: list): plt.clf() guilds.sort(key=lambda g: g) plt.grid(True) fig, ax = plt.subplots() ax.plot(guilds, tuple(range(len(guilds))), lw=2) fig.autofmt_xdate() plt.xlabel("Date") plt.ylabel("Guilds") buf = BytesIO() fig.savefig(buf, format="png") buf.seek(0) return buf @commands.is_owner() @commands.group() async def baron(self, ctx: commands.Context): @baron.command() async def settings(self, ctx: commands.Context): data = await self.config.all() log_chan = data["log_channel"] if log_chan := self.bot.get_channel(data["log_channel"]): log_chan = log_chan.mention description = [ f"Log Channel: {log_chan}", f"Server Limit: {disabled_or_data(data['limit'])}", f"Minimum Members: {disabled_or_data(data['min_members'])}", f"Bot Farm: {disabled_or_data(data['bot_ratio'])}", ] e = discord.Embed( color=await ctx.embed_color(), title="Baron Settings", description="\n".join(description), ) await ctx.send(embed=e) @baron.command() async def limit(self, ctx: commands.Context, limit: int = 0): await self.config.limit.set(limit) await ctx.send( f"The server limit has been set to {limit}." if limit else "The server limit has been disabled." ) await self.build_cache() @baron.command() async def channel(self, ctx: commands.Context, channel: discord.TextChannel = None): if channel: await self.config.log_channel.set(channel.id) await self.config.log_guild.set(channel.guild.id) await ctx.send(f"Baron's log channel has been set to {channel.mention}.") else: await self.config.log_channel.clear() await self.config.log_guild.clear() await ctx.send("Baron's log channel has been removed.") await self.build_cache() @baron.command(aliases=["wl"]) async def whitelist(self, ctx: commands.Context, guild_id: int = None): if not guild_id: e = discord.Embed( color=await ctx.embed_color(), title="Baron Whitelist", description=humanize_list(await self.config.whitelist()), ) await ctx.send(embed=e) else: if guild_id in await self.config.whitelist(): await ctx.send("This server is already whitelisted.") return async with self.config.whitelist() as w: w.append(guild_id) await ctx.tick() await self.build_cache() @baron.command(aliases=["unwl"]) async def unwhitelist(self, ctx: commands.Context, guild_id: int): if guild_id not in await self.config.whitelist(): await ctx.send("This server is not in the whitelist.") return async with self.config.whitelist() as w: index = w.index(guild_id) w.pop(index) await ctx.tick() await self.build_cache() @baron.command(aliases=["bl"]) async def blacklist(self, ctx: commands.Context, guild_id: int = None): if not guild_id: e = discord.Embed( color=await ctx.embed_color(), title="Baron Blacklist", description=humanize_list(await self.config.blacklist()), ) await ctx.send(embed=e) else: if guild_id in await self.config.blacklist(): await ctx.send("This server is already blacklisted.") return async with self.config.blacklist() as b: b.append(guild_id) await ctx.tick() await self.build_cache() @baron.command(aliases=["unbl"]) async def unblacklist(self, ctx: commands.Context, guild_id: int): if guild_id not in await self.config.blacklist(): await ctx.send("This server is not in the blacklist.") return async with self.config.blacklist() as b: index = b.index(guild_id) b.pop(index) await ctx.tick() await self.build_cache() @baron.command() async def minmembers(self, ctx: 
commands.Context, limit: Optional[int] = 0): await self.config.min_members.set(limit) await ctx.send( f"The minimum member limit has been set to {limit}." if limit else "The minimum member limit has been disabled." ) await self.build_cache() @baron.command() async def botratio(self, ctx: commands.Context, ratio: Optional[int] = 0): if ratio not in range(100): raise commands.BadArgument await self.config.bot_ratio.set(ratio) await ctx.send( f"The bot ratio has been set to {ratio}." if ratio else "The bot ratio has been removed." ) await self.build_cache() async def view_guilds( self, ctx: commands.Context, guilds: List[discord.Guild], title: str, page_length: int = 500, *, color: discord.Color = discord.Color.red(), footer: str = None, insert_function=None, ): page_length = max(100, min(2000, page_length)) data = await self.config.all() whitelist = data["whitelist"] desc = [] async for guild in AsyncIter(guilds, steps=100): bots = len([x async for x in AsyncIter(guild.members, steps=100) if x.bot]) percent = bots / guild.member_count guild_desc = [ f"{guild.name} - ({guild.id})", f"Members: **{humanize_number(guild.member_count)}**", f"Bots: **{round(percent * 100, 2)}%**", ] if insert_function: guild_desc.append(str(insert_function(guild))) if guild.id in whitelist: guild_desc.append("[Whitelisted](https://www.youtube.com/watch?v=oHg5SJYRHA0)") desc.append("\n".join(guild_desc)) pages = list(pagify("\n\n".join(desc), ["\n\n"], page_length=page_length)) embeds = [] base_embed = discord.Embed(color=color, title=title) bot_guilds = self.bot.guilds for index, page in enumerate(pages, 1): e = base_embed.copy() e.description = page footer_text = f"{index}/{len(pages)} | {len(guilds)}/{len(bot_guilds)} servers" if footer: footer_text += f" | {footer}" e.set_footer(text=footer_text) embeds.append(e) await menu(ctx, embeds, DEFAULT_CONTROLS) @baron.group(name="view") async def baron_view(self, ctx: commands.Context): @baron_view.command(name="botfarms") async def baron_view_botfarms( self, ctx: commands.Context, rate: Optional[int] = 75, page_length: Optional[int] = 500 ): bot_farms, ok_guilds = await self.get_bot_farms(rate / 100) if not bot_farms: return await ctx.send( f"There are no servers with a bot ratio higher or equal than {rate}%." ) await self.view_guilds( ctx, bot_farms, f"Bot Farms ({rate}%)", page_length, footer=f"OK guilds: {ok_guilds}" ) @baron_view.command(name="members") async def baron_view_members( self, ctx: commands.Context, members: int, less_than: Optional[bool] = True, page_length: Optional[int] = 500, ): if less_than: guilds = [ guild async for guild in AsyncIter(self.bot.guilds, steps=100) if guild.member_count < members ] else: guilds = [ guild async for guild in AsyncIter(self.bot.guilds, steps=100) if guild.member_count > members ] if not guilds: return await ctx.send( f"There are no servers with a member count {'less' if less_than else 'greater'} than {members}." 
) await self.view_guilds(ctx, guilds, f"Server Members ({members})", page_length) @commands.check(comstats_cog) @baron_view.command(name="commands") async def baron_view_commands( self, ctx: commands.Context, commands: int, highest_first: Optional[bool] = False, page_length: Optional[int] = 500, ): cog = self.bot.get_cog("CommandStats") data = await cog.config.guilddata() guilds = [] guild_command_usage = {} async for guild in AsyncIter(self.bot.guilds, steps=100): guild_data = data.get(str(guild.id), {}) total_commands = sum(guild_data.values()) if total_commands < commands: guilds.append((guild, total_commands)) guild_command_usage[guild.id] = total_commands guilds.sort(key=lambda x: x[1], reverse=highest_first) if not guilds: return await ctx.send( f"There are no servers that have used less than {commands} commands." ) def insert_function(guild: discord.Guild): return f"Commands Used: **{guild_command_usage.get(guild.id, 0)}**" await self.view_guilds( ctx, [g async for g, c in AsyncIter(guilds, steps=100)], f"Command Usage ({commands})", page_length, insert_function=insert_function, ) @baron_view.command(name="unchunked") async def baron_view_unchunked( self, ctx: commands.Context, page_length: Optional[int] = 500, ): guilds = [g async for g in AsyncIter(self.bot.guilds, steps=100) if not g.chunked] if not guilds: return await ctx.send(f"There are no unchunked servers.") def insert_function(guild: discord.Guild): members = len(guild.members) percent = members / guild.member_count return f"Members Cached: **{humanize_number(members)} ({round(percent * 100, 2)})%**" await self.view_guilds( ctx, guilds, "Unchunked Servers", page_length, insert_function=insert_function ) @baron_view.command(name="ownedby") async def baron_view_ownedby( self, ctx: commands.Context, user: discord.User, page_length: Optional[int] = 500 ): bot_guilds = self.bot.guilds guilds = [g async for g in AsyncIter(bot_guilds, steps=100) if g.owner_id == user.id] if not guilds: return await ctx.send(f"**{user}** does not own any servers I am in.") owned_ratio = len(guilds) / len(bot_guilds) await self.view_guilds( ctx, guilds, f"Servers owned by {user}", footer=f"{user} owns {round(owned_ratio * 100, 8)}% of the bot's servers", ) @baron.group(name="leave") async def baron_leave(self, ctx: commands.Context): @baron_leave.command(name="mass") async def baron_leave_mass( self, ctx: commands.Context, guilds: commands.Greedy[GuildConverter], *, reason: Optional[str] = "I have left this server at the request of my owner.", ): if not guilds: raise commands.BadArgument await self.leave_guilds(ctx, guilds, reason) @baron_leave.command(name="botfarms") async def baron_leave_botfarms( self, ctx: commands.Context, rate: int = 75, confirm: bool = False ): if rate not in range(1, 100): raise commands.BadArgument guilds, _ = await self.get_bot_farms(rate / 100) if not guilds: await ctx.send(f"There are no servers with a bot ratio higher or equal than {rate}%.") return await self.leave_guilds( ctx, guilds, f"I have automatically left this server since it has a high bot to member ratio.", confirmed=confirm, ) @baron_leave.command(name="members") async def baron_leave_members( self, ctx: commands.Context, members: int, confirm: bool = False ): guilds = [ guild async for guild in AsyncIter(self.bot.guilds, steps=100) if guild.member_count < members ] if not guilds: await ctx.send(f"There are no servers with a member count less than {members}.") await self.leave_guilds( ctx, guilds, f"I have automatically left this server since it has less 
than {members} members.", confirmed=confirm, ) @baron_leave.command(name="blacklisted") async def baron_leave_blacklisted(self, ctx: commands.Context, confirm: bool = False): blacklist = await self.config.blacklist() guilds = [g async for g in AsyncIter(self.bot.guilds, steps=100) if g.id in blacklist] if not guilds: return await ctx.send(f"I'm not in any blacklisted servers.") await self.leave_guilds(ctx, guilds, None, notify_guilds=False, confirmed=confirm) @commands.check(comstats_cog) @baron_leave.command(name="commands")
MIT License
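A standalone sketch of the guild-filtering step in baron_leave_commands above: per-guild command counts from the CommandStats config are summed and compared against the threshold. The dictionary contents below are invented.

```python
# Invented CommandStats-style data: {guild_id: {command_name: uses}}.
data = {
    "111111111111111111": {"ping": 3, "help": 1},
    "222222222222222222": {"ping": 40, "play": 12},
}
threshold = 5

# Keep guilds whose total recorded command usage is below the threshold.
low_usage = [
    guild_id
    for guild_id, counts in data.items()
    if sum(counts.values()) < threshold
]
print(low_usage)  # ['111111111111111111']
```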
neo4j/neo4j-python-driver
neo4j/__init__.py
Driver.close
python
def close(self): self._pool.close()
Shut down, closing any open connections in the pool.
https://github.com/neo4j/neo4j-python-driver/blob/49bca20cfcd5e88a6eb353e8a5a70fb8efb72db8/neo4j/__init__.py#L308-L311
__all__ = [ "__version__", "GraphDatabase", "Driver", "BoltDriver", "Neo4jDriver", "Auth", "AuthToken", "basic_auth", "kerberos_auth", "bearer_auth", "custom_auth", "Bookmark", "ServerInfo", "Version", "READ_ACCESS", "WRITE_ACCESS", "DEFAULT_DATABASE", "TRUST_ALL_CERTIFICATES", "TRUST_SYSTEM_CA_SIGNED_CERTIFICATES", "Address", "IPv4Address", "IPv6Address", "Config", "PoolConfig", "WorkspaceConfig", "SessionConfig", "Record", "Transaction", "Result", "ResultSummary", "SummaryCounters", "Query", "Session", "unit_of_work", "ExperimentalWarning", ] from logging import getLogger from neo4j.addressing import ( Address, IPv4Address, IPv6Address, ) from neo4j.api import ( Auth, AuthToken, basic_auth, kerberos_auth, bearer_auth, custom_auth, Bookmark, ServerInfo, Version, READ_ACCESS, WRITE_ACCESS, SYSTEM_DATABASE, DEFAULT_DATABASE, TRUST_ALL_CERTIFICATES, TRUST_SYSTEM_CA_SIGNED_CERTIFICATES, ) from neo4j.conf import ( Config, PoolConfig, WorkspaceConfig, SessionConfig, ) from neo4j.meta import ( experimental, ExperimentalWarning, get_user_agent, version as __version__, ) from neo4j.data import ( Record, ) from neo4j.work.simple import ( Query, Session, unit_of_work, ) from neo4j.work.transaction import ( Transaction, ) from neo4j.work.result import ( Result, ) from neo4j.work.summary import ( ResultSummary, SummaryCounters, ) log = getLogger("neo4j") class GraphDatabase: @classmethod def driver(cls, uri, *, auth=None, **config): from neo4j.api import ( parse_neo4j_uri, parse_routing_context, DRIVER_BOLT, DRIVER_NEO4j, SECURITY_TYPE_NOT_SECURE, SECURITY_TYPE_SELF_SIGNED_CERTIFICATE, SECURITY_TYPE_SECURE, URI_SCHEME_BOLT, URI_SCHEME_NEO4J, URI_SCHEME_BOLT_SELF_SIGNED_CERTIFICATE, URI_SCHEME_BOLT_SECURE, URI_SCHEME_NEO4J_SELF_SIGNED_CERTIFICATE, URI_SCHEME_NEO4J_SECURE, ) driver_type, security_type, parsed = parse_neo4j_uri(uri) if "trust" in config.keys(): if config.get("trust") not in [TRUST_ALL_CERTIFICATES, TRUST_SYSTEM_CA_SIGNED_CERTIFICATES]: from neo4j.exceptions import ConfigurationError raise ConfigurationError("The config setting `trust` values are {!r}".format( [ TRUST_ALL_CERTIFICATES, TRUST_SYSTEM_CA_SIGNED_CERTIFICATES, ] )) if security_type in [SECURITY_TYPE_SELF_SIGNED_CERTIFICATE, SECURITY_TYPE_SECURE] and ("encrypted" in config.keys() or "trust" in config.keys()): from neo4j.exceptions import ConfigurationError raise ConfigurationError("The config settings 'encrypted' and 'trust' can only be used with the URI schemes {!r}. 
Use the other URI schemes {!r} for setting encryption settings.".format( [ URI_SCHEME_BOLT, URI_SCHEME_NEO4J, ], [ URI_SCHEME_BOLT_SELF_SIGNED_CERTIFICATE, URI_SCHEME_BOLT_SECURE, URI_SCHEME_NEO4J_SELF_SIGNED_CERTIFICATE, URI_SCHEME_NEO4J_SECURE, ] )) if security_type == SECURITY_TYPE_SECURE: config["encrypted"] = True elif security_type == SECURITY_TYPE_SELF_SIGNED_CERTIFICATE: config["encrypted"] = True config["trust"] = TRUST_ALL_CERTIFICATES if driver_type == DRIVER_BOLT: return cls.bolt_driver(parsed.netloc, auth=auth, **config) elif driver_type == DRIVER_NEO4j: routing_context = parse_routing_context(parsed.query) return cls.neo4j_driver(parsed.netloc, auth=auth, routing_context=routing_context, **config) @classmethod def bolt_driver(cls, target, *, auth=None, **config): from neo4j._exceptions import BoltHandshakeError, BoltSecurityError try: return BoltDriver.open(target, auth=auth, **config) except (BoltHandshakeError, BoltSecurityError) as error: from neo4j.exceptions import ServiceUnavailable raise ServiceUnavailable(str(error)) from error @classmethod def neo4j_driver(cls, *targets, auth=None, routing_context=None, **config): from neo4j._exceptions import BoltHandshakeError, BoltSecurityError try: return Neo4jDriver.open(*targets, auth=auth, routing_context=routing_context, **config) except (BoltHandshakeError, BoltSecurityError) as error: from neo4j.exceptions import ServiceUnavailable raise ServiceUnavailable(str(error)) from error class Direct: default_host = "localhost" default_port = 7687 default_target = ":" def __init__(self, address): self._address = address @property def address(self): return self._address @classmethod def parse_target(cls, target): if not target: target = cls.default_target address = Address.parse(target, default_host=cls.default_host, default_port=cls.default_port) return address class Routing: default_host = "localhost" default_port = 7687 default_targets = ": :17601 :17687" def __init__(self, initial_addresses): self._initial_addresses = initial_addresses @property def initial_addresses(self): return self._initial_addresses @classmethod def parse_targets(cls, *targets): targets = " ".join(targets) if not targets: targets = cls.default_targets addresses = Address.parse_list(targets, default_host=cls.default_host, default_port=cls.default_port) return addresses class Driver: _pool = None def __init__(self, pool): assert pool is not None self._pool = pool def __del__(self): self.close() def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.close() @property def encrypted(self): return bool(self._pool.pool_config.encrypted) def session(self, **config): raise NotImplementedError @experimental("The pipeline API is experimental and may be removed or changed in a future release") def pipeline(self, **config): raise NotImplementedError
Apache License 2.0
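A minimal usage sketch for Driver.close above. The URI and credentials are placeholders; GraphDatabase.driver and basic_auth come from the surrounding context.

```python
from neo4j import GraphDatabase, basic_auth

# Placeholder connection details for a local server.
driver = GraphDatabase.driver("bolt://localhost:7687",
                              auth=basic_auth("neo4j", "secret"))
try:
    pass  # open sessions and run queries here
finally:
    driver.close()  # shut down and close any pooled connections
```

Because Driver also defines __enter__/__exit__ (see the context), the try/finally can equally be written as a with-statement around the driver.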
fearedbliss/bliss-initramfs
pkg/libs/Tools.py
Tools.Clean
python
def Clean(cls): os.chdir(var.home) if os.path.exists(var.temp): Tools.RemoveTree(var.temp) if os.path.exists(var.temp): cls.Warn("Failed to delete the " + var.temp + " directory. Exiting.") quit(1)
Check whether the temporary directory exists; if it does, delete it for a fresh start.
https://github.com/fearedbliss/bliss-initramfs/blob/b8471f8bc5a91a811eb1de8acbb61573c6675175/pkg/libs/Tools.py#L91-L105
import os import json import argparse import pkg.libs.Variables as var from subprocess import call from subprocess import check_output class Tools: @classmethod def ProcessArguments(cls, Modules): user = Tools.Run("whoami")[0] if user != "root": cls.Fail("This program must be ran as root") parser = argparse.ArgumentParser( description="Builds an initramfs for booting from Encrypted/OpenZFS." ) parser.add_argument( "-c", "--config", help="Path to the settings.json. (i.e: /home/jon/settings.json)", ) parser.add_argument( "-k", "--kernel", required=True, help="The name of the kernel you are building the initramfs for. (i.e: 4.14.170-FC.01)", ) parser.add_argument( "-v", "--version", action="version", version="%(prog)s {}".format(var.version), help="Displays the version of this application.", ) args = parser.parse_args() if args.config: var.settingsPath = args.config if args.kernel: var.kernel = args.kernel @classmethod def PrintHeader(cls): print("-" * 30) Tools.Print( Tools.Colorize("yellow", var.name) + " - " + Tools.Colorize("pink", "v" + var.version) ) Tools.Print(var.contact) Tools.Print(var.license) print("-" * 30 + "\n") @classmethod def GetProgramPath(cls, vProg): cmd = "whereis " + vProg + ' | cut -d " " -f 2' results = check_output(cmd, shell=True, universal_newlines=True).strip() if results: return results else: cls.Fail("The " + vProg + " program could not be found!") @classmethod
Apache License 2.0
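A call-site sketch for Tools.Clean above: it is a classmethod, so it is invoked directly on the class. The import path mirrors this record's function_path.

```python
from pkg.libs.Tools import Tools

# Remove any stale temporary directory before a fresh initramfs build;
# Clean() warns and exits if the directory cannot be deleted.
Tools.Clean()
```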
bitmovin/bitmovin-api-sdk-python
bitmovin_api_sdk/models/burn_in_subtitle_srt.py
BurnInSubtitleSrt.character_encoding
python
def character_encoding(self): return self._character_encoding
Gets the character_encoding of this BurnInSubtitleSrt. Character encoding of the SRT file (required) :return: The character_encoding of this BurnInSubtitleSrt. :rtype: CaptionCharacterEncoding
https://github.com/bitmovin/bitmovin-api-sdk-python/blob/79dd938804197151af7cbe5501c7ec1d97872c15/bitmovin_api_sdk/models/burn_in_subtitle_srt.py#L64-L73
from enum import Enum from six import string_types, iteritems from bitmovin_api_sdk.common.poscheck import poscheck_model from bitmovin_api_sdk.models.bitmovin_resource import BitmovinResource from bitmovin_api_sdk.models.caption_character_encoding import CaptionCharacterEncoding from bitmovin_api_sdk.models.input_path import InputPath import pprint import six class BurnInSubtitleSrt(BitmovinResource): @poscheck_model def __init__(self, id_=None, name=None, description=None, created_at=None, modified_at=None, custom_data=None, character_encoding=None, input_=None): super(BurnInSubtitleSrt, self).__init__(id_=id_, name=name, description=description, created_at=created_at, modified_at=modified_at, custom_data=custom_data) self._character_encoding = None self._input = None self.discriminator = None if character_encoding is not None: self.character_encoding = character_encoding if input_ is not None: self.input = input_ @property def openapi_types(self): types = {} if hasattr(super(BurnInSubtitleSrt, self), 'openapi_types'): types = getattr(super(BurnInSubtitleSrt, self), 'openapi_types') types.update({ 'character_encoding': 'CaptionCharacterEncoding', 'input': 'InputPath' }) return types @property def attribute_map(self): attributes = {} if hasattr(super(BurnInSubtitleSrt, self), 'attribute_map'): attributes = getattr(super(BurnInSubtitleSrt, self), 'attribute_map') attributes.update({ 'character_encoding': 'characterEncoding', 'input': 'input' }) return attributes @property
MIT License
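A property-access sketch for character_encoding above. The constructor keywords come from the context; the UTF_8 member of CaptionCharacterEncoding is an assumption, so substitute whichever member the SDK actually defines.

```python
from bitmovin_api_sdk.models.burn_in_subtitle_srt import BurnInSubtitleSrt
from bitmovin_api_sdk.models.caption_character_encoding import CaptionCharacterEncoding

# Build the model with keyword arguments and read the property back.
srt = BurnInSubtitleSrt(character_encoding=CaptionCharacterEncoding.UTF_8)  # UTF_8 assumed
print(srt.character_encoding)
```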
stepfenshawn/cantonese
src/cantonese.py
node_import_new
python
def node_import_new(Node : list, name) -> None: Node.append(["node_import", name])
Node_import | name
https://github.com/stepfenshawn/cantonese/blob/5dce376170c2f89abd8a53b7ccb5c7c7ec4c0f63/src/cantonese.py#L334-L340
import cmd import re import sys import os import argparse from src.濑嘢 import 濑啲咩嘢 from src.stack_vm import * class lexer(object): def __init__(self, code, keywords): self.code = code self.keywords = keywords self.line = 1 self.re_new_line = re.compile(r"\r\n|\n\r|\n|\r") self.re_number = r"^0[xX][0-9a-fA-F]*(\.[0-9a-fA-F]*)?([pP][+\-]?[0-9]+)?|^[0-9]*(\.[0-9]*)?([eE][+\-]?[0-9]+)?" self.re_id = r"^[_\d\w]+|^[\u4e00-\u9fa5]+" self.re_str = r"(?s)(^'(\\\\|\\'|\\\n|\\z\s*|[^'\n])*')|(^\"(\\\\|\\\"|\\\n|\\z\s*|[^\"\n])*\")" self.re_expr = r"[|](.*?)[|]" self.re_callfunc = r"[&](.*?)[)]" self.op = r'(?P<op>(相加){1}|(加){1}|(减){1}|(乘){1}|(整除){1}|(除){1}|(余){1}|(异或){1}|(取反){1}|(左移){1}|(右移){1}' r'(与){1}(或者){1}|(或){1}|(系){1})|(同埋){1}|(自己嘅){1}|(比唔上){1}|(喺){1}' self.op_get_code = re.findall(re.compile(r'[(](.*?)[)]', re.S), self.op[5 : ]) self.op_gen_code = ["矩阵.matrix_addition", "+", "-", "*", "//", "/", "%", "^", "~", "<<", ">>", "&", "or", "|", "==", "and", "self.", '<', 'in'] self.build_in_funcs = r'(?P<build_in_funcs>(瞓){1}|(加啲){1}|(摞走){1}|(嘅长度){1}|(阵先){1}|' r'(畀你){1}|(散水){1})' self.bif_get_code = re.findall(re.compile(r'[(](.*?)[)]', re.S), self.build_in_funcs[19 :]) self.bif_gen_code = ["sleep", "append", "remove", ".__len__()", "2", "input", "clear"] def make_rep(self, list1 : list, list2 : list) -> list: assert len(list1) == len(list2) ret = [] for i in range(len(list1)): ret.append([list1[i], list2[i]]) return ret def trans(self, code : str, rep : str) -> str: p = re.match(r'\|(.*)同(.*)有几衬\|', code, re.M|re.I) if p: code = " corr(" + p.group(1) +", " + p.group(2) + ") " for r in rep: code = code.replace(r[0], r[1]) return code def next(self, n): self.code = self.code[n:] def check(self, s): return self.code.startswith(s) @staticmethod def is_white_space(c): return c in ('\t', '\n', '\v', '\f', '\r', ' ') @staticmethod def is_new_line(c): return c in ('\r', '\n') @staticmethod def isChinese(word): for ch in word: if '\u4e00' <= ch <= '\u9fff': return True return False def skip_space(self): while len(self.code) > 0: if self.check('\r\n') or self.check('\n\r'): self.next(2) self.line += 1 elif self.is_new_line(self.code[0]): self.next(1) self.line += 1 elif self.check('?') or self.check(':') or self.check(':') or self.check('?'): self.next(1) elif self.is_white_space(self.code[0]): self.next(1) else: break def scan(self, pattern): m = re.match(pattern, self.code) if m: token = m.group() self.next(len(token)) return token def scan_identifier(self): return self.scan(self.re_id) def scan_expr(self): return self.scan(self.re_expr) def scan_number(self): return self.scan(self.re_number) def scan_callfunc(self): return self.scan(self.re_callfunc) def scan_short_string(self): m = re.match(self.re_str, self.code) if m: s = m.group() self.next(len(s)) return s self.error('unfinished string') return '' def error(self, f, *args): err = f.format(*args) err = '{0}: {1}'.format(self.line, err) raise Exception(err) def get_token(self): self.skip_space() if len(self.code) == 0: return [self.line, ['EOF', 'EOF']] c = self.code[0] if c == '&': token = self.scan_callfunc() + ')' token = self.trans(token, self.make_rep(self.bif_get_code, self.bif_gen_code)) return [self.line, ['expr', token]] if c == '|': token = self.scan_expr() token = self.trans(token, self.make_rep(self.bif_get_code, self.bif_gen_code)) token = self.trans(token, self.make_rep(self.op_get_code, self.op_gen_code)) return [self.line, ['expr', token]] if c == '-': if self.check('->'): self.next(2) return [self.line, ['keyword', '->']] if c == '$': 
self.next(1) return [self.line, ['keyword', '$']] if c == '@': self.next(1) return [self.line, ['keyword', '@']] if c == '{': self.next(1) return [self.line, ['keyword', '{']] if c == '}': self.next(1) return [self.line, ['keyword', '}']] if self.isChinese(c) or c == '_' or c.isalpha(): token = self.scan_identifier() if token in self.keywords: return [self.line, ['keywords', token]] return [self.line, ['identifier', token]] if c in ('\'', '"'): return [self.line, ['string', self.scan_short_string()]] if c == '.' or c.isdigit(): token = self.scan_number() return [self.line, ['num', token]] self.error("睇唔明嘅Token: " + c) def escape(self, s): ret = '' while len(s) > 0: if s[0] != '\\': ret += s[0] s = s[1:] continue if len(s) == 1: self.error('unfinished string') if s[1] == 'a': ret += '\a' s = s[2:] continue elif s[1] == 'b': ret += '\b' s = s[2:] continue elif s[1] == 'f': ret += '\f' s = s[2:] continue elif s[1] == 'n' or s[1] == '\n': ret += '\n' s = s[2:] continue elif s[1] == 'r': ret += '\r' s = s[2:] continue elif s[1] == 't': ret += '\t' s = s[2:] continue elif s[1] == 'v': ret += '\v' s = s[2:] continue elif s[1] == '"': ret += '"' s = s[2:] continue elif s[1] == '\'': ret += '\'' s = s[2:] continue elif s[1] == '\\': ret += '\\' s = s[2:] continue def cantonese_token(code : str, keywords : str) -> list: lex = lexer(code, keywords) tokens = [] while True: token = lex.get_token() tokens.append(token) if token[1] == ['EOF', 'EOF']: break return tokens def node_print_new(Node : list, arg) -> None: Node.append(["node_print", arg]) def node_sleep_new(Node : list, arg) -> None: Node.append(["node_sleep", arg]) def node_break_new(Node : list) -> None: Node.append(["node_break"]) def node_exit_new(Node : list) -> None: Node.append(["node_exit"]) def node_let_new(Node : list, key ,value) -> None: Node.append(["node_let", key, value]) def node_if_new(Node : list, cond, stmt) -> None: Node.append(["node_if", cond, stmt]) def node_elif_new(Node : list, cond, stmt) -> None: Node.append(["node_elif", cond, stmt]) def node_else_new(Node : list, stmt) -> None: Node.append(["node_else", stmt]) def node_loop_new(Node : list, cond, stmt) -> None: Node.append(["node_loop", cond, stmt]) def node_func_new(Node : list, func_name, args, body) -> None: Node.append(["node_fundef", func_name, args, body]) def node_call_new(Node : list, func_name) -> None: Node.append(["node_call", func_name]) def node_build_in_func_call_new(Node : list, var, func_name, args) -> None: Node.append(["node_bcall", var, func_name, args])
MIT License
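A behaviour sketch for node_import_new above: it simply appends a ["node_import", name] entry to the node list built by the Cantonese parser.

```python
Node = []
node_import_new(Node, "math")
node_import_new(Node, "os")
print(Node)  # [['node_import', 'math'], ['node_import', 'os']]
```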
steemit/hivemind
hive/server/bridge_api/support.py
get_post_header
python
async def get_post_header(context, author, permlink): db = context['db'] sql = """SELECT id, parent_id, author, permlink, category, depth FROM hive_posts WHERE author = :author AND permlink = :permlink AND is_deleted = '0'""" row = await db.query_row(sql, author=author, permlink=permlink) if not row: return None return dict( author=row['author'], permlink=row['permlink'], category=row['category'], depth=row['depth'])
Fetch basic post data
https://github.com/steemit/hivemind/blob/d99b852e1ad321aeb67eaec5fb03f7bfb32c75d6/hive/server/bridge_api/support.py#L19-L37
import logging import traceback from hive.server.bridge_api.objects import _condenser_post_object from hive.utils.post import post_to_internal from hive.utils.normalize import sbd_amount from hive.server.common.helpers import ( return_error_info) log = logging.getLogger(__name__) ROLES = {-2: 'muted', 0: 'guest', 2: 'member', 4: 'admin', 6: 'mod', 8: 'admin'} @return_error_info
MIT License
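A call sketch for get_post_header above: it is a coroutine and expects a context dict whose 'db' entry exposes query_row. The author and permlink values are made up, and `db` is assumed to be hivemind's async database adapter.

```python
async def demo(db):
    # `db` is assumed to be hivemind's async database adapter.
    header = await get_post_header({'db': db}, 'alice', 'my-first-post')
    if header is None:
        print('post not found or deleted')
    else:
        print(header['category'], header['depth'])
```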
wolkabout/wolkconnect-python
wolk/wolk_connect.py
WolkConnect.with_custom_connectivity
python
def with_custom_connectivity( self, connectivity_service: ConnectivityService ): self.logger.debug(f"Connectivity service: {connectivity_service}") if not isinstance(connectivity_service, ConnectivityService): raise ValueError("Invalid connectivity service provided") self.connectivity_service = connectivity_service self.connectivity_service.set_inbound_message_listener( self._on_inbound_message ) return self
Provide a custom way to communicate with the Platform. :param connectivity_service: Custom connectivity service :type connectivity_service: ConnectivityService
https://github.com/wolkabout/wolkconnect-python/blob/11412e3f88911170f587b5e857d07ab41c8f52b5/wolk/wolk_connect.py#L376-L393
import os from inspect import signature from typing import Callable from typing import Dict from typing import List from typing import Optional from typing import Tuple from typing import Union from wolk import logger_factory from wolk.interface.connectivity_service import ConnectivityService from wolk.interface.file_management import FileManagement from wolk.interface.firmware_handler import FirmwareHandler from wolk.interface.firmware_update import FirmwareUpdate from wolk.interface.message_deserializer import MessageDeserializer from wolk.interface.message_factory import MessageFactory from wolk.interface.message_queue import MessageQueue from wolk.message_deque import MessageDeque from wolk.model.actuator_status import ActuatorStatus from wolk.model.alarm import Alarm from wolk.model.device import Device from wolk.model.file_management_error_type import FileManagementErrorType from wolk.model.file_management_status import FileManagementStatus from wolk.model.file_management_status_type import FileManagementStatusType from wolk.model.firmware_update_error_type import FirmwareUpdateErrorType from wolk.model.firmware_update_status import FirmwareUpdateStatus from wolk.model.firmware_update_status_type import FirmwareUpdateStatusType from wolk.model.message import Message from wolk.model.sensor_reading import SensorReading from wolk.model.state import State from wolk.mqtt_connectivity_service import MQTTConnectivityService as MQTTCS from wolk.os_file_management import OSFileManagement from wolk.os_firmware_update import OSFirmwareUpdate from wolk.repeating_timer import RepeatingTimer from wolk.wolkabout_protocol_message_deserializer import ( WolkAboutProtocolMessageDeserializer as WAPMDeserializer, ) from wolk.wolkabout_protocol_message_factory import ( WolkAboutProtocolMessageFactory as WAPMFactory, ) ConfigurationValue = Union[ bool, int, float, str, ] ActuatorValue = Tuple[State, Optional[Union[bool, int, float, str]]] ReadingValue = Union[ bool, int, Tuple[int, ...], float, Tuple[float, ...], str, ] class WolkConnect: def __init__( self, device: Device, host: Optional[str] = None, port: Optional[int] = None, ca_cert: Optional[str] = None, ): logger_factory.logger_factory.set_device_key(device.key) self.logger = logger_factory.logger_factory.get_logger( str(self.__class__.__name__) ) self.logger.debug( f"Device: {device} ; " f"Host: {host} ; Port: {port} ; ca_cert: {ca_cert}" ) self.device = device self.actuation_handler: Optional[ Callable[[str, Union[bool, int, float, str]], None] ] = None self.actuator_status_provider: Optional[ Callable[[str], ActuatorValue] ] = None self.configuration_handler: Optional[ Callable[[Dict[str, ConfigurationValue]], None] ] = None self.configuration_provider: Optional[ Callable[[None], Dict[str, ConfigurationValue]] ] = None self.file_management: Optional[FileManagement] = None self.firmware_update: Optional[FirmwareUpdate] = None self.message_queue: MessageQueue = MessageDeque() self.message_factory: MessageFactory = WAPMFactory(device.key) self.message_deserializer: MessageDeserializer = WAPMDeserializer( self.device ) wolk_ca_cert = os.path.join(os.path.dirname(__file__), "ca.crt") last_will_message = self.message_factory.make_last_will_message() if host and port and ca_cert: self.connectivity_service: ConnectivityService = MQTTCS( device, self.message_deserializer.get_inbound_topics(), last_will_message, host=host, port=int(port), ca_cert=ca_cert, ) elif host and port: self.connectivity_service = MQTTCS( device, 
self.message_deserializer.get_inbound_topics(), last_will_message, host=host, port=int(port), ) else: self.connectivity_service = MQTTCS( device, self.message_deserializer.get_inbound_topics(), last_will_message, ca_cert=wolk_ca_cert, ) self.connectivity_service.set_inbound_message_listener( self._on_inbound_message ) self.keep_alive_service_enabled = True self.keep_alive_interval = 60 self.keep_alive_service: Optional[RepeatingTimer] = None self.last_platform_timestamp: Optional[int] = None def with_actuators( self, actuation_handler: Callable[[str, Union[bool, int, float, str]], None], actuator_status_provider: Callable[[str], ActuatorValue], ): self.logger.debug( f"Actuation handler: {actuation_handler} ; " f"Actuator status provider: {actuator_status_provider}" ) if not callable(actuation_handler): raise ValueError(f"{actuation_handler} is not a callable!") if len(signature(actuation_handler).parameters) != 2: raise ValueError(f"{actuation_handler} invalid signature!") self.actuation_handler = actuation_handler if not callable(actuator_status_provider): raise ValueError(f"{actuator_status_provider} is not a callable!") if len(signature(actuator_status_provider).parameters) != 1: raise ValueError(f"{actuator_status_provider} invalid signature!") self.actuator_status_provider = actuator_status_provider return self def with_configuration( self, configuration_handler: Callable[[Dict[str, ConfigurationValue]], None], configuration_provider: Callable[ [None], Dict[str, ConfigurationValue] ], ): self.logger.debug( f"Configuration handler: {configuration_handler} ; " f"Configuration provider: {configuration_provider}" ) if not callable(configuration_handler): raise ValueError(f"{configuration_handler} is not a callable!") if len(signature(configuration_handler).parameters) != 1: raise ValueError(f"{configuration_handler} invalid signature!") self.configuration_handler = configuration_handler if not callable(configuration_provider): raise ValueError(f"{configuration_provider} is not a callable!") if len(signature(configuration_provider).parameters) != 0: raise ValueError(f"{configuration_provider} invalid signature!") self.configuration_provider = configuration_provider return self def with_file_management( self, preferred_package_size: int, max_file_size: int, file_directory: str, custom_url_download: Optional[Callable[[str, str], bool]] = None, ): self.logger.debug( f"Preferred package size: {preferred_package_size}, " f"maximum file size: {max_file_size}, " f"file directory: '{file_directory}'" ) self.file_management = OSFileManagement( self._on_file_upload_status, self._on_package_request, self._on_file_url_status, ) self.file_management.configure( preferred_package_size, max_file_size, file_directory, ) if custom_url_download is not None: self.file_management.set_custom_url_downloader(custom_url_download) return self def with_firmware_update(self, firmware_handler: FirmwareHandler): self.logger.debug(f"Firmware handler: {firmware_handler}") if self.file_management is None: raise RuntimeError( "File management must be enabled before firmware update" ) self.firmware_update = OSFirmwareUpdate( firmware_handler, self._on_firmware_update_status ) message = self.message_factory.make_from_firmware_version_update( self.firmware_update.get_current_version() ) self.message_queue.put(message) self.firmware_update.report_result() return self def with_custom_message_queue(self, message_queue: MessageQueue): self.logger.debug(f"Message queue: {message_queue}") if not isinstance(message_queue, 
MessageQueue): raise ValueError( "Provided message queue does not implement MessageQueue" ) self.message_queue = message_queue return self def with_custom_protocol( self, message_factory: MessageFactory, message_deserializer: MessageDeserializer, ): self.logger.debug( f"Message factory: {message_factory} ; " f"message deserializer: {message_deserializer}" ) if not isinstance(message_factory, MessageFactory): raise ValueError("Invalid message factory provided") self.message_factory = message_factory if not isinstance(message_deserializer, MessageDeserializer): raise ValueError("Invalid message deserializer provided") self.message_deserializer = message_deserializer return self
Apache License 2.0
google-tasks-backup/tasks-backup
shared.py
escape_html
python
def escape_html(text): if text is None: return None return cgi.escape(text).encode('ascii', 'xmlcharrefreplace').replace('\n','<br />')
Ensure that text is properly escaped as valid HTML
https://github.com/google-tasks-backup/tasks-backup/blob/ffcb2044eb6089d20e1be3f93025fa33c2efbe3e/shared.py#L216-L225
import cgi import sys import os import traceback import logging import datetime import base64 import unicodedata from urlparse import urljoin from google.appengine.api import logservice from google.appengine.api import mail from google.appengine.api import urlfetch from google.appengine.api.app_identity import get_application_id from google.appengine.ext.webapp import template from Crypto.PublicKey import RSA from Crypto.Cipher import PKCS1_OAEP from Crypto.Cipher import AES from Crypto.Util import Counter import settings import constants import appversion import host_settings real_fetch = urlfetch.fetch def fetch_with_deadline(url, *args, **argv): argv['deadline'] = settings.URL_FETCH_TIMEOUT return real_fetch(url, *args, **argv) urlfetch.fetch = fetch_with_deadline logservice.AUTOFLUSH_EVERY_SECONDS = 5 logservice.AUTOFLUSH_EVERY_BYTES = None logservice.AUTOFLUSH_EVERY_LINES = 5 logservice.AUTOFLUSH_ENABLED = True class DailyLimitExceededError(Exception): msg = "Daily limit exceeded. Please try again after midnight Pacific Standard Time." def __init__(self, msg = None): if msg: self.msg = msg class GtbDecryptionError(Exception): def __init__(self, msg): self.msg = msg super(GtbDecryptionError, self).__init__(msg) def set_cookie( req, name, val, use_request_path=False, path='/', cookie_name_prefix=''): if not isinstance(val, str): val = str(val) if cookie_name_prefix and name: name = cookie_name_prefix + '__' + name if not isinstance(path, str): path = str(path) if use_request_path: path = req.request.path expires_dt = datetime.datetime.utcnow() + datetime.timedelta(days=10 * 365) secure = True if is_dev_server() and req.request.scheme == 'http': secure = False req.response.set_cookie(name, val, expires=expires_dt, overwrite=True, secure=secure, path=path) def format_exception_info(max_tb_level=5): cla, exc, trbk = sys.exc_info() exc_name = cla.__name__ try: exc_args = exc.__dict__["args"] except KeyError: exc_args = "<no args>" exc_tb = traceback.format_tb(trbk, max_tb_level) return (exc_name, exc_args, exc_tb) def get_exception_name(): cla, _, _ = sys.exc_info() exc_name = cla.__name__ return str(exc_name) def get_exception_msg(ex = None): line_num = u'' msg = u'' ex_msg = u"No exception occured" cla, exc, trbk = sys.exc_info() try: line_num = trbk.tb_lineno except: pass if cla: exc_name = cla.__name__ if line_num: ex_msg = u"{}: {} at line {}".format(exc_name, exc.message, line_num) else: ex_msg = u"{}: {}".format(exc_name, exc.message) if ex: try: e_msg = unicode(ex) exc_name = ex.__class__.__name__ msg = "{}: {}".format(exc_name, e_msg) except: msg = u"Unable to process 'ex'. Most recent exception = " + ex_msg if msg: return msg return ex_msg def is_test_user(user_email): return (user_email.lower() in (email.lower() for email in settings.TEST_ACCOUNTS)) def dump_obj(obj): for attr in dir(obj): logging.debug(" obj.%s = %s" % (attr, getattr(obj, attr))) logservice.flush()
Apache License 2.0
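A behaviour sketch for escape_html above (the module targets Python 2, hence cgi.escape): markup characters are escaped, non-ASCII characters become numeric character references, and newlines are rewritten as <br /> tags.

```python
print(escape_html(u'Fish & Chips <tonight>\nsee you at 7'))
# Fish &amp; Chips &lt;tonight&gt;<br />see you at 7

print(escape_html(u'caf\xe9'))
# caf&#233;
```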
francium/microurl
microurl.py
read_data
python
def read_data(query): with db: data = db.query_micro_link(query) if not(data): raise KeyError('{} not found in database'.format(query)) else: return data[2]
Search for and return a query in the DB; raise KeyError if it is not found.
https://github.com/francium/microurl/blob/c4ae80fa82fd175198608ccb7a6f49d545db8078/microurl.py#L192-L202
import os import json import sys import time from flask import abort, Flask, redirect, render_template, request, send_from_directory from validators import domain as domaincheck from validators import ipv4 as ipcheck from validators import url as urlcheck import database import database_cleaner import random_micro app = Flask(__name__) db = database.DB_Interface() @app.route('/') def route_index(): return render_template('index.html') @app.route('/about') def route_about(): return render_template('about.html') @app.route('/top') def route_top(): return render_template('top.html', registry=read_top()) @app.route('/recent') def route_recent(): return render_template('recent.html', registry=read_recent()) @app.route('/generate_micro', methods=['POST']) def route_generate_micro(): data = parse_form_data(request.form) url = data['url'].strip() micro = get_micro(url) if not micro: micro = generate_micro() register_micro(micro, url, data['public']) return json.dumps({"status": "OK", "micro": micro, "error": ""}) @app.route('/<micro>') def route_micro(micro): try: temp = lookup_micro(micro) if urlcheck(temp): return redirect(temp) elif domaincheck(temp): return redirect("http://" + temp) elif ipcheck(temp.split(':')[0]) and urlcheck('http://' + temp): return redirect("http://" + temp) else: abort(404) except Exception as e: sys.stderr.write(str(e)) abort(404) @app.errorhandler(404) def route_404(error): return 'invalid url' @app.route('/favicon.ico') def favicon(): return send_from_directory(os.path.join(app.root_path, 'static'), 'favicon.ico') def parse_form_data(form_data): try: if form_data['public'] == 'on': public = True except KeyError: public = False url = form_data['url'] return {'url': url, 'public': public} def generate_micro(): return random_micro.random_words(3) def lookup_micro(micro): try: data = read_data(micro) increment_hit(micro) return data except KeyError as e: raise e def get_micro(url): with db: result = db.query_real_link(url) if result: return result[0] return None def register_micro(micro, url, public): DAY_SECS = 24 * 60 * 60 with db: tnow = int(time.time()) rc = db.insert(micro, url, tnow, tnow + DAY_SECS, public) def read_top(): with db: data = db.get_top() if not(data): return {'': 'nothing here'} else: return {d[1] : d[2] for d in data} def read_recent(): with db: data = db.get_recent() if not(data): return {'': 'nothing here'} else: return {d[1] : d[2] for d in data}
MIT License
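A call sketch for read_data above: it returns the stored long URL for a micro code and raises KeyError for an unknown code. The micro code shown is hypothetical.

```python
try:
    target = read_data("blue-otter-sunrise")  # hypothetical micro code
    print(target)
except KeyError as err:
    print(err)  # 'blue-otter-sunrise not found in database'
```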
pennlabs/penn-courses
backend/plan/management/commands/trainrecommender.py
normalize_class_name
python
def normalize_class_name(class_name): course_obj: Course = lookup_course(class_name) if course_obj is None: return class_name class_name = ( class_name if course_obj.primary_listing is None else course_obj.primary_listing.full_code ) return class_name
Take in a class name and return the standard name for that class
https://github.com/pennlabs/penn-courses/blob/6fd16c151e34a9660e883a41458a72cef6c1f8cd/backend/plan/management/commands/trainrecommender.py#L291-L301
import codecs import csv import math import os import pickle from typing import Dict, Iterable, List, Tuple import numpy as np from django.core.cache import cache from django.core.management.base import BaseCommand from sklearn.cluster import KMeans from sklearn.decomposition import PCA, TruncatedSVD from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.preprocessing import normalize from courses.models import Course from PennCourses.settings.base import S3_client, S3_resource from plan.models import Schedule def lookup_course(course): try: return Course.objects.filter(full_code=course).latest("semester") except Course.DoesNotExist: return None def courses_data_from_db(): user_to_semester_to_courses = dict() for schedule in Schedule.objects.prefetch_related("sections").all(): if schedule.person_id not in user_to_semester_to_courses: user_to_semester_to_courses[schedule.person_id] = dict() if schedule.semester not in user_to_semester_to_courses[schedule.person_id]: user_to_semester_to_courses[schedule.person_id][schedule.semester] = set() for section in schedule.sections.all(): user_to_semester_to_courses[schedule.person_id][schedule.semester].add( section.course.full_code ) for person_id in user_to_semester_to_courses: for semester in user_to_semester_to_courses[person_id]: for course_code in user_to_semester_to_courses[person_id][semester]: yield person_id, course_code, semester def courses_data_from_csv(course_data_path): with open(course_data_path) as course_data_file: data_reader = csv.reader(course_data_file) for row in data_reader: yield tuple(row) def courses_data_from_s3(): for row in csv.reader( codecs.getreader("utf-8")( S3_client.get_object(Bucket="penn.courses", Key="course_data.csv")["Body"] ) ): yield tuple(row) def get_description(course): course_obj = lookup_course(course) if course_obj is None or not course_obj.description: return "" return course_obj.description def vectorize_courses_by_description(courses): descriptions = [get_description(course) for course in courses] vectorizer = TfidfVectorizer() has_nonempty_descriptions = ( sum(1 for description in descriptions if description and len(description) > 0) > 0 ) if has_nonempty_descriptions: vectors = vectorizer.fit_transform(descriptions) else: vectors = np.array([[0] for _ in descriptions]) _, dim = vectors.shape if dim >= 500: dim_reducer = TruncatedSVD(n_components=500) vectors = dim_reducer.fit_transform(vectors) return normalize(vectors) def group_courses(courses_data: Iterable[Tuple[int, str, str]]): courses_by_semester_by_user: Dict[int, Dict[str, Dict[str, int]]] = dict() for person_id, course, semester in courses_data: course = normalize_class_name(course) if person_id not in courses_by_semester_by_user: user_dict = dict() courses_by_semester_by_user[person_id] = user_dict else: user_dict = courses_by_semester_by_user[person_id] if semester not in user_dict: semester_courses_multiset = dict() user_dict[semester] = semester_courses_multiset else: semester_courses_multiset = user_dict[semester] if course in semester_courses_multiset: semester_courses_multiset[course] += 1 else: semester_courses_multiset[course] = 1 return courses_by_semester_by_user def vectorize_by_copresence( courses_by_semester_by_user, as_past_class=False ) -> Dict[str, np.ndarray]: courses_set = set() for _, courses_by_semester in courses_by_semester_by_user.items(): for _, course_multiset in courses_by_semester.items(): for course, _ in course_multiset.items(): courses_set.add(course) courses_list = list(courses_set) 
course_to_index = {course: i for i, course in enumerate(courses_list)} copresence_vectors_by_course = {course: np.zeros(len(courses_list)) for course in courses_list} order_vectors_by_course = {course: np.zeros(len(courses_list)) for course in courses_list} for user, courses_by_semester in courses_by_semester_by_user.items(): for sem, course_multiset in courses_by_semester.items(): for course_a, frequency_a in course_multiset.items(): index_a = course_to_index[course_a] relevant_vector_a = copresence_vectors_by_course[course_a] if not as_past_class: for course_b, frequency_b in course_multiset.items(): co_frequency = min(frequency_a, frequency_b) index_b = course_to_index[course_b] relevant_vector_a[index_b] += co_frequency relevant_vector_a[index_a] += frequency_a ordered_sems = sorted(courses_by_semester.keys()) for i, sem in enumerate(ordered_sems): courses_first_sem = courses_by_semester[sem] start_sem_index = i if as_past_class else i + 1 for later_sem in ordered_sems[start_sem_index:]: courses_later_sem = courses_by_semester[later_sem] for course_later, freq1 in courses_later_sem.items(): add_to_copres = as_past_class and later_sem != ordered_sems[start_sem_index] for course_earlier, freq2 in courses_first_sem.items(): earlier_index = course_to_index[course_earlier] cofreq = min(freq1, freq2) order_vectors_by_course[course_later][earlier_index] += cofreq if add_to_copres: later_index = course_to_index[course_later] copresence_vectors_by_course[course_earlier][later_index] += cofreq concatenated = { key: order_vectors_by_course[key] + copresence_vectors_by_course[key] for key in order_vectors_by_course } return concatenated def vectorize_courses_by_schedule_presence(courses_by_user: List[Dict[str, int]]): num_users = len(courses_by_user) course_vectors_dict = {} for user_index, user_courses in enumerate(courses_by_user): for course, frequency in user_courses.items(): relevant_vector: np.ndarray if course not in course_vectors_dict: relevant_vector = np.zeros(num_users) course_vectors_dict[course] = relevant_vector else: relevant_vector = course_vectors_dict[course] relevant_vector[user_index] = frequency courses, vectors = zip(*course_vectors_dict.items()) vectors = np.array(vectors) _, dims = vectors.shape dim_reduced_components = round(math.log2(num_users + 2)) if min(dims, dim_reduced_components) > 5: dim_reducer = PCA(n_components=dim_reduced_components) dim_reduced = dim_reducer.fit_transform(vectors) else: dim_reduced = np.array(vectors) scaled = normalize(dim_reduced) return {course: scaled for course, scaled in zip(courses, scaled)} def get_unsequenced_courses_by_user(courses_by_semester_by_user): unsequenced_courses_by_user = {} for user, courses_by_semester in courses_by_semester_by_user.items(): combined_multiset = {} for semester, course_multiset in courses_by_semester.items(): for course, frequency in course_multiset.items(): combined_multiset[course] = frequency unsequenced_courses_by_user[user] = combined_multiset return list(unsequenced_courses_by_user.values()) def generate_course_vectors_dict(courses_data, use_descriptions=True): courses_to_vectors_curr = {} courses_to_vectors_past = {} grouped_courses = group_courses(courses_data) copresence_vectors_by_course = vectorize_by_copresence(grouped_courses) copresence_vectors_by_course_past = vectorize_by_copresence(grouped_courses, as_past_class=True) courses_by_user = get_unsequenced_courses_by_user(grouped_courses) courses, courses_vectorized_by_schedule_presence = zip( 
*vectorize_courses_by_schedule_presence(courses_by_user).items() ) courses_vectorized_by_description = vectorize_courses_by_description(courses) copresence_vectors = [copresence_vectors_by_course[course] for course in courses] copresence_vectors_past = [copresence_vectors_by_course_past[course] for course in courses] copresence_vectors = normalize(copresence_vectors) copresence_vectors_past = normalize(copresence_vectors_past) _, dims = copresence_vectors_past.shape dim_reduced_components = round(30 * math.log2(len(courses))) if min(dims, dim_reduced_components) > 5: dim_reduce = TruncatedSVD(n_components=dim_reduced_components) copresence_vectors = dim_reduce.fit_transform(copresence_vectors) dim_reduce = TruncatedSVD(n_components=dim_reduced_components) copresence_vectors_past = dim_reduce.fit_transform(copresence_vectors_past) for ( course, schedule_vector, description_vector, copresence_vector, copresence_vector_past, ) in zip( courses, courses_vectorized_by_schedule_presence, courses_vectorized_by_description, copresence_vectors, copresence_vectors_past, ): if use_descriptions: if np.linalg.norm(description_vector) == 0: continue total_vector_curr = np.concatenate( [schedule_vector, description_vector, copresence_vector * 2] ) total_vector_past = np.concatenate( [schedule_vector, description_vector, copresence_vector_past * 2] ) else: total_vector_curr = np.concatenate([schedule_vector, copresence_vector * 2]) total_vector_past = np.concatenate([schedule_vector, copresence_vector_past * 2]) courses_to_vectors_curr[course] = total_vector_curr / np.linalg.norm(total_vector_curr) courses_to_vectors_past[course] = total_vector_past / np.linalg.norm(total_vector_past) return courses_to_vectors_curr, courses_to_vectors_past
MIT License
snakedragondevs/leviathan
leviathan/network/engine/connection.py
Handler.receive_message
python
def receive_message(self, message):
Receive a message from the given connection.

Args:
    message: The payload of a Packet, as a string.
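A minimal sketch of a concrete Handler subclass, assuming the leviathan Handler/Connection API shown in the surrounding context; EchoHandler and its echo behaviour are illustrative, not part of the library.

from leviathan.network.engine.connection import Handler


class EchoHandler(Handler):
    """Hypothetical handler that echoes every reassembled payload back."""

    def __init__(self):
        # The base __init__ is abstract; nothing extra is needed here.
        super(EchoHandler, self).__init__()

    def receive_message(self, message):
        # `message` is the full payload reassembled from one or more packets.
        self.connection.send_message(message)

    def handle_shutdown(self):
        # Invoked by Connection.shutdown(); no resources to release.
        pass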
https://github.com/snakedragondevs/leviathan/blob/fb08db7e4cd3ad182981fc71224011f5950063b8/leviathan/network/engine/connection.py#L558-L563
import abc import collections import enum import random from twisted.internet import reactor, task from leviathan.network.engine import constants, heap, packet REACTOR = reactor State = enum.Enum('State', ('CONNECTING', 'CONNECTED', 'SHUTDOWN')) class Connection(object): _Address = collections.namedtuple('Address', ['ip', 'port']) class ScheduledPacket(object): def __init__(self, rudp_packet, timeout, timeout_cb, retries=0): self.rudp_packet = rudp_packet self.timeout = timeout self.timeout_cb = timeout_cb self.retries = retries def __repr__(self): return '{0}({1}, {2}, {3}, {4})'.format( self.__class__.__name__, self.rudp_packet, self.timeout, self.timeout_cb, self.retries ) def __init__(self, proto, handler, own_addr, dest_addr, relay_addr=None): self.own_addr = self._Address(*own_addr) self.dest_addr = self._Address(*dest_addr) if relay_addr is None: self.relay_addr = dest_addr else: self.relay_addr = self._Address(*relay_addr) self.handler = handler self._proto = proto self._state = State.CONNECTING self._next_sequence_number = random.randrange(2**16 - 2) self._next_expected_seqnum = 0 self._next_delivered_seqnum = 0 self._segment_queue = collections.deque() self._sending_window = collections.OrderedDict() self._receive_heap = heap.Heap() self._looping_send = task.LoopingCall(self._dequeue_outbound_message) self._looping_receive = task.LoopingCall(self._pop_received_packet) REACTOR.callLater(0, self._send_syn) self._ack_handle = REACTOR.callLater(1, self._send_ack) self._ack_handle.cancel() @property def state(self): return self._state def set_relay_address(self, relay_addr): self.relay_addr = self._Address(*relay_addr) def send_message(self, message): for segment in self._gen_segments(message): self._segment_queue.append(segment) self._attempt_enabling_looping_send() def receive_packet(self, rudp_packet, from_addr): if self._state == State.SHUTDOWN: return if from_addr not in (rudp_packet.source_addr, self.relay_addr): self.set_relay_address(from_addr) if rudp_packet.fin: self._process_fin_packet(rudp_packet) elif rudp_packet.syn: if self._state == State.CONNECTING: self._process_syn_packet(rudp_packet) else: if self._state == State.CONNECTED: self._process_casual_packet(rudp_packet) def shutdown(self): self._state = State.SHUTDOWN self._send_fin() self._cancel_ack_timeout() self._attempt_disabling_looping_send(force=True) self._attempt_disabling_looping_receive() self._clear_sending_window() self.handler.handle_shutdown() def unregister(self): assert self.state == State.SHUTDOWN del self._proto[self.dest_addr] @staticmethod def _gen_segments(message): max_size = constants.UDP_SAFE_SEGMENT_SIZE count = (len(message) + max_size - 1) // max_size segments = ( (count - i - 1, message[i * max_size: (i + 1) * max_size]) for i in range(count) ) return segments def _attempt_enabling_looping_send(self): if ( not self._looping_send.running and self._state == State.CONNECTED and len(self._sending_window) < constants.WINDOW_SIZE and len(self._segment_queue) ): self._looping_send.start(0, now=True) def _attempt_disabling_looping_send(self, force=False): if ( self._looping_send.running and ( force or len(self._sending_window) >= constants.WINDOW_SIZE or not len(self._segment_queue) ) ): self._looping_send.stop() def _get_next_sequence_number(self): cur = self._next_sequence_number self._next_sequence_number += 1 return cur def _send_syn(self): syn_packet = packet.Packet.from_data( self._get_next_sequence_number(), self.dest_addr, self.own_addr, ack=self._next_expected_seqnum, syn=True ) 
self._schedule_send_in_order(syn_packet, constants.PACKET_TIMEOUT) def _send_ack(self): ack_packet = packet.Packet.from_data( 0, self.dest_addr, self.own_addr, ack=self._next_expected_seqnum ) self._schedule_send_out_of_order(ack_packet) def _send_fin(self): fin_packet = packet.Packet.from_data( 0, self.dest_addr, self.own_addr, ack=self._next_expected_seqnum, fin=True ) self._schedule_send_out_of_order(fin_packet) def _schedule_send_out_of_order(self, rudp_packet): final_packet = self._finalize_packet(rudp_packet) self._proto.send_datagram(final_packet, self.relay_addr) def _schedule_send_in_order(self, rudp_packet, timeout): final_packet = self._finalize_packet(rudp_packet) seqnum = rudp_packet.sequence_number timeout_cb = REACTOR.callLater(0, self._do_send_packet, seqnum) self._sending_window[seqnum] = self.ScheduledPacket( final_packet, timeout, timeout_cb, 0 ) def _dequeue_outbound_message(self): assert self._segment_queue, 'Looping send active despite empty queue.' more_fragments, message = self._segment_queue.popleft() rudp_packet = packet.Packet.from_data( self._get_next_sequence_number(), self.dest_addr, self.own_addr, message, more_fragments, ack=self._next_expected_seqnum ) self._schedule_send_in_order(rudp_packet, constants.PACKET_TIMEOUT) self._attempt_disabling_looping_send() def _finalize_packet(self, rudp_packet): return rudp_packet.to_bytes() def _do_send_packet(self, seqnum): sch_packet = self._sending_window[seqnum] if sch_packet.retries >= constants.MAX_RETRANSMISSIONS: self.shutdown() else: self._proto.send_datagram(sch_packet.rudp_packet, self.relay_addr) sch_packet.timeout_cb = REACTOR.callLater( sch_packet.timeout, self._do_send_packet, seqnum ) sch_packet.retries += 1 self._cancel_ack_timeout() def _reset_ack_timeout(self, timeout): if self._ack_handle.active(): self._ack_handle.reset(timeout) else: self._ack_handle = REACTOR.callLater(timeout, self._send_ack) def _cancel_ack_timeout(self): if self._ack_handle.active(): self._ack_handle.cancel() def _clear_sending_window(self): for sch_packet in self._sending_window.values(): if sch_packet.timeout_cb.active(): sch_packet.timeout_cb.cancel() self._sending_window.clear() def _process_ack_packet(self, rudp_packet): if self._sending_window: self._retire_packets_with_seqnum_up_to( min(rudp_packet.ack, self._next_sequence_number) ) def _process_fin_packet(self, rudp_packet): self.shutdown() def _process_casual_packet(self, rudp_packet): if rudp_packet.ack > 0: self._process_ack_packet(rudp_packet) seqnum = rudp_packet.sequence_number if seqnum > 0: self._reset_ack_timeout(constants.BARE_ACK_TIMEOUT) if seqnum >= self._next_expected_seqnum: self._receive_heap.push(rudp_packet) if seqnum == self._next_expected_seqnum: self._next_expected_seqnum += 1 self._attempt_enabling_looping_receive() def _process_syn_packet(self, rudp_packet): if rudp_packet.ack > 0: self._process_ack_packet(rudp_packet) self._update_next_expected_seqnum(rudp_packet.sequence_number) self._update_next_delivered_seqnum(rudp_packet.sequence_number) self._state = State.CONNECTED self._attempt_enabling_looping_send() def _update_next_expected_seqnum(self, seqnum): if self._next_expected_seqnum <= seqnum: self._next_expected_seqnum = seqnum + 1 def _update_next_delivered_seqnum(self, seqnum): if self._next_delivered_seqnum <= seqnum: self._next_delivered_seqnum = seqnum + 1 def _retire_packets_with_seqnum_up_to(self, acknum): if not self._sending_window: return lowest_seqnum = iter(self._sending_window).__next__() if acknum >= lowest_seqnum: for seqnum in 
range(lowest_seqnum, acknum): self._retire_scheduled_packet_with_seqnum(seqnum) self._attempt_enabling_looping_send() def _retire_scheduled_packet_with_seqnum(self, seqnum): sch_packet = self._sending_window.pop(seqnum) sch_packet.timeout_cb.cancel() def _attempt_enabling_looping_receive(self): if ( not self._looping_receive.running and self._state == State.CONNECTED and self._receive_heap ): self._looping_receive.start(0, now=True) def _attempt_disabling_looping_receive(self): if self._looping_receive.running: self._looping_receive.stop() def _pop_received_packet(self): fragments = self._receive_heap.pop_min_and_all_fragments() if fragments is None: self._attempt_disabling_looping_receive() else: last_seqnum = fragments[-1].sequence_number self._update_next_expected_seqnum(last_seqnum) self._update_next_delivered_seqnum(last_seqnum) payload = b''.join(f.payload for f in fragments) self.handler.receive_message(payload) if self._next_delivered_seqnum not in self._receive_heap: self._attempt_disabling_looping_receive() class Handler(object): __metaclass__ = abc.ABCMeta connection = None @abc.abstractmethod def __init__(self, *args, **kwargs): @abc.abstractmethod
MIT License
bueda/django-comrade
comrade/http/middleware.py
AcceptMiddleware._parse_accept_header
python
def _parse_accept_header(self, accept):
    result = []
    for media_range in accept.split(","):
        parts = media_range.split(";")
        media_type = parts.pop(0)
        media_params = []
        q = 1.0
        for part in parts:
            (key, value) = part.lstrip().split("=", 1)
            if key == "q":
                q = float(value)
            else:
                media_params.append((key, value))
        result.append((media_type, tuple(media_params), q))
    result.sort(lambda x, y: -cmp(x[2], y[2]))
    return result
Parse the Accept header *accept*, returning a list of (media_type, media_params, q_value) tuples, ordered by descending q value.
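A small worked example, not from the source, of what the parser produces under the Python 2 runtime this project targets; note that only parameter keys are stripped, so media types after the first keep their leading whitespace.

middleware = AcceptMiddleware()
middleware._parse_accept_header("application/json;q=0.9, text/html, */*;q=0.1")
# -> [(' text/html', (), 1.0),
#     ('application/json', (), 0.9),
#     (' */*', (), 0.1)]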
https://github.com/bueda/django-comrade/blob/baaec70f2dd2d64692e56ac13df01e8f16b958df/comrade/http/middleware.py#L170-L188
from django.conf import settings from django.middleware.csrf import CsrfViewMiddleware from django.http import HttpResponsePermanentRedirect, get_host, HttpResponse from django.core.exceptions import PermissionDenied, ImproperlyConfigured from django.contrib.auth.views import redirect_to_login import re import itertools try: import piston.utils import piston.emitters except ImportError: pass from comrade.http import coerce_put_post from comrade.views.simple import direct_to_template import commonware.log logger = commonware.log.getLogger(__name__) _HTML_TYPES = ('text/html', 'application/xhtml+xml') _SUPPORTED_TRANSFORMS = ['PUT', 'DELETE'] _FORM_RE = re.compile(r'((<form\W[^>]*\bmethod=(\'|"|))(%s)((\'|"|)\b[^>]*>))' % '|'.join(_SUPPORTED_TRANSFORMS), re.IGNORECASE) _MIDDLEWARE_KEY = '_method' SSL = 'SSL' class HttpMethodsMiddleware(object): def __init__(self): if ('django.middleware.csrf.CsrfViewMiddleware' in settings.MIDDLEWARE_CLASSES): raise ImproperlyConfigured("To use CSRF protection with the " "HttpMethodsMiddleware, you muse use the " "MultiMethodCsrfViewMiddleware instead of Django's " "CsrfViewMiddleware.") def process_request(self, request): if request.POST and request.POST.has_key(_MIDDLEWARE_KEY): if request.POST[_MIDDLEWARE_KEY].upper() in _SUPPORTED_TRANSFORMS: request.method = request.POST[_MIDDLEWARE_KEY].upper() coerce_put_post(request) return None def process_response(self, request, response): if response['Content-Type'].split(';')[0] in _HTML_TYPES: idattributes = itertools.chain(("id='" + _MIDDLEWARE_KEY + "'",), itertools.repeat('')) def add_transform_field(match): return match.group(2) + "POST" + match.group(5) + "<div style='display:none;'>" + "<input type='hidden' " + idattributes.next() + " name='" + _MIDDLEWARE_KEY + "' value='" + match.group(4).upper() + "' /></div>" response.content = _FORM_RE.sub(add_transform_field, response.content) return response class MultiMethodCsrfViewMiddleware(CsrfViewMiddleware): def process_view(self, request, callback, callback_args, callback_kwargs): original_method = request.method if request.method not in ('GET', 'HEAD', 'OPTIONS'): request.method = 'POST' response = super(MultiMethodCsrfViewMiddleware, self).process_view( request, callback, callback_args, callback_kwargs) request.method = original_method return response class ForwardedSSLMiddleware(object): def process_request(self, request): request.is_secure = lambda: request.META.get( 'HTTP_X_FORWARDED_PROTO') == 'https' class SslRedirectMiddleware(object): def process_view(self, request, view_func, view_args, view_kwargs): secure = view_kwargs.pop(SSL, True) if settings.SSL_ENABLED and secure and not request.is_secure(): return self._redirect(request, secure) def _redirect(self, request, secure): protocol = 'https' if secure else 'http' url = "%s://%s%s" % (protocol, get_host(request), request.get_full_path()) if settings.DEBUG and request.method == 'POST': raise RuntimeError, """Django can't perform a SSL redirect while maintaining POST data. 
Structure your views so that redirects only occur during GETs.""" return HttpResponsePermanentRedirect(url) class PermissionRedirectMiddleware(object): def process_exception(self, request, exception): if isinstance(exception, PermissionDenied): if 'json' in request.accepted_types[0]: return HttpResponse(status=401) elif request.user.is_authenticated(): return direct_to_template(request, "403.html", {'message': str(exception)}, status=403) else: return redirect_to_login(request.path) class POSTDataMassageMiddleware(object): def process_request(self, request): if request.method in ('POST', 'PUT'): try: piston.utils.translate_mime(request) except piston.utils.MimerDataException: return piston.utils.rc.BAD_REQUEST else: request.multipart = piston.utils.Mimer(request).is_multipart() if not hasattr(request, 'data'): if request.method == 'POST': request.data = request.POST elif request.method == 'PUT': request.data = request.PUT class AcceptMiddleware(object):
MIT License
michaelkonobeev/adashift
author_code_base/nmt/nmt/model.py
BaseModel.eval
python
def eval(self, sess):
    assert self.mode == tf.contrib.learn.ModeKeys.EVAL
    output_tuple = EvalOutputTuple(eval_loss=self.eval_loss,
                                   predict_count=self.predict_count,
                                   batch_size=self.batch_size)
    return sess.run(output_tuple)
Execute eval graph.
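A hedged sketch of the evaluation loop this method is typically driven by; the run_eval helper, the eval_model bundle and the perplexity aggregation are assumptions in the style of the surrounding TF1 code, not verbatim from this repository.

import numpy as np
import tensorflow as tf

def run_eval(eval_model, eval_sess):
    # Assumed: eval_model bundles a BaseModel built with mode=EVAL plus its
    # iterator, and eval_sess already holds restored weights.
    eval_sess.run(eval_model.iterator.initializer)
    total_loss, total_count = 0.0, 0
    while True:
        try:
            out = eval_model.model.eval(eval_sess)  # EvalOutputTuple of numpy values
            total_loss += out.eval_loss * out.batch_size
            total_count += out.predict_count
        except tf.errors.OutOfRangeError:
            break
    return np.exp(total_loss / total_count)  # perplexity over the eval set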
https://github.com/michaelkonobeev/adashift/blob/bf86b021d42e922078a39246770f0f875300a6f3/author_code_base/nmt/nmt/model.py#L354-L360
from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import collections import numpy as np import tensorflow as tf from . import model_helper from .utils import iterator_utils from .utils import misc_utils as utils from .utils import vocab_utils from . import optimizer_all utils.check_tensorflow_version() __all__ = ["BaseModel", "Model"] class TrainOutputTuple(collections.namedtuple( "TrainOutputTuple", ("train_summary", "train_loss", "predict_count", "global_step", "word_count", "batch_size", "grad_norm", "learning_rate"))): pass class EvalOutputTuple(collections.namedtuple( "EvalOutputTuple", ("eval_loss", "predict_count", "batch_size"))): pass class InferOutputTuple(collections.namedtuple( "InferOutputTuple", ("infer_logits", "infer_summary", "sample_id", "sample_words"))): pass class BaseModel(object): def __init__(self, hparams, mode, iterator, source_vocab_table, target_vocab_table, reverse_target_vocab_table=None, scope=None, extra_args=None): self._set_params_initializer(hparams, mode, iterator, source_vocab_table, target_vocab_table, scope, extra_args) self.extract_encoder_layers = (hasattr(hparams, "extract_encoder_layers") and hparams.extract_encoder_layers) res = self.build_graph(hparams, scope=scope) if not self.extract_encoder_layers: self._set_train_or_infer(res, reverse_target_vocab_table, hparams) self.saver = tf.train.Saver( tf.global_variables(), max_to_keep=hparams.num_keep_ckpts) def _set_params_initializer(self, hparams, mode, iterator, source_vocab_table, target_vocab_table, scope, extra_args=None): assert isinstance(iterator, iterator_utils.BatchedInput) self.iterator = iterator self.mode = mode self.src_vocab_table = source_vocab_table self.tgt_vocab_table = target_vocab_table self.src_vocab_size = hparams.src_vocab_size self.tgt_vocab_size = hparams.tgt_vocab_size self.num_gpus = hparams.num_gpus self.time_major = hparams.time_major if hparams.use_char_encode: assert (not self.time_major), ("Can't use time major for" " char-level inputs.") self.dtype = tf.float32 self.num_sampled_softmax = hparams.num_sampled_softmax self.single_cell_fn = None if extra_args: self.single_cell_fn = extra_args.single_cell_fn self.num_units = hparams.num_units self.num_encoder_layers = hparams.num_encoder_layers self.num_decoder_layers = hparams.num_decoder_layers assert self.num_encoder_layers assert self.num_decoder_layers if hasattr(hparams, "num_residual_layers"): self.num_encoder_residual_layers = hparams.num_residual_layers self.num_decoder_residual_layers = hparams.num_residual_layers else: self.num_encoder_residual_layers = hparams.num_encoder_residual_layers self.num_decoder_residual_layers = hparams.num_decoder_residual_layers self.batch_size = tf.size(self.iterator.source_sequence_length) self.global_step = tf.Variable(0, trainable=False) self.random_seed = hparams.random_seed initializer = model_helper.get_initializer( hparams.init_op, self.random_seed, hparams.init_weight) tf.get_variable_scope().set_initializer(initializer) if extra_args and extra_args.encoder_emb_lookup_fn: self.encoder_emb_lookup_fn = extra_args.encoder_emb_lookup_fn else: self.encoder_emb_lookup_fn = tf.nn.embedding_lookup self.init_embeddings(hparams, scope) def _set_train_or_infer(self, res, reverse_target_vocab_table, hparams): if self.mode == tf.contrib.learn.ModeKeys.TRAIN: self.train_loss = res[1] self.word_count = tf.reduce_sum( self.iterator.source_sequence_length) + tf.reduce_sum( self.iterator.target_sequence_length) elif 
self.mode == tf.contrib.learn.ModeKeys.EVAL: self.eval_loss = res[1] elif self.mode == tf.contrib.learn.ModeKeys.INFER: self.infer_logits, _, self.final_context_state, self.sample_id = res self.sample_words = reverse_target_vocab_table.lookup( tf.to_int64(self.sample_id)) if self.mode != tf.contrib.learn.ModeKeys.INFER: self.predict_count = tf.reduce_sum( self.iterator.target_sequence_length) params = tf.trainable_variables() if self.mode == tf.contrib.learn.ModeKeys.TRAIN: self.learning_rate = tf.constant(hparams.learning_rate) self.learning_rate = self._get_learning_rate_warmup(hparams) self.learning_rate = self._get_learning_rate_decay(hparams) if hparams.optimizer == "sgd": opt = tf.train.GradientDescentOptimizer(self.learning_rate) elif hparams.optimizer == 'adam': opt = optimizer_all.Adam(learning_rate=self.learning_rate, beta1=hparams.beta1, beta2=hparams.beta2, epsilon=hparams.epsilon) elif hparams.optimizer == 'adaShift': opt = optimizer_all.AdaShift(learning_rate=self.learning_rate, keep_num=hparams.keep_num, beta1=hparams.beta1, beta2=hparams.beta2, epsilon=hparams.epsilon, pred_g_op=hparams.pred_g_op, use_mov=(hparams.use_mov==1), mov_num=hparams.mov_num) elif hparams.optimizer == "amsgrad": opt = optimizer_all.AMSGrad(learning_rate=self.learning_rate, beta1=hparams.beta1, beta2=hparams.beta2, epsilon=hparams.epsilon) else: assert 'No optimizer has been chosed, name may be wrong' gradients = tf.gradients( self.train_loss, params, colocate_gradients_with_ops=hparams.colocate_gradients_with_ops) clipped_grads, grad_norm_summary, grad_norm = model_helper.gradient_clip( gradients, max_gradient_norm=hparams.max_gradient_norm) self.grad_norm_summary = grad_norm_summary self.grad_norm = grad_norm self.update = opt.apply_gradients( zip(clipped_grads, params), global_step=self.global_step) self.train_summary = self._get_train_summary() elif self.mode == tf.contrib.learn.ModeKeys.INFER: self.infer_summary = self._get_infer_summary(hparams) utils.print_out("# Trainable variables") utils.print_out("Format: <name>, <shape>, <(soft) device placement>") for param in params: utils.print_out(" %s, %s, %s" % (param.name, str(param.get_shape()), param.op.device)) def _get_learning_rate_warmup(self, hparams): warmup_steps = hparams.warmup_steps warmup_scheme = hparams.warmup_scheme utils.print_out(" learning_rate=%g, warmup_steps=%d, warmup_scheme=%s" % (hparams.learning_rate, warmup_steps, warmup_scheme)) if warmup_scheme == "t2t": warmup_factor = tf.exp(tf.log(0.01) / warmup_steps) inv_decay = warmup_factor**( tf.to_float(warmup_steps - self.global_step)) else: raise ValueError("Unknown warmup scheme %s" % warmup_scheme) return tf.cond( self.global_step < hparams.warmup_steps, lambda: inv_decay * self.learning_rate, lambda: self.learning_rate, name="learning_rate_warump_cond") def _get_decay_info(self, hparams): if hparams.decay_scheme in ["luong5", "luong10", "luong234"]: decay_factor = 0.5 if hparams.decay_scheme == "luong5": start_decay_step = int(hparams.num_train_steps / 2) decay_times = 5 elif hparams.decay_scheme == "luong10": start_decay_step = int(hparams.num_train_steps / 2) decay_times = 10 elif hparams.decay_scheme == "luong234": start_decay_step = int(hparams.num_train_steps * 2 / 3) decay_times = 4 remain_steps = hparams.num_train_steps - start_decay_step decay_steps = int(remain_steps / decay_times) elif hparams.decay_scheme == "self": start_decay_step = 8000 decay_steps = 1000 decay_factor = 0.5 elif hparams.decay_scheme == "ming": start_decay_step = 8000 decay_steps = 3000 
decay_factor = 0.5 elif not hparams.decay_scheme: start_decay_step = hparams.num_train_steps decay_steps = 0 decay_factor = 1.0 elif hparams.decay_scheme: raise ValueError("Unknown decay scheme %s" % hparams.decay_scheme) return start_decay_step, decay_steps, decay_factor def _get_learning_rate_decay(self, hparams): start_decay_step, decay_steps, decay_factor = self._get_decay_info(hparams) utils.print_out(" decay_scheme=%s, start_decay_step=%d, decay_steps %d, " "decay_factor %g" % (hparams.decay_scheme, start_decay_step, decay_steps, decay_factor)) return tf.cond( self.global_step < start_decay_step, lambda: self.learning_rate, lambda: tf.train.exponential_decay( self.learning_rate, (self.global_step - start_decay_step), decay_steps, decay_factor, staircase=True), name="learning_rate_decay_cond") def init_embeddings(self, hparams, scope): self.embedding_encoder, self.embedding_decoder = ( model_helper.create_emb_for_encoder_and_decoder( share_vocab=hparams.share_vocab, src_vocab_size=self.src_vocab_size, tgt_vocab_size=self.tgt_vocab_size, src_embed_size=self.num_units, tgt_embed_size=self.num_units, num_enc_partitions=hparams.num_enc_emb_partitions, num_dec_partitions=hparams.num_dec_emb_partitions, src_vocab_file=hparams.src_vocab_file, tgt_vocab_file=hparams.tgt_vocab_file, src_embed_file=hparams.src_embed_file, tgt_embed_file=hparams.tgt_embed_file, use_char_encode=hparams.use_char_encode, scope=scope,)) def _get_train_summary(self): train_summary = tf.summary.merge( [tf.summary.scalar("lr", self.learning_rate), tf.summary.scalar("train_loss", self.train_loss)] + self.grad_norm_summary) return train_summary def train(self, sess): assert self.mode == tf.contrib.learn.ModeKeys.TRAIN output_tuple = TrainOutputTuple(train_summary=self.train_summary, train_loss=self.train_loss, predict_count=self.predict_count, global_step=self.global_step, word_count=self.word_count, batch_size=self.batch_size, grad_norm=self.grad_norm, learning_rate=self.learning_rate) return sess.run([self.update, output_tuple])
MIT License
res2net/res2net-detectron2
detectron2/engine/defaults.py
default_argument_parser
python
def default_argument_parser(epilog=None):
    parser = argparse.ArgumentParser(
        epilog=epilog
        or f"""
Examples:

Run on single machine:
    $ {sys.argv[0]} --num-gpus 8 --config-file cfg.yaml MODEL.WEIGHTS /path/to/weight.pth

Run on multiple machines:
    (machine0)$ {sys.argv[0]} --machine-rank 0 --num-machines 2 --dist-url <URL> [--other-flags]
    (machine1)$ {sys.argv[0]} --machine-rank 1 --num-machines 2 --dist-url <URL> [--other-flags]
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
    parser.add_argument(
        "--resume",
        action="store_true",
        help="whether to attempt to resume from the checkpoint directory",
    )
    parser.add_argument("--eval-only", action="store_true", help="perform evaluation only")
    parser.add_argument("--num-gpus", type=int, default=1, help="number of gpus *per machine*")
    parser.add_argument("--num-machines", type=int, default=1, help="total number of machines")
    parser.add_argument(
        "--machine-rank", type=int, default=0, help="the rank of this machine (unique per machine)"
    )
    port = 2 ** 15 + 2 ** 14 + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14
    parser.add_argument(
        "--dist-url",
        default="tcp://127.0.0.1:{}".format(port),
        help="initialization URL for pytorch distributed backend. See "
        "https://pytorch.org/docs/stable/distributed.html for details.",
    )
    parser.add_argument(
        "opts",
        help="Modify config options using the command-line",
        default=None,
        nargs=argparse.REMAINDER,
    )
    return parser
Create a parser with some common arguments used by detectron2 users.

Args:
    epilog (str): epilog passed to ArgumentParser describing the usage.

Returns:
    argparse.ArgumentParser:
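A hedged usage sketch in the style of detectron2's train_net tools; the main function body is a placeholder, and the launch import reflects the usual caller, not something this helper requires.

from detectron2.engine import default_argument_parser, launch

def main(args):
    pass  # hypothetical training/eval entry point

if __name__ == "__main__":
    args = default_argument_parser().parse_args()
    print("Command Line Args:", args)
    launch(
        main,
        args.num_gpus,
        num_machines=args.num_machines,
        machine_rank=args.machine_rank,
        dist_url=args.dist_url,
        args=(args,),
    )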
https://github.com/res2net/res2net-detectron2/blob/3677895d5d23635b67837e64a79370b9ee117c27/detectron2/engine/defaults.py#L49-L102
import argparse import logging import os import sys from collections import OrderedDict import torch from fvcore.common.file_io import PathManager from fvcore.nn.precise_bn import get_bn_modules from torch.nn.parallel import DistributedDataParallel import detectron2.data.transforms as T from detectron2.checkpoint import DetectionCheckpointer from detectron2.data import ( MetadataCatalog, build_detection_test_loader, build_detection_train_loader, ) from detectron2.evaluation import ( DatasetEvaluator, inference_on_dataset, print_csv_format, verify_results, ) from detectron2.modeling import build_model from detectron2.solver import build_lr_scheduler, build_optimizer from detectron2.utils import comm from detectron2.utils.collect_env import collect_env_info from detectron2.utils.env import seed_all_rng from detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter from detectron2.utils.logger import setup_logger from . import hooks from .train_loop import SimpleTrainer __all__ = ["default_argument_parser", "default_setup", "DefaultPredictor", "DefaultTrainer"]
Apache License 2.0
wbond/oscrypto
oscrypto/_pkcs12.py
pkcs12_kdf
python
def pkcs12_kdf(hash_algorithm, password, salt, iterations, key_length, id_): if not isinstance(password, byte_cls): raise TypeError(pretty_message( ''' password must be a byte string, not %s ''', type_name(password) )) if not isinstance(salt, byte_cls): raise TypeError(pretty_message( ''' salt must be a byte string, not %s ''', type_name(salt) )) if not isinstance(iterations, int_types): raise TypeError(pretty_message( ''' iterations must be an integer, not %s ''', type_name(iterations) )) if iterations < 1: raise ValueError(pretty_message( ''' iterations must be greater than 0 - is %s ''', repr(iterations) )) if not isinstance(key_length, int_types): raise TypeError(pretty_message( ''' key_length must be an integer, not %s ''', type_name(key_length) )) if key_length < 1: raise ValueError(pretty_message( ''' key_length must be greater than 0 - is %s ''', repr(key_length) )) if hash_algorithm not in set(['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']): raise ValueError(pretty_message( ''' hash_algorithm must be one of "md5", "sha1", "sha224", "sha256", "sha384", "sha512", not %s ''', repr(hash_algorithm) )) if id_ not in set([1, 2, 3]): raise ValueError(pretty_message( ''' id_ must be one of 1, 2, 3, not %s ''', repr(id_) )) utf16_password = password.decode('utf-8').encode('utf-16be') + b'\x00\x00' algo = getattr(hashlib, hash_algorithm) u = { 'md5': 16, 'sha1': 20, 'sha224': 28, 'sha256': 32, 'sha384': 48, 'sha512': 64 }[hash_algorithm] if hash_algorithm in ['sha384', 'sha512']: v = 128 else: v = 64 d = chr_cls(id_) * v s = b'' if salt != b'': s_len = v * int(math.ceil(float(len(salt)) / v)) while len(s) < s_len: s += salt s = s[0:s_len] p = b'' if utf16_password != b'': p_len = v * int(math.ceil(float(len(utf16_password)) / v)) while len(p) < p_len: p += utf16_password p = p[0:p_len] i = s + p c = int(math.ceil(float(key_length) / u)) a = b'\x00' * (c * u) for num in range(1, c + 1): a2 = algo(d + i).digest() for _ in range(2, iterations + 1): a2 = algo(a2).digest() if num < c: b = b'' while len(b) < v: b += a2 b = int_from_bytes(b[0:v]) + 1 for num2 in range(0, len(i) // v): start = num2 * v end = (num2 + 1) * v i_num2 = i[start:end] i_num2 = int_to_bytes(int_from_bytes(i_num2) + b) i_num2_l = len(i_num2) if i_num2_l > v: i_num2 = i_num2[i_num2_l - v:] i = i[0:start] + i_num2 + i[end:] begin = (num - 1) * u to_copy = min(key_length, u) a = a[0:begin] + a2[0:to_copy] + a[begin + to_copy:] return a[0:key_length]
KDF from RFC7292 appendix B.2 - https://tools.ietf.org/html/rfc7292#page-19

:param hash_algorithm:
    The string name of the hash algorithm to use: "md5", "sha1", "sha224",
    "sha256", "sha384", "sha512"

:param password:
    A byte string of the password to use as an input to the KDF

:param salt:
    A cryptographic random byte string

:param iterations:
    The number of iterations to use when deriving the key

:param key_length:
    The length of the desired key in bytes

:param id_:
    The ID of the usage - 1 for key, 2 for iv, 3 for mac

:return:
    The derived key as a byte string
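A hedged usage sketch (values illustrative): deriving key and IV material for a PKCS#12-style password-based cipher from the same password and salt.

import os

salt = os.urandom(8)
key = pkcs12_kdf('sha1', b'password', salt, 2048, 24, 1)  # id_=1: encryption key
iv = pkcs12_kdf('sha1', b'password', salt, 2048, 8, 2)    # id_=2: initialization vector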
https://github.com/wbond/oscrypto/blob/d40c62577706682a0f6da5616ad09964f1c9137d/oscrypto/_pkcs12.py#L26-L198
from __future__ import unicode_literals, division, absolute_import, print_function import sys import hashlib import math from ._asn1 import int_from_bytes, int_to_bytes from ._errors import pretty_message from ._types import type_name, byte_cls, int_types if sys.version_info < (3,): chr_cls = chr else: def chr_cls(num): return bytes([num]) __all__ = [ 'pkcs12_kdf', ]
MIT License
steph1793/pointer_transformer_generator
data_helper.py
Data_Helper.get_dec_inp_targ_seqs
python
def get_dec_inp_targ_seqs(sequence, max_len, start_id, stop_id):
    inp = [start_id] + sequence[:]
    target = sequence[:]
    if len(inp) > max_len:
        inp = inp[:max_len]
        target = target[:max_len]
    else:
        target.append(stop_id)
    assert len(inp) == len(target)
    return inp, target
Given the reference summary as a sequence of tokens, return the input sequence for the decoder, and the target sequence which we will use to calculate loss. The sequence will be truncated if it is longer than max_len. The input sequence must start with the start_id and the target sequence must end with the stop_id (but not if it's been truncated).

Args:
    sequence: List of ids (integers)
    max_len: integer
    start_id: integer
    stop_id: integer

Returns:
    inp: sequence length <= max_len starting with start_id
    target: sequence same length as input, ending with stop_id only if there was no truncation
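A small worked example, not from the source, showing the truncation behaviour described above (the token ids are made up):

seq = [11, 12, 13, 14]

inp, target = Data_Helper.get_dec_inp_targ_seqs(seq, 4, start_id=2, stop_id=3)
# inp    == [2, 11, 12, 13]      (truncated to max_len)
# target == [11, 12, 13, 14]     (no stop_id because of truncation)

inp, target = Data_Helper.get_dec_inp_targ_seqs(seq, 6, start_id=2, stop_id=3)
# inp    == [2, 11, 12, 13, 14]
# target == [11, 12, 13, 14, 3]  (stop_id appended)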
https://github.com/steph1793/pointer_transformer_generator/blob/5f562b6166a861692479c5654365a74a614d0634/data_helper.py#L126-L146
class Vocab: SENTENCE_START = '<s>' SENTENCE_END = '</s>' PAD_TOKEN = '[PAD]' UNKNOWN_TOKEN = '[UNK]' START_DECODING = '[START]' STOP_DECODING = '[STOP]' def __init__(self, vocab_file, max_size): self.word2id = {Vocab.UNKNOWN_TOKEN : 0, Vocab.PAD_TOKEN : 1, Vocab.START_DECODING : 2, Vocab.STOP_DECODING : 3} self.id2word = {0 : Vocab.UNKNOWN_TOKEN, 1 : Vocab.PAD_TOKEN, 2 : Vocab.START_DECODING, 3 : Vocab.STOP_DECODING} self.count = 4 with open(vocab_file, 'r') as f: for line in f: pieces = line.split() if len(pieces) != 2 : print('Warning : incorrectly formatted line in vocabulary file : %s\n' % line) continue w = pieces[0] if w in [Vocab.SENTENCE_START, Vocab.SENTENCE_END, Vocab.UNKNOWN_TOKEN, Vocab.PAD_TOKEN, Vocab.START_DECODING, Vocab.STOP_DECODING]: raise Exception('<s>, </s>, [UNK], [PAD], [START] and [STOP] shouldn\'t be in the vocab file, but %s is' % w) if w in self.word2id: raise Exception('Duplicated word in vocabulary file: %s' % w) self.word2id[w] = self.count self.id2word[self.count] = w self.count += 1 if max_size != 0 and self.count >= max_size: print("max_size of vocab was specified as %i; we now have %i words. Stopping reading." % (max_size, self.count)) break print("Finished constructing vocabulary of %i total words. Last word added: %s" % (self.count, self.id2word[self.count-1])) def word_to_id(self, word): if word not in self.word2id: return self.word2id[Vocab.UNKNOWN_TOKEN] return self.word2id[word] def id_to_word(self, word_id): if word_id not in self.id2word: raise ValueError('Id not found in vocab: %d' % word_id) return self.id2word[word_id] def size(self): return self.count class Data_Helper: def article_to_ids(article_words, vocab): ids = [] oovs = [] unk_id = vocab.word_to_id(vocab.UNKNOWN_TOKEN) for w in article_words: i = vocab.word_to_id(w) if i == unk_id: if w not in oovs: oovs.append(w) oov_num = oovs.index(w) ids.append(vocab.size() + oov_num) else: ids.append(i) return ids, oovs def abstract_to_ids(abstract_words, vocab, article_oovs): ids = [] unk_id = vocab.word_to_id(vocab.UNKNOWN_TOKEN) for w in abstract_words: i = vocab.word_to_id(w) if i == unk_id: if w in article_oovs: vocab_idx = vocab.size() + article_oovs.index(w) ids.append(vocab_idx) else: ids.append(unk_id) else: ids.append(i) return ids def output_to_words(id_list, vocab, article_oovs): words = [] for i in id_list: try: w = vocab.id_to_word(i) except ValueError as e: assert article_oovs is not None, "Error: model produced a word ID that isn't in the vocabulary. This should not happen in baseline (no pointer-generator) mode" article_oov_idx = i - vocab.size() try: w = article_oovs[article_oov_idx] except ValueError as e: raise ValueError('Error: model produced word ID %i which corresponds to article OOV %i but this example only has %i article OOVs' % (i, article_oov_idx, len(article_oovs))) words.append(w) return words def abstract_to_sents(abstract): cur = 0 sents = [] while True: try: start_p = abstract.index(Vocab.SENTENCE_START, cur) end_p = abstract.index(Vocab.SENTENCE_END, start_p + 1) cur = end_p + len(Vocab.SENTENCE_END) sents.append(abstract[start_p+len(Vocab.SENTENCE_START):end_p]) except ValueError as e: return sents
MIT License
yggdroot/leaderf
autoload/leaderf/python/leaderf/gtagsExpl.py
GtagsExplorer._exists
python
def _exists(self, path, dir):
    if os.name == 'nt':
        root = os.path.splitdrive(os.path.abspath(path))[0] + os.sep
    else:
        root = '/'

    while os.path.abspath(path) != root:
        cur_dir = os.path.join(path, dir)
        if os.path.exists(cur_dir) and os.path.isdir(cur_dir):
            return True
        path = os.path.join(path, "..")

    cur_dir = os.path.join(path, dir)
    if os.path.exists(cur_dir) and os.path.isdir(cur_dir):
        return True

    return False
Return True if `dir` exists in `path` or in any of its ancestor directories, otherwise return False.
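A self-contained re-expression of the same upward walk, usable outside the vim plugin for illustration; exists_upward is a hypothetical name, not part of LeaderF.

import os

def exists_upward(path, dir_name):
    # Walk from `path` up to the filesystem root, returning True at the
    # first directory (including `path` itself) that contains `dir_name`.
    if os.name == 'nt':
        root = os.path.splitdrive(os.path.abspath(path))[0] + os.sep
    else:
        root = '/'
    path = os.path.abspath(path)
    while True:
        if os.path.isdir(os.path.join(path, dir_name)):
            return True
        if path == root:
            return False
        path = os.path.abspath(os.path.join(path, ".."))

# e.g. exists_upward('/home/user/project/src', '.git')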
https://github.com/yggdroot/leaderf/blob/869a4005f63eba5a9e6e175bca0c50d73be48e2d/autoload/leaderf/python/leaderf/gtagsExpl.py#L606-L627
import vim import re import os import os.path import shutil import itertools import subprocess from .utils import * from .explorer import * from .manager import * if sys.version_info >= (3, 0): import queue as Queue else: import Queue class GtagsExplorer(Explorer): def __init__(self): self._executor = [] self._pattern_regex = [] if os.name == 'nt': self._cd_option = '/d ' else: self._cd_option = '' self._root_markers = lfEval("g:Lf_RootMarkers") self._db_location = os.path.join(lfEval("g:Lf_CacheDirectory"), '.LfCache', 'gtags') self._store_in_project = lfEval("get(g:, 'Lf_GtagsStoreInProject', 0)") == '1' self._store_in_rootmarker = lfEval("get(g:, 'Lf_GtagsStoreInRootMarker', 0)") == '1' self._project_root = "" self._gtagslibpath = [] self._result_format = None self._last_result_format = None self._evalVimVar() self._has_nvim = lfEval("has('nvim')") == '1' self._db_timestamp = 0 self._last_command = "" self._content = [] self._with_gutentags = lfEval("get(g:, 'Lf_GtagsGutentags', 0)") != '0' self._is_debug = False self._cmd = '' self._task_queue = Queue.Queue() self._worker_thread = threading.Thread(target=self._processTask) self._worker_thread.daemon = True self._worker_thread.start() def __del__(self): self._task_queue.put(None) self._worker_thread.join() def _processTask(self): while True: try: task = self._task_queue.get() if task is None: break task() except Exception as e: print(e) def setContent(self, content): if self._last_command == "--all": self._content = content def getContent(self, *args, **kwargs): arguments_dict = kwargs.get("arguments", {}) self._is_debug = "--debug" in arguments_dict if "--recall" in arguments_dict: return [] if vim.current.buffer.name: filename = os.path.normpath(lfDecode(vim.current.buffer.name)) else: filename = os.path.join(lfGetCwd(), 'no_name') if "--gtagsconf" in arguments_dict: self._gtagsconf = arguments_dict["--gtagsconf"][0] if "--gtagslabel" in arguments_dict: self._gtagslabel = arguments_dict["--gtagslabel"][0] if self._gtagsconf == '' and os.name == 'nt': self._gtagsconf = os.path.normpath(os.path.join(self._which("gtags.exe"), "..", "share", "gtags", "gtags.conf")).join('""') if "--gtagslibpath" in arguments_dict: self._gtagslibpath = [os.path.abspath(os.path.expanduser(p)) for p in arguments_dict["--gtagslibpath"]] for i in self._gtagslibpath: if not os.path.exists(i): print("`%s` does not exist!" 
% i) else: self._gtagslibpath = [] if "--update" in arguments_dict: self._evalVimVar() if "--accept-dotfiles" in arguments_dict: self._accept_dotfiles = "--accept-dotfiles " if "--skip-unreadable" in arguments_dict: self._skip_unreadable = "--skip-unreadable " if "--skip-symlink" in arguments_dict and self._skip_symlink != "": skip_symlink = arguments_dict["--skip-symlink"] self._skip_symlink = "--skip-symlink%s " % ('=' + skip_symlink[0] if skip_symlink else "") self.updateGtags(filename, single_update=False, auto=False) return elif "--remove" in arguments_dict: self._remove(filename) return if "--path-style" in arguments_dict: path_style = "--path-style %s " % arguments_dict["--path-style"][0] else: path_style = "" auto_jump = False self._last_result_format = self._result_format self._result_format = None if "-d" in arguments_dict: pattern = arguments_dict["-d"][0] pattern_option = "-d -e %s " % pattern if "--auto-jump" in arguments_dict: auto_jump = True elif "-r" in arguments_dict: pattern = arguments_dict["-r"][0] pattern_option = "-r -e %s " % pattern if "--auto-jump" in arguments_dict: auto_jump = True elif "-s" in arguments_dict: pattern = arguments_dict["-s"][0] pattern_option = "-s -e %s " % pattern elif "-g" in arguments_dict: pattern = arguments_dict["-g"][0] pattern_option = "-g -e %s " % pattern elif "--by-context" in arguments_dict: pattern = lfEval('expand("<cword>")') pattern_option = '--from-here "%d:%s" %s ' % (vim.current.window.cursor[0], vim.current.buffer.name, pattern) if "--auto-jump" in arguments_dict: auto_jump = True else: if "--current-buffer" in arguments_dict: pattern_option = '-f "%s" -q' % vim.current.buffer.name elif "--all-buffers" in arguments_dict: pattern_option = '-f "%s" -q' % '" "'.join(b.name for b in vim.buffers) else: pattern_option = None root, dbpath, exists = self._root_dbpath(filename) if not filename.startswith(root): libdb = os.path.join(dbpath, "GTAGSLIBPATH") if os.path.exists(libdb): with lfOpen(libdb, 'r', errors='ignore') as f: for line in f: tmp_root, tmp_dbpath = line.rstrip().split('\t', 1) if filename.startswith(tmp_root): root = tmp_root dbpath = tmp_dbpath break if "--result" in arguments_dict: self._result_format = arguments_dict["--result"][0] else: self._result_format = "ctags" env = os.environ env["GTAGSROOT"] = root env["GTAGSDBPATH"] = dbpath if pattern_option is None: cmd = 'global -P | global -L- -f {}--gtagslabel={} {}--color=never --result={}'.format( '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, path_style, self._result_format) else: cmd = 'global {}--gtagslabel={} {} {}--color=never --result={}'.format( '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, pattern_option, path_style, self._result_format) if not self._isDBModified(os.path.join(dbpath, 'GTAGS')) and self._content and self._cmd == cmd: return self._content self._cmd = cmd executor = AsyncExecutor() self._executor.append(executor) lfCmd("let g:Lf_Debug_GtagsCmd = '%s'" % escQuote(cmd)) self._last_command = "--all" content = executor.execute(cmd, env=env, raise_except=False) return content if "-S" in arguments_dict: scope = "--scope %s " % os.path.abspath(arguments_dict["-S"][0]) else: scope = "" if "--literal" in arguments_dict: literal = "--literal " else: literal = "" if "-i" in arguments_dict: ignorecase = "-i " else: ignorecase = "" if "--append" not in arguments_dict or self._last_result_format is not None: self._pattern_regex = [] if ignorecase: case_pattern = r'\c' else: case_pattern = 
r'\C' if len(pattern) > 1 and (pattern[0] == pattern[-1] == '"' or pattern[0] == pattern[-1] == "'"): p = pattern[1:-1] else: p = pattern if literal: if len(pattern) > 1 and pattern[0] == pattern[-1] == '"': p = re.sub(r'\\(?!")', r'\\\\', p) else: p = p.replace('\\', r'\\') self._pattern_regex.append(r'\V' + case_pattern + p) else: if "-g" not in arguments_dict: vim_regex = self.translateRegex(case_pattern + p.join([r'\b', r'\b'])) vim_regex = vim_regex.replace('.', r'\w') else: vim_regex = self.translateRegex(case_pattern + p) self._pattern_regex.append(vim_regex) root, dbpath, exists = self._root_dbpath(filename) env = os.environ env["GTAGSROOT"] = root env["GTAGSDBPATH"] = dbpath cmd = 'global {}--gtagslabel={} {} {}{}{}{}--color=never --result=ctags-mod'.format( '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, pattern_option, path_style, scope, literal, ignorecase) executor = AsyncExecutor() self._executor.append(executor) lfCmd("let g:Lf_Debug_GtagsCmd = '%s'" % escQuote(cmd)) self._last_command = "others" content = executor.execute(cmd, env=env) libdb = os.path.join(dbpath, "GTAGSLIBPATH") if os.path.exists(libdb): with lfOpen(libdb, 'r', errors='ignore') as f: for line in f: root, dbpath = line.rstrip().split('\t', 1) env = os.environ env["GTAGSROOT"] = root env["GTAGSDBPATH"] = dbpath if path_style == "--path-style abslib ": path_style = "--path-style absolute " cmd = 'global {}--gtagslabel={} {} {}{}{}{}--color=never --result=ctags-mod -q'.format( '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, pattern_option, path_style, scope, literal, ignorecase) executor = AsyncExecutor() self._executor.append(executor) content += executor.execute(cmd, env=env) if auto_jump: first_two = list(itertools.islice(content, 2)) if len(first_two) == 1: return first_two else: return content.join_left(first_two) return content def translateRegex(self, regex, is_perl=False): vim_regex = regex vim_regex = re.sub(r'([%@&])', r'\\\1', vim_regex) vim_regex = re.sub(r'(?<!\\)\*\?', r'{-}', vim_regex) vim_regex = re.sub(r'(?<!\\)\+\?', r'{-1,}', vim_regex) vim_regex = re.sub(r'(?<!\\)\?\?', r'{-0,1}', vim_regex) vim_regex = re.sub(r'(?<!\\)\{(.*?)\}\?', r'{-\1}', vim_regex) if is_perl: vim_regex = re.sub(r'(?<!\\)([*+?}])\+', r'\1', vim_regex) vim_regex = re.sub(r'\(\?#.*?\)', r'', vim_regex) vim_regex = re.sub(r'\(\?=(.+?)\)', r'(\1)@=', vim_regex) vim_regex = re.sub(r'\(\?!(.+?)\)', r'(\1)@!', vim_regex) vim_regex = re.sub(r'\(\?<=(.+?)\)', r'(\1)@<=', vim_regex) vim_regex = re.sub(r'\(\?<!(.+?)\)', r'(\1)@<!', vim_regex) vim_regex = re.sub(r'\(\?>(.+?)\)', r'(\1)@>', vim_regex) vim_regex = vim_regex.replace(r'\A', r'^') vim_regex = vim_regex.replace(r'\z', r'$') vim_regex = vim_regex.replace(r'\B', r'') vim_regex = re.sub(r'\\b', r'(<|>)', vim_regex) vim_regex = vim_regex.replace(r'(?i)', r'\c') vim_regex = vim_regex.replace(r'(?-i)', r'\C') vim_regex = re.sub(r'(?<=\()\?P<\w+>', r'', vim_regex) vim_regex = re.sub(r'\(\?:(.+?)\)', r'%(\1)', vim_regex) vim_regex = vim_regex.replace(r'\a', r'%x07') vim_regex = vim_regex.replace(r'\f', r'%x0C') vim_regex = vim_regex.replace(r'\v', r'%x0B') vim_regex = re.sub(r'\\(x[0-9A-Fa-f][0-9A-Fa-f])', r'%\1', vim_regex) vim_regex = re.sub(r'\\([uU])', r'%\1', vim_regex) vim_regex = re.sub(r'\[\[:ascii:\]\]', r'[\\x00-\\x7F]', vim_regex) vim_regex = re.sub(r'\[\[:word:\]\]', r'[0-9A-Za-z_]', vim_regex) vim_regex = vim_regex.replace(r'[[:^alnum:]]', r'[^0-9A-Za-z]') vim_regex = 
vim_regex.replace(r'[[:^alpha:]]', r'[^A-Za-z]') vim_regex = vim_regex.replace(r'[[:^ascii:]]', r'[^\x00-\x7F]') vim_regex = vim_regex.replace(r'[[:^blank:]]', r'[^\t ]') vim_regex = vim_regex.replace(r'[[:^cntrl:]]', r'[^\x00-\x1F\x7F]') vim_regex = vim_regex.replace(r'[[:^digit:]]', r'[^0-9]') vim_regex = vim_regex.replace(r'[[:^graph:]]', r'[^!-~]') vim_regex = vim_regex.replace(r'[[:^lower:]]', r'[^a-z]') vim_regex = vim_regex.replace(r'[[:^print:]]', r'[^ -~]') vim_regex = vim_regex.replace(r'[[:^punct:]]', r'[^!-/:-@\[-`{-~]') vim_regex = vim_regex.replace(r'[[:^space:]]', r'[^\t\n\r ]') vim_regex = vim_regex.replace(r'[[:^upper:]]', r'[^A-Z]') vim_regex = vim_regex.replace(r'[[:^word:]]', r'[^0-9A-Za-z_]') vim_regex = vim_regex.replace(r'[[:^xdigit:]]', r'[^0-9A-Fa-f]') return r'\v' + vim_regex def _nearestAncestor(self, markers, path): if os.name == 'nt': root = os.path.splitdrive(os.path.abspath(path))[0] + os.sep else: root = '/' path = os.path.abspath(path) while path != root: for name in markers: if os.path.exists(os.path.join(path, name)): return path path = os.path.abspath(os.path.join(path, "..")) for name in markers: if os.path.exists(os.path.join(path, name)): return path return "" def _isVersionControl(self, filename): if self._project_root and filename.startswith(self._project_root): return True ancestor = self._nearestAncestor(self._root_markers, os.path.dirname(filename)) if ancestor: self._project_root = ancestor return True else: return False def _generateDbpath(self, path): sep_char = '-' if self._with_gutentags else '_' if os.name == 'nt': if self._with_gutentags: db_folder = re.sub(r'[:\\/]', sep_char, path) else: db_folder = re.sub(r'[\\/]', sep_char, path.replace(':\\', sep_char, 1)) else: if self._with_gutentags: db_folder = path[1:].replace('/', sep_char) else: db_folder = path.replace('/', sep_char) if self._store_in_project: return path elif self._store_in_rootmarker: for name in self._root_markers: if os.path.exists(os.path.join(path, name)): return os.path.join(path, name, '.LfGtags') return os.path.join(path, '.LfGtags') else: return os.path.join(self._db_location, db_folder) def _root_dbpath(self, filename): if self._project_root and filename.startswith(self._project_root): root = self._project_root else: ancestor = self._nearestAncestor(self._root_markers, os.path.dirname(filename)) if ancestor: self._project_root = ancestor root = self._project_root else: ancestor = self._nearestAncestor(self._root_markers, lfGetCwd()) if ancestor: self._project_root = ancestor root = self._project_root else: root = lfGetCwd() dbpath = self._generateDbpath(root) return (root, dbpath, os.path.exists(os.path.join(dbpath, "GTAGS"))) def updateGtags(self, filename, single_update, auto): self._task_queue.put(partial(self._update, filename, single_update, auto)) def _isDBModified(self, dbpath): try: if self._db_timestamp == os.path.getmtime(dbpath): return False else: self._db_timestamp = os.path.getmtime(dbpath) return True except: return True def _remove(self, filename): if filename == "": return root, dbpath, exists = self._root_dbpath(filename) try: lfCmd("echohl Question") if self._store_in_project: if lfEval('input("Are you sure you want to remove GTAGS files?[Ny] ")') in ["Y","y"]: os.remove(os.path.join(dbpath, "GTAGS")) os.remove(os.path.join(dbpath, "GPATH")) os.remove(os.path.join(dbpath, "GRTAGS")) if os.path.exists(os.path.join(dbpath, "GTAGSLIBPATH")): os.remove(os.path.join(dbpath, "GTAGSLIBPATH")) elif lfEval('input("Are you sure you want to remove directory 
`{}`?[Ny] ")'.format(lfEncode(dbpath.replace('\\', r'\\')))) in ["Y","y"]: shutil.rmtree(dbpath) lfCmd("redraw | echo 'Done!'") except Exception: lfPrintTraceback() finally: lfCmd("echohl NONE") def _update(self, filename, single_update, auto): if filename == "": return if self._gtagsconf == '' and os.name == 'nt': self._gtagsconf = os.path.normpath(os.path.join(self._which("gtags.exe"), "..", "share", "gtags", "gtags.conf")).join('""') root, dbpath, exists = self._root_dbpath(filename) if not filename.startswith(root): return self._updateLibGtags(root, dbpath) if single_update: if exists: cmd = 'cd {}"{}" && gtags {}{}{}{}--gtagslabel {} --single-update "{}" "{}"'.format(self._cd_option, root, self._accept_dotfiles, self._skip_unreadable, self._skip_symlink, '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, filename, dbpath) env = os.environ proc = subprocess.Popen(cmd, shell=True, universal_newlines=True, stderr=subprocess.PIPE, env=env) _, error = proc.communicate() elif not auto: self._executeCmd(root, dbpath) elif self._isVersionControl(filename): if not exists: self._executeCmd(root, dbpath) def _updateLibGtags(self, root, dbpath): if not self._gtagslibpath: return if not os.path.exists(dbpath): os.makedirs(dbpath) libpaths = ["%s\t%s\n" % (p, self._generateDbpath(p)) for p in self._gtagslibpath if os.path.exists(p) and p != root] if libpaths: libdb = os.path.join(dbpath, "GTAGSLIBPATH") with lfOpen(libdb, 'w', errors='ignore') as f: f.writelines(libpaths) if self._gtagsconf == '' and os.name == 'nt': self._gtagsconf = os.path.normpath(os.path.join(self._which("gtags.exe"), "..", "share", "gtags", "gtags.conf")).join('""') env = os.environ for path in self._gtagslibpath: if not os.path.exists(path): continue libdbpath = self._generateDbpath(path) if not os.path.exists(libdbpath): os.makedirs(libdbpath) cmd = 'cd {}"{}" && gtags -i {}{}{}{}--gtagslabel {} "{}"'.format(self._cd_option, path, self._accept_dotfiles, self._skip_unreadable, self._skip_symlink, '--gtagsconf %s ' % self._gtagsconf if self._gtagsconf else "", self._gtagslabel, libdbpath) proc = subprocess.Popen(cmd, shell=True, universal_newlines=True, stderr=subprocess.PIPE, env=env) _, error = proc.communicate() def _which(self, executable): for p in os.environ["PATH"].split(";"): if os.path.exists(os.path.join(p, executable)): return p return "" def _evalVimVar(self): self._accept_dotfiles = "--accept-dotfiles " if lfEval("get(g:, 'Lf_GtagsAcceptDotfiles', '0')") == '1' else "" self._skip_unreadable = "--skip-unreadable " if lfEval("get(g:, 'Lf_GtagsSkipUnreadable', '0')") == '1' else "" self._skip_symlink = "--skip-symlink%s " % ('=' + lfEval("get(g:, 'Lf_GtagsSkipSymlink', '')") if lfEval("get(g:, 'Lf_GtagsSkipSymlink', '')") != '' else "") if lfEval("get(g:, 'Lf_GtagsHigherThan6_6_2', '1')") == '0': self._skip_symlink = "" self._gtagsconf = lfEval("get(g:, 'Lf_Gtagsconf', '')") if self._gtagsconf: self._gtagsconf = self._gtagsconf.join('""') self._gtagslabel = lfEval("get(g:, 'Lf_Gtagslabel', 'default')") self._Lf_GtagsSource = int(lfEval("get(g:, 'Lf_GtagsSource', 0)")) if self._Lf_GtagsSource not in [0, 1, 2]: self._Lf_GtagsSource = 0 if self._Lf_GtagsSource != 1: if self._Lf_GtagsSource == 2: self._Lf_GtagsfilesCmd = lfEval("g:Lf_GtagsfilesCmd") return if lfEval("exists('g:Lf_ExternalCommand')") == '1': self._Lf_ExternalCommand = lfEval("g:Lf_ExternalCommand") return else: self._Lf_ExternalCommand = None self._Lf_UseVersionControlTool = lfEval("g:Lf_UseVersionControlTool") == '1' 
self._Lf_WildIgnore = lfEval("g:Lf_WildIgnore") self._Lf_RecurseSubmodules = lfEval("get(g:, 'Lf_RecurseSubmodules', 0)") == '1' if lfEval("exists('g:Lf_DefaultExternalTool')") == '1': self._default_tool = {"rg": 0, "pt": 0, "ag": 0, "find": 0} tool = lfEval("g:Lf_DefaultExternalTool") if tool and lfEval("executable('%s')" % tool) == '0': raise Exception("executable '%s' can not be found!" % tool) self._default_tool[tool] = 1 else: self._default_tool = {"rg": 1, "pt": 1, "ag": 1, "find": 1} self._is_rg_executable = lfEval("executable('rg')") == '1' self._Lf_ShowHidden = lfEval("g:Lf_ShowHidden") != '0' self._Lf_FollowLinks = lfEval("g:Lf_FollowLinks") == '1' self._is_pt_executable = lfEval("executable('pt')") == '1' self._is_ag_executable = lfEval("executable('ag')") == '1' self._is_find_executable = lfEval("executable('find')") == '1'
Apache License 2.0
nervanasystems/ngraph-neon
src/neon/util/trace_events.py
TraceEventTracker.__init__
python
def __init__(self, tracker_name):
    self.name = tracker_name
    self.events = []
Construct a new Trace Event Tracker
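A hedged usage sketch, assuming the module imports as neon.util.trace_events; the event-recording and serialization steps are only hinted at because the rest of the class is not shown here.

from neon.util.trace_events import TraceEventTracker, is_tracing_enabled

if is_tracing_enabled():          # respects the TRACING=1 environment variable
    tracker = TraceEventTracker("neon-ops")
    # ... record events during execution, then dump tracker.events
    # (Chrome trace-event format) to a JSON file for chrome://tracing.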
https://github.com/nervanasystems/ngraph-neon/blob/8988ab90ee81c8b219ea5c374702e56d7f383302/src/neon/util/trace_events.py#L28-L31
import json import os def is_tracing_enabled(): return ('TRACING' in os.environ and os.environ['TRACING'] == '1') class TraceEventTracker(object):
Apache License 2.0
fniessink/next-action
tests/features/steps/show_next_action.py
check_reference
python
def check_reference(context):
    assert_in("/", context.next_action())
Check the filename reference.
https://github.com/fniessink/next-action/blob/292832886371abf46569b3fbcec7b2da9550ca92/tests/features/steps/show_next_action.py#L234-L236
import argparse import datetime import tempfile from asserts import assert_equal, assert_in, assert_regex, assert_not_in, assert_true from behave import given, when, then from next_action.output import colorize def relative_date(days: int) -> str: return (datetime.date.today() + datetime.timedelta(days=days)).isoformat() def today() -> str: return relative_date(0) def tomorrow() -> str: return relative_date(1) def yesterday() -> str: return relative_date(-1) @given("an empty todo.txt") def empty_todotxt(context): context.files.append(tempfile.NamedTemporaryFile(mode="w")) context.arguments.extend(["--file", context.files[-1].name]) @given("an unreadable todo.txt") def unreadable_todotxt(context): context.execute_steps("given an empty todo.txt") context.files[-1].close() @given("a todo.txt with") def todotxt(context): context.execute_steps("given an empty todo.txt") context.files[-1].write(context.text.format(tomorrow=tomorrow(), yesterday=yesterday())) context.files[-1].seek(0) @given("a todo.txt named {filename} with") def named_todotxt(context, filename): context.execute_steps(f'given a todo.txt with\n"""\n{context.text}\n"""') del context.arguments[-2:] context.files[-1].given_filename = filename @when("the user asks for the next action") def next_action(context): @when("the user asks for the next action from {filename}") def next_action_from_file(context, filename): real_filename = [file.name for file in context.files if file.given_filename == filename][0] context.arguments.extend(["--file", real_filename]) @when("the user asks for the next action due {due_date}") def next_action_due(context, due_date): context.arguments.extend(["--due", due_date]) @when("the user asks for the next action over due") def next_action_over_due(context): context.arguments.extend(["--overdue"]) @when("the user asks for all next actions with at least priority A") def next_action_with_min_prio(context): context.arguments.extend(["--all", "--priority", "A"]) @when("the user asks for all next actions with a priority") def next_action_with_a_prio(context): context.arguments.extend(["--all", "--priority"]) @when("the user asks for all next actions with an invalid priority") def next_action_with_invalid_prio(context): context.arguments.extend(["--all", "--priority", "1"]) @when("the user asks for all next actions ungrouped") def next_action_ungrouped(context): context.arguments.extend(["--all", "--groupby"]) @when("the user asks for all next actions grouped by {groupby}") def next_action_groupby(context, groupby): context.arguments.extend(["--all", "--groupby", groupby.replace("due date", "duedate")]) @when("the user asks for the blocked tasks") def next_action_with_blocked_tasks(context): context.arguments.append("--blocked") @when("the user asks for the next action with the style {style}") def next_action_with_a_style(context, style): context.arguments.extend(["--style", style]) @when('the user asks for the next action with argument "{argument}"') def next_action_with_invalid_arg(context, argument): if not argument.startswith("-"): context.arguments.append("--") context.arguments.append(argument) @when("the user asks for the line number to be referenced") def next_action_ref_line_number(context): context.arguments.append("--line-number") @when("the user asks for the source file to be referenced {reference}") def next_action_ref_always(context, reference): context.arguments.extend(["--reference", reference]) @when("the user asks for the next action at {contexts}") def next_action_at_context(context, contexts): 
contexts = contexts.split(" and at ") context.arguments.extend([f"@{c}" for c in contexts]) @when("the user asks for the next action with an invalid {argument_type}") def next_action_invalid_argument(context, argument_type): if "due date" in argument_type: arguments = ["--due", "2018-02-30"] else: argument = "@" if "context" in argument_type else "+" arguments = [f"-{argument}" if "excluded" in argument_type else argument] context.arguments.extend(arguments) @when("the user asks for the next action with a due date with extra tokens") def next_action_extra_tokens(context): context.arguments.extend(["--due", "extra 2018-01-01"]) @when("the user asks for the next action with a {context_or_project} that is both included and excluded") def next_action_c_or_p_in_and_ex(context, context_or_project): argument_type = "@" if "context" in context_or_project else "+" context.arguments.extend([f"{argument_type}name", f"-{argument_type}name"]) @when("the user asks for the next action not at {contexts}") def next_action_not_at_context(context, contexts): contexts = contexts.split(" and not at ") context.arguments.extend([f"-@{c}" for c in contexts]) @when("the user asks for the next action for {projects}") def next_action_for_project(context, projects): projects = projects.split(" or for ") context.arguments.extend([f"+{p}" for p in projects]) @when("the user asks for the next action not for {projects}") def next_action_not_for_project(context, projects): projects = projects.split(" and not for ") context.arguments.extend([f"-+{p}" for p in projects]) @when("the user asks for {number} next actions") def ask_next_actions(context, number): context.arguments.extend(["--all"] if number == "all" else ["--number", str(number)]) @when("the user asks for the list of {argument_type}") def ask_for_list_of_arguments(context, argument_type): if argument_type.endswith(" arguments"): argument_type = argument_type[:-len(" arguments")] context.arguments.extend(["--list-arguments", f"{argument_type.replace(' ', '_').replace('-', '_')}"]) @when("the user asks for the next action with the open urls option") def next_action_open_url(context): context.arguments.append("--open-urls") @then("Next-action tells the user there's nothing to do") def nothing_todo(context): assert_in("Nothing to do!", context.next_action()) @then("Next-action references the line number of the next action") def check_line_number(context): assert_in("1]", context.next_action()) @then("Next-action references the source file of the next action")
Apache License 2.0
svn2github/gyp
pylib/gyp/generator/ninja.py
NinjaWriter.WriteSpec
python
def WriteSpec(self, spec, config_name, generator_flags): self.config_name = config_name self.name = spec['target_name'] self.toolset = spec['toolset'] config = spec['configurations'][config_name] self.target = Target(spec['type']) self.is_standalone_static_library = bool( spec.get('standalone_static_library', 0)) self.uses_cpp = False self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None if self.flavor == 'mac': self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) if self.flavor == 'win': self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) arch = self.msvs_settings.GetArch(config_name) self.ninja.variable('arch', self.win_env[arch]) self.ninja.variable('cc', '$cl_' + arch) self.ninja.variable('cxx', '$cl_' + arch) self.ninja.variable('cc_host', '$cl_' + arch) self.ninja.variable('cxx_host', '$cl_' + arch) if self.flavor == 'mac': self.archs = self.xcode_settings.GetActiveArchs(config_name) if len(self.archs) > 1: self.arch_subninjas = dict( (arch, ninja_syntax.Writer( OpenOutput(os.path.join(self.toplevel_build, self._SubninjaNameForArch(arch)), 'w'))) for arch in self.archs) actions_depends = [] compile_depends = [] if 'dependencies' in spec: for dep in spec['dependencies']: if dep in self.target_outputs: target = self.target_outputs[dep] actions_depends.append(target.PreActionInput(self.flavor)) compile_depends.append(target.PreCompileInput()) actions_depends = filter(None, actions_depends) compile_depends = filter(None, compile_depends) actions_depends = self.WriteCollapsedDependencies('actions_depends', actions_depends) compile_depends = self.WriteCollapsedDependencies('compile_depends', compile_depends) self.target.preaction_stamp = actions_depends self.target.precompile_stamp = compile_depends extra_sources = [] mac_bundle_depends = [] self.target.actions_stamp = self.WriteActionsRulesCopies( spec, extra_sources, actions_depends, mac_bundle_depends) compile_depends_stamp = (self.target.actions_stamp or compile_depends) link_deps = [] sources = extra_sources + spec.get('sources', []) if sources: if self.flavor == 'mac' and len(self.archs) > 1: for arch in self.archs: self.ninja.subninja(self._SubninjaNameForArch(arch)) pch = None if self.flavor == 'win': gyp.msvs_emulation.VerifyMissingSources( sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader( self.msvs_settings, config_name, self.GypPathToNinja, self.GypPathToUniqueOutput, self.obj_ext) else: pch = gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, self.GypPathToNinja, lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) link_deps = self.WriteSources( self.ninja, config_name, config, sources, compile_depends_stamp, pch, spec) obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] if obj_outputs: if self.flavor != 'mac' or len(self.archs) == 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: print "Warning: Actions/rules writing object files don't work with " "multiarch targets, dropping. 
(target %s)" % spec['target_name'] elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) if self.flavor == 'win' and self.target.type == 'static_library': self.target.component_objs = link_deps output = None is_empty_bundle = not link_deps and not mac_bundle_depends if link_deps or self.target.actions_stamp or actions_depends: output = self.WriteTarget(spec, config_name, config, link_deps, self.target.actions_stamp or actions_depends) if self.is_mac_bundle: mac_bundle_depends.append(output) if self.is_mac_bundle: output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) if not output: return None assert self.target.FinalOutput(), output return self.target
The main entry point for NinjaWriter: write the build rules for a spec. Returns a Target object, which represents the output paths for this spec. Returns None if there are no outputs (e.g. a settings-only 'none' type target).
https://github.com/svn2github/gyp/blob/e0ee72ddc7fb97eb33d530cf684efcbe4d27ecb3/pylib/gyp/generator/ninja.py#L368-L504
import collections import copy import hashlib import json import multiprocessing import os.path import re import signal import subprocess import sys import gyp import gyp.common from gyp.common import OrderedSet import gyp.msvs_emulation import gyp.MSVSUtil as MSVSUtil import gyp.xcode_emulation from cStringIO import StringIO from gyp.common import GetEnvironFallback import gyp.ninja_syntax as ninja_syntax generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', 'STATIC_LIB_PREFIX': 'lib', 'STATIC_LIB_SUFFIX': '.a', 'SHARED_LIB_PREFIX': 'lib', 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', 'PRODUCT_DIR': '$!PRODUCT_DIR', 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', 'RULE_INPUT_ROOT': '${root}', 'RULE_INPUT_DIRNAME': '${dirname}', 'RULE_INPUT_PATH': '${source}', 'RULE_INPUT_EXT': '${ext}', 'RULE_INPUT_NAME': '${name}', } generator_additional_non_configuration_keys = [] generator_additional_path_sections = [] generator_extra_sources_for_rules = [] generator_filelist_paths = None generator_supports_multiple_toolsets = ( os.environ.get('GYP_CROSSCOMPILE') or os.environ.get('AR_host') or os.environ.get('CC_host') or os.environ.get('CXX_host') or os.environ.get('AR_target') or os.environ.get('CC_target') or os.environ.get('CXX_target')) def StripPrefix(arg, prefix): if arg.startswith(prefix): return arg[len(prefix):] return arg def QuoteShellArgument(arg, flavor): if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg): return arg if flavor == 'win': return gyp.msvs_emulation.QuoteForRspFile(arg) return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" def Define(d, flavor): if flavor == 'win': d = d.replace('#', '\\%03o' % ord('#')) return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor) def AddArch(output, arch): output, extension = os.path.splitext(output) return '%s.%s%s' % (output, arch, extension) class Target(object): def __init__(self, type): self.type = type self.preaction_stamp = None self.precompile_stamp = None self.actions_stamp = None self.binary = None self.bundle = None self.component_objs = None self.import_lib = None def Linkable(self): return self.type in ('static_library', 'shared_library') def UsesToc(self, flavor): if flavor == 'win' or self.bundle: return False return self.type in ('shared_library', 'loadable_module') def PreActionInput(self, flavor): if self.UsesToc(flavor): return self.FinalOutput() + '.TOC' return self.FinalOutput() or self.preaction_stamp def PreCompileInput(self): return self.actions_stamp or self.precompile_stamp def FinalOutput(self): return self.bundle or self.binary or self.actions_stamp class NinjaWriter(object): def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None): self.hash_for_rules = hash_for_rules self.target_outputs = target_outputs self.base_dir = base_dir self.build_dir = build_dir self.ninja = ninja_syntax.Writer(output_file) self.toplevel_build = toplevel_build self.output_file_name = output_file_name self.flavor = flavor self.abs_build_dir = None if toplevel_dir is not None: self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) self.obj_ext = '.obj' if flavor == 'win' else '.o' if flavor == 'win': self.win_env = {} for arch in ('x86', 'x64'): self.win_env[arch] = 'environment.' 
+ arch build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) self.build_to_base = os.path.join(build_to_top, base_dir) base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) self.base_to_build = os.path.join(base_to_top, build_dir) def ExpandSpecial(self, path, product_dir=None): PRODUCT_DIR = '$!PRODUCT_DIR' if PRODUCT_DIR in path: if product_dir: path = path.replace(PRODUCT_DIR, product_dir) else: path = path.replace(PRODUCT_DIR + '/', '') path = path.replace(PRODUCT_DIR + '\\', '') path = path.replace(PRODUCT_DIR, '.') INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' if INTERMEDIATE_DIR in path: int_dir = self.GypPathToUniqueOutput('gen') path = path.replace(INTERMEDIATE_DIR, os.path.join(product_dir or '', int_dir)) CONFIGURATION_NAME = '$|CONFIGURATION_NAME' path = path.replace(CONFIGURATION_NAME, self.config_name) return path def ExpandRuleVariables(self, path, root, dirname, source, ext, name): if self.flavor == 'win': path = self.msvs_settings.ConvertVSMacros( path, config=self.config_name) path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], dirname) path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) return path def GypPathToNinja(self, path, env=None): if env: if self.flavor == 'mac': path = gyp.xcode_emulation.ExpandEnvVars(path, env) elif self.flavor == 'win': path = gyp.msvs_emulation.ExpandMacros(path, env) if path.startswith('$!'): expanded = self.ExpandSpecial(path) if self.flavor == 'win': expanded = os.path.normpath(expanded) return expanded if '$|' in path: path = self.ExpandSpecial(path) assert '$' not in path, path return os.path.normpath(os.path.join(self.build_to_base, path)) def GypPathToUniqueOutput(self, path, qualified=True): path = self.ExpandSpecial(path) assert not path.startswith('$'), path obj = 'obj' if self.toolset != 'target': obj += '.' + self.toolset path_dir, path_basename = os.path.split(path) if qualified: path_basename = self.name + '.' + path_basename return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, path_basename)) def WriteCollapsedDependencies(self, name, targets, order_only=None): assert targets == filter(None, targets), targets if len(targets) == 0: assert not order_only return None if len(targets) > 1 or order_only: stamp = self.GypPathToUniqueOutput(name + '.stamp') targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) self.ninja.newline() return targets[0] def _SubninjaNameForArch(self, arch): output_file_base = os.path.splitext(self.output_file_name)[0] return '%s.%s.ninja' % (output_file_base, arch)
BSD 3-Clause New or Revised License
rlbot/rlbot
src/main/python/rlbot/botmanager/bot_manager_struct.py
BotManagerStruct.__init__
python
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.rigid_body_tick = None
See documentation on BotManager.
https://github.com/rlbot/rlbot/blob/b2d06110b0c8541a85605b6c00dcb898e7f35946/src/main/python/rlbot/botmanager/bot_manager_struct.py#L13-L18
from time import sleep from rlbot.agents.base_agent import BaseAgent from rlbot.botmanager.bot_manager import BotManager from rlbot.utils.logging_utils import get_logger from rlbot.utils.structures import ball_prediction_struct as bp from rlbot.utils.structures import game_data_struct as gd from rlbot.utils.structures.bot_input_struct import PlayerInput from rlbot.utils.structures.rigid_body_struct import RigidBodyTick class BotManagerStruct(BotManager):
MIT License
frawau/aiolifx
aiolifx/aiolifx.py
Device.resp_set_hostfirmware
python
def resp_set_hostfirmware(self, resp): if resp: self.host_firmware_version = ( str(resp.version >> 16) + "." + str(resp.version & 0xFFFF) ) self.host_firmware_build_timestamp = resp.build
Default callback for get_hostfirmware
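The callback above decodes the packed 32-bit firmware version reported by the device: the major number lives in the high 16 bits and the minor number in the low 16 bits. A minimal sketch of that decoding, using an assumed raw value:

# Assumed raw value: major 3, minor 70, packed as (3 << 16) | 70.
version = (3 << 16) | 70

major = version >> 16       # 3
minor = version & 0xFFFF    # 70
host_firmware_version = str(major) + "." + str(minor)
print(host_firmware_version)  # -> "3.70"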
https://github.com/frawau/aiolifx/blob/7ba0a3775e273ac878f286e9685939f8fa19da70/aiolifx/aiolifx.py#L685-L691
import asyncio as aio from .message import BROADCAST_MAC, BROADCAST_SOURCE_ID from .msgtypes import * from .products import * from .unpack import unpack_lifx_message from functools import partial import time, random, datetime, socket, ifaddr LISTEN_IP = "0.0.0.0" UDP_BROADCAST_IP = "255.255.255.255" UDP_BROADCAST_PORT = 56700 DEFAULT_TIMEOUT = 0.5 DEFAULT_ATTEMPTS = 3 DISCOVERY_INTERVAL = 180 DISCOVERY_STEP = 5 def mac_to_ipv6_linklocal(mac, prefix="fe80::"): mac_value = int( mac.translate(str.maketrans(dict([(x, None) for x in [" ", ".", ":", "-"]]))), 16, ) high2 = mac_value >> 32 & 0xFFFF ^ 0x0200 high1 = mac_value >> 24 & 0xFF low1 = mac_value >> 16 & 0xFF low2 = mac_value & 0xFFFF return prefix + ":{:04x}:{:02x}ff:fe{:02x}:{:04x}".format(high2, high1, low1, low2) def nanosec_to_hours(ns): return ns / (1000000000.0 * 60 * 60) class Device(aio.DatagramProtocol): def __init__(self, loop, mac_addr, ip_addr, port, parent=None): self.loop = loop self.mac_addr = mac_addr self.ip_addr = ip_addr self.port = port self.parent = parent self.registered = False self.retry_count = DEFAULT_ATTEMPTS self.timeout = DEFAULT_TIMEOUT self.unregister_timeout = DEFAULT_TIMEOUT self.transport = None self.task = None self.seq = 0 self.message = {} self.source_id = random.randint(0, (2 ** 32) - 1) self.default_callb = None self.label = None self.location = None self.group = None self.power_level = None self.vendor = None self.product = None self.version = None self.host_firmware_version = None self.host_firmware_build_timestamp = None self.wifi_firmware_version = None self.wifi_firmware_build_timestamp = None self.lastmsg = datetime.datetime.now() def seq_next(self): self.seq = (self.seq + 1) % 128 return self.seq def connection_made(self, transport): self.transport = transport self.register() def datagram_received(self, data, addr): self.register() response = unpack_lifx_message(data) self.lastmsg = datetime.datetime.now() if response.seq_num in self.message: response_type, myevent, callb = self.message[response.seq_num] if type(response) == response_type: if response.source_id == self.source_id: if "State" in response.__class__.__name__: setmethod = ( "resp_set_" + response.__class__.__name__.replace("State", "").lower() ) if setmethod in dir(self) and callable( getattr(self, setmethod) ): getattr(self, setmethod)(response) if callb: callb(self, response) myevent.set() del self.message[response.seq_num] elif type(response) == Acknowledgement: pass else: del self.message[response.seq_num] elif self.default_callb: self.default_callb(response) def register(self): if not self.registered: self.registered = True if self.parent: self.parent.register(self) def unregister(self): if self.registered: if ( datetime.datetime.now() - datetime.timedelta(seconds=self.unregister_timeout) > self.lastmsg ): self.registered = False if self.parent: self.parent.unregister(self) def cleanup(self): if self.transport: self.transport.close() self.transport = None if self.task: self.task.cancel() self.task = None async def fire_sending(self, msg, num_repeats): if num_repeats is None: num_repeats = self.retry_count sent_msg_count = 0 sleep_interval = 0.05 while sent_msg_count < num_repeats: if self.transport: self.transport.sendto(msg.packed_message) sent_msg_count += 1 await aio.sleep( sleep_interval ) def fire_and_forget( self, msg_type, payload={}, timeout_secs=None, num_repeats=None ): msg = msg_type( self.mac_addr, self.source_id, seq_num=0, payload=payload, ack_requested=False, response_requested=False, ) xx = 
self.loop.create_task(self.fire_sending(msg, num_repeats)) return True async def try_sending(self, msg, timeout_secs, max_attempts): if timeout_secs is None: timeout_secs = self.timeout if max_attempts is None: max_attempts = self.retry_count attempts = 0 while attempts < max_attempts: if msg.seq_num not in self.message: return event = aio.Event() self.message[msg.seq_num][1] = event attempts += 1 if self.transport: self.transport.sendto(msg.packed_message) try: myresult = await aio.wait_for(event.wait(), timeout_secs) break except Exception as inst: if attempts >= max_attempts: if msg.seq_num in self.message: callb = self.message[msg.seq_num][2] if callb: callb(self, None) del self.message[msg.seq_num] self.unregister() def req_with_ack( self, msg_type, payload, callb=None, timeout_secs=None, max_attempts=None ): msg = msg_type( self.mac_addr, self.source_id, seq_num=self.seq_next(), payload=payload, ack_requested=True, response_requested=False, ) self.message[msg.seq_num] = [Acknowledgement, None, callb] xx = self.loop.create_task(self.try_sending(msg, timeout_secs, max_attempts)) return True def req_with_resp( self, msg_type, response_type, payload={}, callb=None, timeout_secs=None, max_attempts=None, ): msg = msg_type( self.mac_addr, self.source_id, seq_num=self.seq_next(), payload=payload, ack_requested=False, response_requested=True, ) self.message[msg.seq_num] = [response_type, None, callb] xx = self.loop.create_task(self.try_sending(msg, timeout_secs, max_attempts)) return True def req_with_ack_resp( self, msg_type, response_type, payload, callb=None, timeout_secs=None, max_attempts=None, ): msg = msg_type( self.mac_addr, self.source_id, seq_num=self.seq_next(), payload=payload, ack_requested=True, response_requested=True, ) self.message[msg.seq_num] = [response_type, None, callb] xx = self.loop.create_task(self.try_sending(msg, timeout_secs, max_attempts)) return True def get_label(self, callb=None): if self.label is None: mypartial = partial(self.resp_set_label) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) response = self.req_with_resp(GetLabel, StateLabel, callb=mycallb) return self.label def set_label(self, value, callb=None): if len(value) > 32: value = value[:32] mypartial = partial(self.resp_set_label, label=value) if callb: self.req_with_ack( SetLabel, {"label": value}, lambda x, y: (mypartial(y), callb(x, y)) ) else: self.req_with_ack(SetLabel, {"label": value}, lambda x, y: mypartial(y)) def resp_set_label(self, resp, label=None): if label: self.label = label elif resp: self.label = resp.label.decode().replace("\x00", "") def get_location(self, callb=None): if self.location is None: mypartial = partial(self.resp_set_location) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) response = self.req_with_resp(GetLocation, StateLocation, callb=mycallb) return self.location def resp_set_location(self, resp, location=None): if location: self.location = location elif resp: self.location = resp.label.decode().replace("\x00", "") def get_group(self, callb=None): if self.group is None: mypartial = partial(self.resp_set_group) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) response = self.req_with_resp(GetGroup, StateGroup, callb=callb) return self.group def resp_set_group(self, resp, group=None): if group: self.group = group elif resp: self.group = resp.label.decode().replace("\x00", "") def get_power(self, callb=None): 
if self.power_level is None: response = self.req_with_resp(GetPower, StatePower, callb=callb) return self.power_level def set_power(self, value, callb=None, rapid=False): on = [True, 1, "on"] off = [False, 0, "off"] mypartial = partial(self.resp_set_power, power_level=value) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) if value in on and not rapid: response = self.req_with_ack(SetPower, {"power_level": 65535}, mycallb) elif value in off and not rapid: response = self.req_with_ack(SetPower, {"power_level": 0}, mycallb) elif value in on and rapid: response = self.fire_and_forget(SetPower, {"power_level": 65535}) self.power_level = 65535 elif value in off and rapid: response = self.fire_and_forget(SetPower, {"power_level": 0}) self.power_level = 0 def resp_set_power(self, resp, power_level=None): if power_level is not None: self.power_level = power_level elif resp: self.power_level = resp.power_level def get_wififirmware(self, callb=None): if self.wifi_firmware_version is None: mypartial = partial(self.resp_set_wififirmware) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) response = self.req_with_resp(GetWifiFirmware, StateWifiFirmware, mycallb) return (self.wifi_firmware_version, self.wifi_firmware_build_timestamp) def resp_set_wififirmware(self, resp): if resp: self.wifi_firmware_version = float( str(str(resp.version >> 16) + "." + str(resp.version & 0xFF)) ) self.wifi_firmware_build_timestamp = resp.build def get_wifiinfo(self, callb=None): response = self.req_with_resp(GetWifiInfo, StateWifiInfo, callb=callb) return None def get_hostfirmware(self, callb=None): if self.host_firmware_version is None: mypartial = partial(self.resp_set_hostfirmware) if callb: mycallb = lambda x, y: (mypartial(y), callb(x, y)) else: mycallb = lambda x, y: mypartial(y) response = self.req_with_resp(GetHostFirmware, StateHostFirmware, mycallb) return (self.host_firmware_version, self.host_firmware_build_timestamp)
MIT License
commvault/cvpysdk
cvpysdk/deployment/install.py
Install.install_software
python
def install_software( self, client_computers=None, windows_features=None, unix_features=None, username=None, password=None, install_path=None, log_file_loc=None, client_group_name=None, storage_policy_name=None, sw_cache_client=None, **kwargs): if windows_features: os_type = 0 install_options = [{'osType': 'Windows', 'ComponentId': feature_id} for feature_id in windows_features] elif unix_features: os_type = 1 install_options = [{'osType': 'Unix', 'ComponentId': feature_id} for feature_id in unix_features] else: raise SDKException('Install', '105') if client_computers: commcell_name = self.commcell_object.commserv_name client_details = [] for client_name in client_computers: client_details.append( { "clientEntity": { "clientName": client_name, "commCellName": commcell_name } }) else: raise SDKException('Install', '106') if client_group_name: client_group_name = [x.lower() for x in client_group_name] if not set(client_group_name).issubset(self.commcell_object.client_groups.all_clientgroups): raise SDKException('Install', '103') selected_client_groups = [{'clientGroupName': client_group} for client_group in client_group_name] install_flags = kwargs.get('install_flags') request_json = { "taskInfo": { "associations": [ { "commCellId": 2 } ], "task": { "taskType": 1, "initiatedFrom": 1, "taskFlags": { "disabled": False } }, "subTasks": [ { "subTask": { "subTaskType": 1, "operationType": 4026 }, "options": { "adminOpts": { "clientInstallOption": { "reuseADCredentials": False, "installOSType": os_type, "discoveryType": 0, "installerOption": { "requestType": 0, "Operationtype": 0, "CommServeHostName": self.commcell_object.commserv_hostname, "RemoteClient": False, "installFlags": { "allowMultipleInstances": True, "restoreOnlyAgents": False, "killBrowserProcesses": True, "install32Base": install_flags.get('install32Base', False) if install_flags else False, "disableOSFirewall": False, "stopOracleServices": False, "skipClientsOfCS": False, "addToFirewallExclusion": True, "ignoreJobsRunning": False, "forceReboot": False, "overrideClientInfo": True, "preferredIPFamily": install_flags.get('preferredIPFamily', 1) if install_flags else 1, "firewallInstall": { "enableFirewallConfig": False, "firewallConnectionType": 0, "portNumber": 0 } }, "User": { "userName": "admin", "userId": 1 }, "clientComposition": [ { "overrideSoftwareCache": True if sw_cache_client else False, "softwareCacheOrSrmProxyClient": { "clientName": sw_cache_client if sw_cache_client else "" }, "packageDeliveryOption": 0, "components": { "commonInfo": { "globalFilters": 2, "storagePolicyToUse": { "storagePolicyName": storage_policy_name if storage_policy_name else "" } }, "fileSystem": { "configureForLaptopBackups": False }, "componentInfo": install_options, }, "clientInfo": { "clientGroups": selected_client_groups if client_group_name else [], "client": { "evmgrcPort": 0, "cvdPort": 0, "installDirectory": install_path if install_path else "" }, "clientProps": { "logFilesLocation": log_file_loc if log_file_loc else "" } } } ] }, "clientDetails": client_details, "clientAuthForJob": { "password": password, "userName": username } }, "updateOption": { "rebootClient": True } } } } ] } } flag, response = self._cvpysdk_object.make_request( 'POST', self._services['CREATE_TASK'], request_json ) if flag: if response.json(): if "jobIds" in response.json(): return Job(self.commcell_object, response.json()['jobIds'][0]) else: raise SDKException('Install', '107') else: raise SDKException('Response', '102') else: raise SDKException('Response', '101')
Installs the features selected on the given machines Args: client_computers (list) -- list of hostnames/IP addresses to install the features on default : None windows_features (list of enum) -- list of windows features to be installed default : None unix_features (list of enum) -- list of unix features to be installed default : None username (str) -- username of the machine to install features on default : None password (str) -- base64 encoded password default : None install_path (str) -- Install to a specified path on the client default : None log_file_loc (str) -- Install to a specified log path on the client default : None client_group_name (list) -- List of client groups for the client default : None storage_policy_name (str) -- Storage policy for the default subclient default : None sw_cache_client (str) -- Remote Cache Client Name/ Over-riding Software Cache default : None (Use CS Cache by default) **kwargs: (dict) -- Key value pairs for supporting conditional initializations Supported - install_flags (dict) - dictionary of install flag values Ex : install_flags = {"preferredIPFamily":2, "install32Base":True} Returns: object - instance of the Job class for this install_software job Raises: SDKException: if install job failed if response is empty if response is not success Usage: - UnixDownloadFeatures and WindowsDownloadFeatures enums are used for providing input to the install_software method; they can be imported by >>> from cvpysdk.deployment.deploymentconstants import UnixDownloadFeatures from cvpysdk.deployment.deploymentconstants import WindowsDownloadFeatures - sample method call >>> commcell_obj.install_software( client_computers=[win_machine1, win_machine2], windows_features=[WindowsDownloadFeatures.FILE_SYSTEM.value], unix_features=None, username='username', password='password', install_path='C:\\Temp', log_file_loc='/var/log', client_group_name=[My_Servers], storage_policy_name='My_Storage_Policy', install_flags={"preferredIPFamily":2}) **NOTE:** Either Unix or Windows client_computers should be chosen and not both
https://github.com/commvault/cvpysdk/blob/66df30e6e31d619812b7756cb4f7e130b220a08f/cvpysdk/deployment/install.py#L342-L599
from ..job import Job from ..exception import SDKException class Install(object): def __init__(self, commcell_object): self.commcell_object = commcell_object self._services = commcell_object._services self._cvpysdk_object = commcell_object._cvpysdk_object def repair_software(self, client=None, client_group=None, username=None, password=None, reboot_client=False): if (client is None) and (client_group is None): raise SDKException('Install', '100') if client: client_group = "" if not client in self.commcell_object.clients.all_clients: raise SDKException('Install', '101') elif client_group: client = "" if not client_group in self.commcell_object.client_groups.all_clientgroups: raise SDKException('Install', '102') request_json = { "taskInfo": { "task": { "taskType": 1, "initiatedFrom": 2, "policyType": 0, "taskFlags": { "disabled": False } }, "subTasks": [ { "subTaskOperation": 1, "subTask": { "subTaskType": 1, "operationType": 4020 }, "options": { "adminOpts": { "clientInstallOption": { "installerOption": { "clientComposition": [ { "packageDeliveryOption": 0 } ] } }, "updateOption": { "installUpdateOptions": 0, "restartExplorerPlugin": True, "rebootClient": reboot_client, "clientAndClientGroups": [ { "clientGroupName": client_group, "clientName": client } ], "installUpdatesJobType": { "installType": 4, "upgradeClients": False, "undoUpdates": False, "installUpdates": False } } } } } ] } } if username: request_json["taskInfo"]["subTasks"][0]["options"]["adminOpts"]["clientInstallOption"]["clientAuthForJob"] = { "password": password, "userName": username } flag, response = self._cvpysdk_object.make_request( 'POST', self._services['CREATE_TASK'], request_json ) if flag: if response.json(): if "jobIds" in response.json(): return Job(self.commcell_object, response.json()['jobIds'][0]) else: raise SDKException('Install', '107') else: raise SDKException('Response', '102') else: raise SDKException('Response', '101') def push_servicepack_and_hotfix( self, client_computers=None, client_computer_groups=None, all_client_computers=False, all_client_computer_groups=False, reboot_client=False, run_db_maintenance=True, maintenance_release_only=False): selected_clients = [] selected_client_groups = [] if not any([all_client_computers, all_client_computer_groups, client_computers, client_computer_groups]): raise SDKException('Install', '101') commcell_client_computers = self.commcell_object.clients.all_clients commcell_client_computer_groups = self.commcell_object.client_groups.all_clientgroups if client_computers is not None: client_computers = [x.lower() for x in client_computers] if not set(client_computers).issubset(commcell_client_computers): raise SDKException('Install', '102') selected_clients = [{'clientName': client} for client in client_computers] if client_computer_groups is not None: client_computer_groups = [x.lower() for x in client_computer_groups] if not set(client_computer_groups).issubset(commcell_client_computer_groups): raise SDKException('Install', '103') selected_client_groups = [{'clientGroupName': client} for client in client_computer_groups] if all_client_computers: selected_clients = [{"_type_": 2}] if all_client_computer_groups: selected_client_groups = [{"_type_": 27}] all_clients = selected_clients + selected_client_groups request_json = { "taskInfo": { "task": { "taskType": 1, "initiatedFrom": 2, "policyType": 0, "alert": { "alertName": "" }, "taskFlags": { "isEdgeDrive": False, "disabled": False } }, "subTasks": [ { "subTaskOperation": 1, "subTask": { "subTaskType": 1, 
"operationType": 4020 }, "options": { "adminOpts": { "updateOption": { "removeIntersectingDiag": True, "restartExplorerPlugin": True, "rebootClient": reboot_client, "runDBMaintenance": run_db_maintenance, "maintenanceReleaseOnly": maintenance_release_only, "clientAndClientGroups": all_clients, "installUpdatesJobType": { "upgradeClients": False, "undoUpdates": False, "installUpdates": True } } }, } } ] } } flag, response = self._cvpysdk_object.make_request( 'POST', self._services['CREATE_TASK'], request_json ) if flag: if response.json(): if "jobIds" in response.json(): return Job(self.commcell_object, response.json()['jobIds'][0]) else: raise SDKException('Install', '107') else: raise SDKException('Response', '102') else: raise SDKException('Response', '101')
Apache License 2.0
tensortrade-org/tensortrade
tensortrade/stochastic/utils/parameters.py
default
python
def default(base_price: float, t_gen: int, delta: float) -> 'ModelParameters': return ModelParameters( all_s0=base_price, all_r0=0.5, all_time=t_gen, all_delta=delta, all_sigma=0.125, gbm_mu=0.058, jumps_lambda=0.00125, jumps_sigma=0.001, jumps_mu=-0.2, cir_a=3.0, cir_mu=0.5, cir_rho=0.5, ou_a=3.0, ou_mu=0.5, heston_a=0.25, heston_mu=0.35, heston_vol0=0.06125 )
Creates a basic model parameter set, with the key parameters specified and the remaining parameters left at default values. Parameters ---------- base_price : float The base price to use for price generation. t_gen : int The number of bars to generate. delta : float The time delta to use. Returns ------- `ModelParameters` The default model parameters to use.
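A minimal usage sketch for the default helper; the import path is inferred from the file location above and may differ, and the argument values are purely illustrative:

# Import path assumed from tensortrade/stochastic/utils/parameters.py.
from tensortrade.stochastic.utils.parameters import default

# Parameters for 1000 bars starting at a base price of 100.0;
# delta is the time step (roughly one trading day of a year here).
params = default(base_price=100.0, t_gen=1000, delta=1.0 / 252)

print(params.all_s0)    # 100.0 -> base price copied into all_s0
print(params.all_time)  # 1000  -> number of bars to generate
print(params.gbm_mu)    # 0.058 -> default drift for the GBM-based models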
https://github.com/tensortrade-org/tensortrade/blob/65151a72090ab372ed7a33edc673e45b53d2f763/tensortrade/stochastic/utils/parameters.py#L97-L133
from random import uniform class ModelParameters: def __init__(self, all_s0: float, all_time: int, all_delta: float, all_sigma: float, gbm_mu: float, jumps_lambda: float = 0.0, jumps_sigma: float = 0.0, jumps_mu: float = 0.0, cir_a: float = 0.0, cir_mu: float = 0.0, all_r0: float = 0.0, cir_rho: float = 0.0, ou_a: float = 0.0, ou_mu: float = 0.0, heston_a: float = 0.0, heston_mu: float = 0.0, heston_vol0: float = 0.0) -> None: self.all_s0 = all_s0 self.all_time = all_time self.all_delta = all_delta self.all_sigma = all_sigma self.gbm_mu = gbm_mu self.lamda = jumps_lambda self.jumps_sigma = jumps_sigma self.jumps_mu = jumps_mu self.cir_a = cir_a self.cir_mu = cir_mu self.all_r0 = all_r0 self.cir_rho = cir_rho self.ou_a = ou_a self.ou_mu = ou_mu self.heston_a = heston_a self.heston_mu = heston_mu self.heston_vol0 = heston_vol0
Apache License 2.0
biolink/ontobio
ontobio/ontol.py
Ontology.subontology
python
def subontology(self, nodes=None, minimal=False, relations=None): g = None if nodes is not None: g = self.subgraph(nodes) else: g = self.get_graph() if minimal: from ontobio.slimmer import get_minimal_subgraph g = get_minimal_subgraph(g, nodes) ont = Ontology(graph=g, xref_graph=self.xref_graph) if relations is not None: g = ont.get_filtered_graph(relations) ont = Ontology(graph=g, xref_graph=self.xref_graph) return ont
Return a new ontology that is an extract of this one Arguments --------- - nodes: list list of node IDs to include in subontology. If None, all are used - minimal: bool if True, reduce the extract to a minimal subgraph over the given nodes - relations: list list of relation IDs to include in subontology. If None, all are used
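A brief usage sketch for subontology, assuming an Ontology instance ont has already been built; the node and relation IDs below are purely illustrative:

# Extract a sub-ontology containing only the listed nodes.
sub = ont.subontology(nodes=['GO:0008150', 'GO:0003674'])

# Keep only selected relation types and prune the extract down to a
# minimal subgraph connecting the chosen nodes.
sub_min = ont.subontology(
    nodes=['GO:0008150', 'GO:0003674'],
    minimal=True,
    relations=['subClassOf', 'BFO:0000050'],
)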
https://github.com/biolink/ontobio/blob/da9c5ff912785ee4ab98a8a39585562ecd2bdef5/ontobio/ontol.py#L173-L198
import networkx as nx import logging import re logger = logging.getLogger(__name__) class Ontology(): def __init__(self, handle=None, id=None, graph=None, xref_graph=None, meta=None, payload=None, graphdoc=None): self.handle = handle self.meta = meta if id is None: if payload is not None: id = payload.get('id') if id is None: id = handle self.id = id self.graph = graph if self.graph is None: self.graph = nx.MultiDiGraph() logger.debug('Graph initialized, nodes={}'.format(self.graph.nodes())) self.xref_graph = xref_graph self.graphdoc = graphdoc self.all_logical_definitions = [] self.all_property_chain_axioms = [] if payload is not None: self.meta = payload.get('meta') self.graph = payload.get('graph') self.xref_graph = payload.get('xref_graph') self.graphdoc = payload.get('graphdoc') self.all_logical_definitions = payload.get('logical_definitions') self.all_property_chain_axioms = payload.get('property_chain_axioms') def __str__(self): return '{} handle: {} meta: {}'.format(self.id, self.handle, self.meta) def __repr__(self): return self.__str__() def get_graph(self): return self.graph def get_filtered_graph(self, relations=None, prefix=None): self.all_synonyms() self.all_obsoletes() srcg = self.get_graph() if prefix is not None: srcg = srcg.subgraph([n for n in srcg.nodes() if n.startswith(prefix+":")]) if relations is None: logger.info("No filtering on "+str(self)) return srcg logger.info("Filtering {} for {}".format(self, relations)) g = nx.MultiDiGraph() logger.info("copying nodes") for (n,d) in srcg.nodes(data=True): g.add_node(n, **d) logger.info("copying edges") num_edges = 0 for (x,y,d) in srcg.edges(data=True): if d['pred'] in relations: num_edges += 1 g.add_edge(x,y,**d) logger.info("Filtered edges: {}".format(num_edges)) return g def merge(self, ontologies): if self.xref_graph is None: self.xref_graph = nx.MultiGraph() logger.info("Merging source: {} xrefs: {}".format(self, len(self.xref_graph.edges()))) for ont in ontologies: logger.info("Merging {} into {}".format(ont, self)) g = self.get_graph() srcg = ont.get_graph() for n in srcg.nodes(): g.add_node(n, **srcg.nodes[n]) for (o,s,m) in srcg.edges(data=True): g.add_edge(o,s,**m) if ont.xref_graph is not None: for (o,s,m) in ont.xref_graph.edges(data=True): self.xref_graph.add_edge(o,s,**m) if ont.all_logical_definitions is not None: for ld in ont.all_logical_definitions: self.add_logical_definition(ld) if ont.all_property_chain_axioms is not None: for pca in ont.all_property_chain_axioms: self.add_property_chain_axiom(pca) def subgraph(self, nodes=None): if nodes is None: nodes = [] return self.get_graph().subgraph(nodes)
BSD 3-Clause New or Revised License
tresamigossd/smv
src/test/python/testModuleHash/before/src/main/python/stage/modules.py
sameFunc
python
def sameFunc(): return "I'm the same!"
a function which is the same in before/after
https://github.com/tresamigossd/smv/blob/e12257b5b07113d805e7fdd8de41cbcf72120ed7/src/test/python/testModuleHash/before/src/main/python/stage/modules.py#L17-L19
import udl as lib import same as unchanged from smv import SmvApp, SmvModule, SmvHiveTable, SmvCsvFile
Apache License 2.0
jfilak/sapcli
sap/platform/language.py
iso_code_to_sap_code
python
def iso_code_to_sap_code(iso_code: str) -> str: try: return next((entry[1] for entry in CODE_LIST if entry[0] == iso_code)) except StopIteration: raise SAPCliError(f'Not found ISO Code: {iso_code}')
Converts ISO codes to one-letter SAP language codes
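A minimal usage sketch for iso_code_to_sap_code, relying on the CODE_LIST mapping shown in the module context (for example 'EN' maps to 'E'):

from sap.errors import SAPCliError
from sap.platform.language import iso_code_to_sap_code

# Known ISO codes return their one-letter SAP language code.
assert iso_code_to_sap_code('EN') == 'E'
assert iso_code_to_sap_code('DE') == 'D'

# Unknown codes: the StopIteration raised by next() is converted
# into an SAPCliError with a descriptive message.
try:
    iso_code_to_sap_code('XX')
except SAPCliError as err:
    print(err)  # -> Not found ISO Code: XX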
https://github.com/jfilak/sapcli/blob/072ab1b8d58ea58e4f4bd67fc4f349a6b0b52fac/sap/platform/language.py#L66-L73
from sap.errors import SAPCliError CODE_LIST = [ ('AF', 'a'), ('AR', 'A'), ('BG', 'W'), ('CA', 'c'), ('ZH', '1'), ('ZF', 'M'), ('HR', '6'), ('CS', 'C'), ('DA', 'K'), ('NL', 'N'), ('EN', 'E'), ('ET', '9'), ('FI', 'U'), ('FR', 'F'), ('DE', 'D'), ('EL', 'G'), ('HE', 'B'), ('HU', 'H'), ('IS', 'b'), ('ID', 'i'), ('IT', 'I'), ('JA', 'J'), ('KO', '3'), ('LV', 'Y'), ('LT', 'X'), ('MS', '7'), ('NO', 'O'), ('PL', 'L'), ('PT', 'P'), ('Z1', 'Z'), ('RO', '4'), ('RU', 'R'), ('SR', '0'), ('SH', 'd'), ('SK', 'Q'), ('SL', '5'), ('ES', 'S'), ('SV', 'V'), ('TH', '2'), ('TR', 'T'), ('UK', '8') ] def sap_code_to_iso_code(sap_code: str) -> str: try: return next((entry[0] for entry in CODE_LIST if entry[1] == sap_code)) except StopIteration: raise SAPCliError(f'Not found SAP Language Code: {sap_code}')
Apache License 2.0
bachmann1234/diff_cover
diff_cover/violationsreporters/violations_reporter.py
XmlCoverageReporter._cache_file
python
def _cache_file(self, src_path): if src_path not in self._info_cache: violations = None measured = set() for xml_document in self._xml_roots: if xml_document.findall(".[@clover]"): line_nodes = self.get_src_path_line_nodes_clover( xml_document, src_path ) _number = "num" _hits = "count" elif xml_document.findall(".[@name]"): line_nodes = self.get_src_path_line_nodes_jacoco( xml_document, src_path ) _number = "nr" _hits = "ci" else: line_nodes = self.get_src_path_line_nodes_cobertura( xml_document, src_path ) _number = "number" _hits = "hits" if line_nodes is None: continue if violations is None: violations = { Violation(int(line.get(_number)), None) for line in line_nodes if int(line.get(_hits, 0)) == 0 } else: violations = violations & { Violation(int(line.get(_number)), None) for line in line_nodes if int(line.get(_hits, 0)) == 0 } measured = measured | {int(line.get(_number)) for line in line_nodes} if violations is None: violations = set() self._info_cache[src_path] = (violations, measured)
Load the data from `self._xml_roots` for `src_path`, if it hasn't been loaded already.
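The key idea in _cache_file is how several coverage reports are combined: a line counts as a violation only if it is uncovered in every XML root, while the measured set is the union of all lines any report mentions. A stripped-down sketch of that set logic with made-up hit counts:

# Hypothetical per-report data: {line_number: hit_count} for one source file.
report_a = {1: 1, 2: 0, 3: 0}
report_b = {2: 0, 3: 5, 4: 0}

violations = None
measured = set()
for report in (report_a, report_b):
    uncovered = {line for line, hits in report.items() if hits == 0}
    # Intersect across reports: a line covered anywhere is not a violation.
    violations = uncovered if violations is None else violations & uncovered
    # Union across reports: every reported line counts as measured.
    measured |= set(report)

print(sorted(violations))  # [2]          -> uncovered in both reports
print(sorted(measured))    # [1, 2, 3, 4]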
https://github.com/bachmann1234/diff_cover/blob/fc4be9767ebd5c31d29c34d1720c8fe2ad5f00ca/diff_cover/violationsreporters/violations_reporter.py#L169-L238
import itertools import os import os.path import re from collections import defaultdict from diff_cover import util from diff_cover.command_runner import run_command_for_code from diff_cover.git_path import GitPathTool from diff_cover.violationsreporters.base import ( BaseViolationReporter, QualityDriver, RegexBasedDriver, Violation, ) class XmlCoverageReporter(BaseViolationReporter): def __init__(self, xml_roots, src_roots=None): super().__init__("XML") self._xml_roots = xml_roots self._info_cache = defaultdict(list) self._src_roots = src_roots or [""] def _get_classes(self, xml_document, src_path): src_rel_path = util.to_unix_path(GitPathTool.relative_path(src_path)) src_abs_path = util.to_unix_path(GitPathTool.absolute_path(src_path)) sources = xml_document.findall("sources/source") sources = [source.text for source in sources if source.text] classes = xml_document.findall(".//class") or [] return ( [ clazz for clazz in classes if src_abs_path in [ util.to_unix_path( os.path.join(source.strip(), clazz.get("filename")) ) for source in sources ] ] or [ clazz for clazz in classes if util.to_unix_path(clazz.get("filename")) == src_abs_path ] or [ clazz for clazz in classes if util.to_unix_path(clazz.get("filename")) == src_rel_path ] ) def get_src_path_line_nodes_cobertura(self, xml_document, src_path): classes = self._get_classes(xml_document, src_path) if not classes: return None lines = [clazz.findall("./lines/line") for clazz in classes] return list(itertools.chain(*lines)) @staticmethod def get_src_path_line_nodes_clover(xml_document, src_path): files = [ file_tree for file_tree in xml_document.findall(".//file") if GitPathTool.relative_path(file_tree.get("path")) == src_path ] if not files: return None lines = [] for file_tree in files: lines.append(file_tree.findall('./line[@type="stmt"]')) lines.append(file_tree.findall('./line[@type="cond"]')) return list(itertools.chain(*lines)) def _measured_source_path_matches(self, package_name, file_name, src_path): if not src_path.endswith(file_name): return False norm_src_path = os.path.normcase(src_path) for root in self._src_roots: if ( os.path.normcase( GitPathTool.relative_path( os.path.join(root, package_name, file_name) ) ) == norm_src_path ): return True return False def get_src_path_line_nodes_jacoco(self, xml_document, src_path): files = [] packages = list(xml_document.findall(".//package")) for pkg in packages: _files = [ _file for _file in pkg.findall("sourcefile") if self._measured_source_path_matches( pkg.get("name"), _file.get("name"), src_path ) ] files.extend(_files) if not files: return None lines = [file_tree.findall("./line") for file_tree in files] return list(itertools.chain(*lines))
Apache License 2.0
algobulls/pyalgotrading
pyalgotrading/instrument/instrument.py
Instrument.__init__
python
def __init__(self, segment, exchange, tradingsymbol, broker_token, tick_size, lot_size, expiry=None, strike_price=None): self.segment = segment self.exchange = exchange self.tradingsymbol = tradingsymbol self.broker_token = broker_token self.tick_size = tick_size self.lot_size = lot_size self.expiry = expiry self.strike_price = strike_price
Init method that is used while creating an object of this class Args: segment: segment exchange: exchange tradingsymbol: trading symbol broker_token: alphanumeric broker token tick_size: tick size lot_size: lot size expiry: expiry date strike_price: strike price value
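A minimal construction sketch for Instrument; the keyword names follow the signature above and every field value is purely illustrative:

from datetime import date

# Hypothetical cash-segment instrument; expiry and strike_price stay None.
stock = Instrument(
    segment='NSE_EQ',
    exchange='NSE',
    tradingsymbol='SBIN',
    broker_token='12345',
    tick_size=0.05,
    lot_size=1,
)

# Hypothetical option instrument where expiry and strike_price apply.
option = Instrument(
    segment='NSE_FO',
    exchange='NSE',
    tradingsymbol='SBIN24DEC600CE',
    broker_token='67890',
    tick_size=0.05,
    lot_size=750,
    expiry=date(2024, 12, 26),
    strike_price=600,
)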
https://github.com/algobulls/pyalgotrading/blob/fd9e353e6f7f3e9f4d271f4a9a7a18b0f9381471/pyalgotrading/instrument/instrument.py#L12-L32
from datetime import date class Instrument:
MIT License
mavlink/mavsdk-python
mavsdk/telemetry_pb2_grpc.py
TelemetryServiceServicer.SubscribeHealth
python
def SubscribeHealth(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Subscribe to 'health' updates.
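The generated servicer method is only a placeholder; a real server subclasses the servicer and overrides it with a streaming generator. A hedged sketch (the concrete fields of HealthResponse are not shown in this file, so an empty message is yielded, and the import path is assumed from the package layout above):

import time

from mavsdk import telemetry_pb2, telemetry_pb2_grpc

class MyTelemetryService(telemetry_pb2_grpc.TelemetryServiceServicer):
    def SubscribeHealth(self, request, context):
        # Stream one (empty) health update per second while the RPC is active.
        while context.is_active():
            yield telemetry_pb2.HealthResponse()
            time.sleep(1.0)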
https://github.com/mavlink/mavsdk-python/blob/ded17f0c12316f20c93f5e1bac4fede2b8b4e446/mavsdk/telemetry_pb2_grpc.py#L420-L425
import grpc from . import telemetry_pb2 as telemetry_dot_telemetry__pb2 class TelemetryServiceStub(object): def __init__(self, channel): self.SubscribePosition = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribePosition', request_serializer=telemetry_dot_telemetry__pb2.SubscribePositionRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.PositionResponse.FromString, ) self.SubscribeHome = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeHome', request_serializer=telemetry_dot_telemetry__pb2.SubscribeHomeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.HomeResponse.FromString, ) self.SubscribeInAir = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeInAir', request_serializer=telemetry_dot_telemetry__pb2.SubscribeInAirRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.InAirResponse.FromString, ) self.SubscribeLandedState = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeLandedState', request_serializer=telemetry_dot_telemetry__pb2.SubscribeLandedStateRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.LandedStateResponse.FromString, ) self.SubscribeArmed = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeArmed', request_serializer=telemetry_dot_telemetry__pb2.SubscribeArmedRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ArmedResponse.FromString, ) self.SubscribeVtolState = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeVtolState', request_serializer=telemetry_dot_telemetry__pb2.SubscribeVtolStateRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.VtolStateResponse.FromString, ) self.SubscribeAttitudeQuaternion = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeAttitudeQuaternion', request_serializer=telemetry_dot_telemetry__pb2.SubscribeAttitudeQuaternionRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.AttitudeQuaternionResponse.FromString, ) self.SubscribeAttitudeEuler = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeAttitudeEuler', request_serializer=telemetry_dot_telemetry__pb2.SubscribeAttitudeEulerRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.AttitudeEulerResponse.FromString, ) self.SubscribeAttitudeAngularVelocityBody = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeAttitudeAngularVelocityBody', request_serializer=telemetry_dot_telemetry__pb2.SubscribeAttitudeAngularVelocityBodyRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.AttitudeAngularVelocityBodyResponse.FromString, ) self.SubscribeCameraAttitudeQuaternion = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeCameraAttitudeQuaternion', request_serializer=telemetry_dot_telemetry__pb2.SubscribeCameraAttitudeQuaternionRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.CameraAttitudeQuaternionResponse.FromString, ) self.SubscribeCameraAttitudeEuler = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeCameraAttitudeEuler', request_serializer=telemetry_dot_telemetry__pb2.SubscribeCameraAttitudeEulerRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.CameraAttitudeEulerResponse.FromString, ) self.SubscribeVelocityNed = channel.unary_stream( 
'/mavsdk.rpc.telemetry.TelemetryService/SubscribeVelocityNed', request_serializer=telemetry_dot_telemetry__pb2.SubscribeVelocityNedRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.VelocityNedResponse.FromString, ) self.SubscribeGpsInfo = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeGpsInfo', request_serializer=telemetry_dot_telemetry__pb2.SubscribeGpsInfoRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.GpsInfoResponse.FromString, ) self.SubscribeRawGps = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeRawGps', request_serializer=telemetry_dot_telemetry__pb2.SubscribeRawGpsRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.RawGpsResponse.FromString, ) self.SubscribeBattery = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeBattery', request_serializer=telemetry_dot_telemetry__pb2.SubscribeBatteryRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.BatteryResponse.FromString, ) self.SubscribeFlightMode = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeFlightMode', request_serializer=telemetry_dot_telemetry__pb2.SubscribeFlightModeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.FlightModeResponse.FromString, ) self.SubscribeHealth = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeHealth', request_serializer=telemetry_dot_telemetry__pb2.SubscribeHealthRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.HealthResponse.FromString, ) self.SubscribeRcStatus = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeRcStatus', request_serializer=telemetry_dot_telemetry__pb2.SubscribeRcStatusRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.RcStatusResponse.FromString, ) self.SubscribeStatusText = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeStatusText', request_serializer=telemetry_dot_telemetry__pb2.SubscribeStatusTextRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.StatusTextResponse.FromString, ) self.SubscribeActuatorControlTarget = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeActuatorControlTarget', request_serializer=telemetry_dot_telemetry__pb2.SubscribeActuatorControlTargetRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ActuatorControlTargetResponse.FromString, ) self.SubscribeActuatorOutputStatus = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeActuatorOutputStatus', request_serializer=telemetry_dot_telemetry__pb2.SubscribeActuatorOutputStatusRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ActuatorOutputStatusResponse.FromString, ) self.SubscribeOdometry = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeOdometry', request_serializer=telemetry_dot_telemetry__pb2.SubscribeOdometryRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.OdometryResponse.FromString, ) self.SubscribePositionVelocityNed = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribePositionVelocityNed', request_serializer=telemetry_dot_telemetry__pb2.SubscribePositionVelocityNedRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.PositionVelocityNedResponse.FromString, ) self.SubscribeGroundTruth = channel.unary_stream( 
'/mavsdk.rpc.telemetry.TelemetryService/SubscribeGroundTruth', request_serializer=telemetry_dot_telemetry__pb2.SubscribeGroundTruthRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.GroundTruthResponse.FromString, ) self.SubscribeFixedwingMetrics = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeFixedwingMetrics', request_serializer=telemetry_dot_telemetry__pb2.SubscribeFixedwingMetricsRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.FixedwingMetricsResponse.FromString, ) self.SubscribeImu = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeImu', request_serializer=telemetry_dot_telemetry__pb2.SubscribeImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ImuResponse.FromString, ) self.SubscribeScaledImu = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeScaledImu', request_serializer=telemetry_dot_telemetry__pb2.SubscribeScaledImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ScaledImuResponse.FromString, ) self.SubscribeRawImu = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeRawImu', request_serializer=telemetry_dot_telemetry__pb2.SubscribeRawImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.RawImuResponse.FromString, ) self.SubscribeHealthAllOk = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeHealthAllOk', request_serializer=telemetry_dot_telemetry__pb2.SubscribeHealthAllOkRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.HealthAllOkResponse.FromString, ) self.SubscribeUnixEpochTime = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeUnixEpochTime', request_serializer=telemetry_dot_telemetry__pb2.SubscribeUnixEpochTimeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.UnixEpochTimeResponse.FromString, ) self.SubscribeDistanceSensor = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeDistanceSensor', request_serializer=telemetry_dot_telemetry__pb2.SubscribeDistanceSensorRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.DistanceSensorResponse.FromString, ) self.SubscribeScaledPressure = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeScaledPressure', request_serializer=telemetry_dot_telemetry__pb2.SubscribeScaledPressureRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.ScaledPressureResponse.FromString, ) self.SubscribeHeading = channel.unary_stream( '/mavsdk.rpc.telemetry.TelemetryService/SubscribeHeading', request_serializer=telemetry_dot_telemetry__pb2.SubscribeHeadingRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.HeadingResponse.FromString, ) self.SetRatePosition = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRatePosition', request_serializer=telemetry_dot_telemetry__pb2.SetRatePositionRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRatePositionResponse.FromString, ) self.SetRateHome = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateHome', request_serializer=telemetry_dot_telemetry__pb2.SetRateHomeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateHomeResponse.FromString, ) self.SetRateInAir = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateInAir', 
request_serializer=telemetry_dot_telemetry__pb2.SetRateInAirRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateInAirResponse.FromString, ) self.SetRateLandedState = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateLandedState', request_serializer=telemetry_dot_telemetry__pb2.SetRateLandedStateRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateLandedStateResponse.FromString, ) self.SetRateVtolState = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateVtolState', request_serializer=telemetry_dot_telemetry__pb2.SetRateVtolStateRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateVtolStateResponse.FromString, ) self.SetRateAttitude = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateAttitude', request_serializer=telemetry_dot_telemetry__pb2.SetRateAttitudeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateAttitudeResponse.FromString, ) self.SetRateCameraAttitude = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateCameraAttitude', request_serializer=telemetry_dot_telemetry__pb2.SetRateCameraAttitudeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateCameraAttitudeResponse.FromString, ) self.SetRateVelocityNed = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateVelocityNed', request_serializer=telemetry_dot_telemetry__pb2.SetRateVelocityNedRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateVelocityNedResponse.FromString, ) self.SetRateGpsInfo = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateGpsInfo', request_serializer=telemetry_dot_telemetry__pb2.SetRateGpsInfoRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateGpsInfoResponse.FromString, ) self.SetRateBattery = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateBattery', request_serializer=telemetry_dot_telemetry__pb2.SetRateBatteryRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateBatteryResponse.FromString, ) self.SetRateRcStatus = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateRcStatus', request_serializer=telemetry_dot_telemetry__pb2.SetRateRcStatusRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateRcStatusResponse.FromString, ) self.SetRateActuatorControlTarget = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateActuatorControlTarget', request_serializer=telemetry_dot_telemetry__pb2.SetRateActuatorControlTargetRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateActuatorControlTargetResponse.FromString, ) self.SetRateActuatorOutputStatus = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateActuatorOutputStatus', request_serializer=telemetry_dot_telemetry__pb2.SetRateActuatorOutputStatusRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateActuatorOutputStatusResponse.FromString, ) self.SetRateOdometry = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateOdometry', request_serializer=telemetry_dot_telemetry__pb2.SetRateOdometryRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateOdometryResponse.FromString, ) self.SetRatePositionVelocityNed = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRatePositionVelocityNed', 
request_serializer=telemetry_dot_telemetry__pb2.SetRatePositionVelocityNedRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRatePositionVelocityNedResponse.FromString, ) self.SetRateGroundTruth = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateGroundTruth', request_serializer=telemetry_dot_telemetry__pb2.SetRateGroundTruthRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateGroundTruthResponse.FromString, ) self.SetRateFixedwingMetrics = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateFixedwingMetrics', request_serializer=telemetry_dot_telemetry__pb2.SetRateFixedwingMetricsRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateFixedwingMetricsResponse.FromString, ) self.SetRateImu = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateImu', request_serializer=telemetry_dot_telemetry__pb2.SetRateImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateImuResponse.FromString, ) self.SetRateScaledImu = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateScaledImu', request_serializer=telemetry_dot_telemetry__pb2.SetRateScaledImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateScaledImuResponse.FromString, ) self.SetRateRawImu = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateRawImu', request_serializer=telemetry_dot_telemetry__pb2.SetRateRawImuRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateRawImuResponse.FromString, ) self.SetRateUnixEpochTime = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateUnixEpochTime', request_serializer=telemetry_dot_telemetry__pb2.SetRateUnixEpochTimeRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateUnixEpochTimeResponse.FromString, ) self.SetRateDistanceSensor = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/SetRateDistanceSensor', request_serializer=telemetry_dot_telemetry__pb2.SetRateDistanceSensorRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.SetRateDistanceSensorResponse.FromString, ) self.GetGpsGlobalOrigin = channel.unary_unary( '/mavsdk.rpc.telemetry.TelemetryService/GetGpsGlobalOrigin', request_serializer=telemetry_dot_telemetry__pb2.GetGpsGlobalOriginRequest.SerializeToString, response_deserializer=telemetry_dot_telemetry__pb2.GetGpsGlobalOriginResponse.FromString, ) class TelemetryServiceServicer(object): def SubscribePosition(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeHome(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeInAir(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeLandedState(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeArmed(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def 
SubscribeVtolState(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeAttitudeQuaternion(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeAttitudeEuler(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeAttitudeAngularVelocityBody(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeCameraAttitudeQuaternion(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeCameraAttitudeEuler(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeVelocityNed(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeGpsInfo(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeRawGps(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeBattery(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def SubscribeFlightMode(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
BSD 3-Clause New or Revised License
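A brief usage sketch for the generated telemetry stub shown above. The server address and the import path of the generated modules are assumptions; any running mavsdk_server exposing this service would do.

import grpc
from mavsdk import telemetry_pb2, telemetry_pb2_grpc  # assumed import path for the generated modules

channel = grpc.insecure_channel('localhost:50051')        # assumed mavsdk_server address
stub = telemetry_pb2_grpc.TelemetryServiceStub(channel)

# SubscribePosition is a server-streaming RPC: iterate over responses as they arrive.
for response in stub.SubscribePosition(telemetry_pb2.SubscribePositionRequest()):
    print(response.position)
    break  # stop after the first update in this sketch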
fkie/multimaster_fkie
fkie_master_sync/src/fkie_master_sync/__init__.py
set_process_name
python
def set_process_name(name):
    try:
        from ctypes import cdll, byref, create_string_buffer
        libc = cdll.LoadLibrary('libc.so.6')
        buff = create_string_buffer(len(name) + 1)
        buff.value = name
        libc.prctl(15, byref(buff), 0, 0, 0)
    except Exception:
        try:
            import setproctitle
            setproctitle.setproctitle(name)
        except Exception:
            pass
Change the process name. @param name: New process name @type name: C{str}
https://github.com/fkie/multimaster_fkie/blob/386ebf27f41bffdb1896bbcfdccb7c5290ac0eb4/fkie_master_sync/src/fkie_master_sync/__init__.py#L55-L72
import sys import roslib import rospy from . import master_sync PROCESS_NAME = "master_sync" def set_terminal_name(name): sys.stdout.write("\x1b]2;%s\x07" % name)
BSD 3-Clause New or Revised License
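A short usage sketch for set_process_name above, assuming the fkie_master_sync package is importable; the helper silently falls back to setproctitle (or does nothing) when prctl is unavailable.

from fkie_master_sync import PROCESS_NAME, set_process_name, set_terminal_name

set_terminal_name(PROCESS_NAME)   # update the terminal title
set_process_name(PROCESS_NAME)    # best-effort rename shown by ps/top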
devopshq/teamcity
dohq_teamcity/api/group_api.py
GroupApi.delete_role
python
def delete_role(self, group_locator, role_id, scope, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.__delete_role_with_http_info(group_locator, role_id, scope, **kwargs)
    else:
        (data) = self.__delete_role_with_http_info(group_locator, role_id, scope, **kwargs)
        return data
delete_role  # noqa: E501

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True

>>> thread = api.delete_role(group_locator, role_id, scope, async_req=True)
>>> result = thread.get()

:param async_req: bool
:param str group_locator: (required)
:param str role_id: (required)
:param str scope: (required)
:return: None
    If the method is called asynchronously, returns the request thread.
https://github.com/devopshq/teamcity/blob/84f1757ec1fddef27d39246a75739d047be0e831/dohq_teamcity/api/group_api.py#L128-L149
from __future__ import absolute_import from dohq_teamcity.custom.base_model import TeamCityObject import re import six from dohq_teamcity.models.group import Group from dohq_teamcity.models.groups import Groups from dohq_teamcity.models.properties import Properties from dohq_teamcity.models.role import Role from dohq_teamcity.models.roles import Roles class GroupApi(object): base_name = 'Group' def __init__(self, api_client=None): self.api_client = api_client def add_group(self, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_group_with_http_info(**kwargs) else: (data) = self.__add_group_with_http_info(**kwargs) return data def add_role(self, group_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_role_with_http_info(group_locator, **kwargs) else: (data) = self.__add_role_with_http_info(group_locator, **kwargs) return data def add_role_simple(self, group_locator, role_id, scope, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__add_role_simple_with_http_info(group_locator, role_id, scope, **kwargs) else: (data) = self.__add_role_simple_with_http_info(group_locator, role_id, scope, **kwargs) return data def delete_group(self, group_locator, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.__delete_group_with_http_info(group_locator, **kwargs) else: (data) = self.__delete_group_with_http_info(group_locator, **kwargs) return data
MIT License
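A hypothetical call to GroupApi.delete_role following the docstring above; how `api_client` is configured is elided, and the locator, role, and scope values are made up.

from dohq_teamcity.api.group_api import GroupApi

group_api = GroupApi(api_client=api_client)  # api_client assumed to be configured elsewhere

# Synchronous (default):
group_api.delete_role('key:DEVS', 'PROJECT_VIEWER', 'p:MyProject')

# Asynchronous: returns a request thread whose .get() blocks for the result.
thread = group_api.delete_role('key:DEVS', 'PROJECT_VIEWER', 'p:MyProject', async_req=True)
thread.get()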
google-research/tensorflow_constrained_optimization
tensorflow_constrained_optimization/python/train/constrained_optimizer.py
ConstrainedOptimizerV2.trainable_variables
python
def trainable_variables(self): return [variable for variable in self.variables() if variable.trainable]
Returns a list of trainable variables owned by this optimizer. The returned variables will only be those that are owned by the constrained optimizer itself, or transitively by objects that it owns. These include the variables owned by the wrapped optimizer and constraint_optimizer, and the constrained formulation's internal state variable (e.g. Lagrange multipliers, for the Lagrangian formulation). Returns: A list of variables.
https://github.com/google-research/tensorflow_constrained_optimization/blob/723d63f8567aaa988c4ce4761152beee2b462e1d/tensorflow_constrained_optimization/python/train/constrained_optimizer.py#L651-L663
from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import six import tensorflow as tf from tensorflow_constrained_optimization.python import constrained_minimization_problem @six.add_metaclass(abc.ABCMeta) class Formulation(tf.Module): def __init__(self, name=None): super(Formulation, self).__init__(name=name) @abc.abstractmethod def state(self): @abc.abstractmethod def create_state(self, num_constraints): @abc.abstractproperty def is_state_created(self): @abc.abstractmethod def get_loss_fn(self, minimization_problem): class ConstrainedOptimizerV1(tf.compat.v1.train.Optimizer): def __init__(self, formulation, optimizer, num_constraints=None, constraint_optimizer=None, name="ConstrainedOptimizerV1"): super(ConstrainedOptimizerV1, self).__init__(use_locking=False, name=name) if (isinstance(optimizer, tf.keras.optimizers.Optimizer) or isinstance(constraint_optimizer, tf.keras.optimizers.Optimizer)): raise TypeError("a V1 constrained optimizer must be constructed from a " "V1 optimizer and (optionally) constraint_optimizer " "(i.e. implementations of tf.compat.v1.train.Optimizer)") self._formulation = formulation self._optimizer = optimizer self._constraint_optimizer = constraint_optimizer self._num_constraints = num_constraints @property def num_constraints(self): return self._num_constraints @num_constraints.setter def num_constraints(self, num_constraints): if self._formulation.is_state_created: raise RuntimeError("num_constraints cannot be set after the internal " "state has been created (by e.g. the variables or " "minimize methods)") if (self._num_constraints is not None) and (num_constraints != self._num_constraints): raise ValueError("num_constraints cannot be changed once it has been set") self._num_constraints = num_constraints def variables(self): if not self._formulation.is_state_created: if self._num_constraints is None: raise RuntimeError("the variables method of a TFCO optimizer cannot " "be called before the number of constraints has " "been fixed (maybe you need to set num_constraints)") self._formulation.create_state(self._num_constraints) result = ( list(self._optimizer.variables()) + list(self._formulation.variables)) if self._constraint_optimizer is not None: result += list(self._constraint_optimizer.variables()) return result def trainable_variables(self): return [variable for variable in self.variables() if variable.trainable] def non_trainable_variables(self): return [variable for variable in self.variables() if not variable.trainable] def compute_gradients(self, loss, var_list=None, gate_gradients=tf.compat.v1.train.Optimizer.GATE_OP, aggregation_method=None, colocate_gradients_with_ops=False, grad_loss=None): if not isinstance( loss, constrained_minimization_problem.ConstrainedMinimizationProblem): return super(ConstrainedOptimizerV1, self).compute_gradients( loss, var_list=var_list, gate_gradients=gate_gradients, aggregation_method=aggregation_method, colocate_gradients_with_ops=colocate_gradients_with_ops, grad_loss=grad_loss) if (not self._formulation.is_state_created) and (self._num_constraints is not None): self._formulation.create_state(self._num_constraints) if grad_loss is not None: raise ValueError("the grad_loss argument cannot be provided when the " "loss argument is a ConstrainedMinimizationProblem") with tf.control_dependencies(loss.update_ops()): loss = self._formulation.get_loss_fn(loss) if not tf.executing_eagerly(): loss = loss() return super(ConstrainedOptimizerV1, 
self).compute_gradients( loss, var_list=var_list, gate_gradients=gate_gradients, aggregation_method=aggregation_method, colocate_gradients_with_ops=colocate_gradients_with_ops) def _create_slots(self, var_list): if not self._formulation.is_state_created: raise RuntimeError("a ConstrainedOptimizerV1 must know the number of " "constraints before its variables can be accessed " "(maybe you need to set num_constraints)") state_var_ids = [id(var) for var in self._formulation.variables] if self._constraint_optimizer is None or not state_var_ids: return self._optimizer._create_slots(var_list) state_var_list = [] non_state_var_list = [] for var in var_list: if id(var) in state_var_ids: state_var_list.append(var) else: non_state_var_list.append(var) self._optimizer._create_slots(non_state_var_list) self._constraint_optimizer._create_slots(state_var_list) def _prepare(self): self._optimizer._prepare() if self._constraint_optimizer is not None: self._constraint_optimizer._prepare() def _apply_dense(self, gradient, variable, *args, **kwargs): assert variable is not None if not self._formulation.is_state_created: raise RuntimeError("a ConstrainedOptimizerV1 must know the number of " "constraints before its variables can be accessed " "(maybe you need to set num_constraints)") state_var_ids = [id(var) for var in self._formulation.variables] if self._constraint_optimizer is not None and id(variable) in state_var_ids: return self._constraint_optimizer._apply_dense(gradient, variable, *args, **kwargs) return self._optimizer._apply_dense(gradient, variable, *args, **kwargs) def _apply_sparse(self, gradient, variable, *args, **kwargs): assert variable is not None if not self._formulation.is_state_created: raise RuntimeError("a ConstrainedOptimizerV1 must know the number of " "constraints before its variables can be accessed " "(maybe you need to set num_constraints)") state_var_ids = [id(var) for var in self._formulation.variables] if self._constraint_optimizer is not None and id(variable) in state_var_ids: return self._constraint_optimizer._apply_sparse(gradient, variable, *args, **kwargs) return self._optimizer._apply_sparse(gradient, variable, *args, **kwargs) def _resource_apply_dense(self, gradient, handle, *args, **kwargs): assert handle is not None if not self._formulation.is_state_created: raise RuntimeError("a ConstrainedOptimizerV1 must know the number of " "constraints before its variables can be accessed " "(maybe you need to set num_constraints)") state_vars = self._formulation.variables if self._constraint_optimizer is not None and any( handle is vv for vv in state_vars): return self._constraint_optimizer._resource_apply_dense( gradient, handle, *args, **kwargs) return self._optimizer._resource_apply_dense(gradient, handle, *args, **kwargs) def _resource_apply_sparse(self, gradient, handle, *args, **kwargs): assert handle is not None if not self._formulation.is_state_created: raise RuntimeError("a ConstrainedOptimizerV1 must know the number of " "constraints before its variables can be accessed " "(maybe you need to set num_constraints)") state_vars = self._formulation.variables if self._constraint_optimizer is not None and any( handle is vv for vv in state_vars): return self._constraint_optimizer._resource_apply_sparse( gradient, handle, *args, **kwargs) return self._optimizer._resource_apply_sparse(gradient, handle, *args, **kwargs) class ConstrainedOptimizerV2(tf.keras.optimizers.Optimizer): def __init__(self, formulation, optimizer, num_constraints=None, constraint_optimizer=None, 
name="ConstrainedOptimizerV2"): super(ConstrainedOptimizerV2, self).__init__(name=name) if (isinstance(optimizer, tf.compat.v1.train.Optimizer) or isinstance(constraint_optimizer, tf.compat.v1.train.Optimizer)): raise TypeError("a V2 constrained optimizer must be constructed from a " "V2 optimizer and (optionally) constraint_optimizer " "(i.e. implementations of tf.keras.optimizers.Optimizer)") self._formulation = formulation self._optimizer = optimizer self._constraint_optimizer = constraint_optimizer self._num_constraints = num_constraints @property def num_constraints(self): return self._num_constraints @num_constraints.setter def num_constraints(self, num_constraints): if self._formulation.is_state_created: raise RuntimeError("num_constraints cannot be set after the internal " "state has been created (by e.g. the variables or " "minimize methods)") if (self._num_constraints is not None) and (num_constraints != self._num_constraints): raise ValueError("num_constraints cannot be changed once it has been set") self._num_constraints = num_constraints def variables(self): if not self._formulation.is_state_created: if self._num_constraints is None: raise RuntimeError("the variables method of a TFCO optimizer cannot " "be called before the number of constraints has " "been fixed (maybe you need to call the " "num_constraints setter?)") self._formulation.create_state(self._num_constraints) result = ( list(self._optimizer.variables()) + list(self._formulation.variables)) if self._constraint_optimizer is not None: result += list(self._constraint_optimizer.variables()) return result
Apache License 2.0
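A minimal sketch of trainable_variables in use, assuming `optimizer` is an already-constructed ConstrainedOptimizerV2 (e.g. a Lagrangian formulation wrapping a Keras optimizer) with num_constraints set.

all_vars = optimizer.variables()             # wrapped-optimizer, formulation, and constraint-optimizer variables
trainable = optimizer.trainable_variables()  # the subset with variable.trainable == True
print("%d of %d optimizer-owned variables are trainable" % (len(trainable), len(all_vars)))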
astooke/rlpyt
rlpyt/runners/async_rl.py
AsyncRlBase.startup
python
def startup(self):
    if self.seed is None:
        self.seed = make_seed()
    set_seed(self.seed)
    double_buffer, examples = self.sampler.async_initialize(
        agent=self.agent,
        bootstrap_value=getattr(self.algo, "bootstrap_value", False),
        traj_info_kwargs=self.get_traj_info_kwargs(),
        seed=self.seed,
    )
    self.sampler_batch_size = self.sampler.batch_spec.size
    self.world_size = len(self.affinity.optimizer)
    n_itr = self.get_n_itr()
    replay_buffer = self.algo.async_initialize(
        agent=self.agent,
        sampler_n_itr=n_itr,
        batch_spec=self.sampler.batch_spec,
        mid_batch_reset=self.sampler.mid_batch_reset,
        examples=examples,
        world_size=self.world_size,
    )
    self.launch_workers(n_itr, double_buffer, replay_buffer)
    throttle_itr, delta_throttle_itr = self.optim_startup()
    return throttle_itr, delta_throttle_itr
Calls ``sampler.async_initialize()`` to get a double buffer for minibatches, followed by ``algo.async_initialize()`` to get a replay buffer on shared memory, then launches all workers (sampler, optimizer, memory copier).
https://github.com/astooke/rlpyt/blob/f04f23db1eb7b5915d88401fca67869968a07a37/rlpyt/runners/async_rl.py#L134-L162
import time import multiprocessing as mp import psutil import torch from collections import deque import math from rlpyt.runners.base import BaseRunner from rlpyt.utils.quick_args import save__init__args from rlpyt.utils.logging import logger from rlpyt.utils.collections import AttrDict from rlpyt.utils.seed import set_seed, make_seed from rlpyt.utils.prog_bar import ProgBarCounter from rlpyt.utils.synchronize import drain_queue, find_port THROTTLE_WAIT = 0.05 class AsyncRlBase(BaseRunner): _eval = False def __init__( self, algo, agent, sampler, n_steps, affinity, seed=None, log_interval_steps=1e5, ): n_steps = int(n_steps) log_interval_steps = int(log_interval_steps) save__init__args(locals()) def train(self): throttle_itr, delta_throttle_itr = self.startup() throttle_time = 0. sampler_itr = itr = 0 if self._eval: while self.ctrl.sampler_itr.value < 1: time.sleep(THROTTLE_WAIT) traj_infos = drain_queue(self.traj_infos_queue, n_sentinel=1) self.store_diagnostics(0, 0, traj_infos, ()) self.log_diagnostics(0, 0, 0) log_counter = 0 while True: logger.set_iteration(itr) with logger.prefix(f"opt_itr #{itr} "): while self.ctrl.sampler_itr.value < throttle_itr: if self.ctrl.quit.value: break time.sleep(THROTTLE_WAIT) throttle_time += THROTTLE_WAIT if self.ctrl.quit.value: break if self.ctrl.opt_throttle is not None: self.ctrl.opt_throttle.wait() throttle_itr += delta_throttle_itr opt_info = self.algo.optimize_agent(itr, sampler_itr=self.ctrl.sampler_itr.value) self.agent.send_shared_memory() sampler_itr = self.ctrl.sampler_itr.value traj_infos = (list() if self._eval else drain_queue(self.traj_infos_queue)) self.store_diagnostics(itr, sampler_itr, traj_infos, opt_info) if (sampler_itr // self.log_interval_itrs > log_counter): if self._eval: with self.ctrl.sampler_itr.get_lock(): traj_infos = drain_queue(self.traj_infos_queue, n_sentinel=1) self.store_diagnostics(itr, sampler_itr, traj_infos, ()) self.log_diagnostics(itr, sampler_itr, throttle_time) log_counter += 1 throttle_time = 0. itr += 1 sampler_itr = self.ctrl.sampler_itr.value traj_infos = drain_queue(self.traj_infos_queue) if traj_infos or not self._eval: self.store_diagnostics(itr, sampler_itr, traj_infos, ()) self.log_diagnostics(itr, sampler_itr, throttle_time) self.shutdown()
MIT License
evrenesat/ganihomes
rosetta/polib.py
_BaseFile.__init__
python
def __init__(self, *args, **kwargs):
    list.__init__(self)
    pofile = kwargs.get('pofile', None)
    if pofile and os.path.exists(pofile):
        self.fpath = pofile
    else:
        self.fpath = kwargs.get('fpath')
    self.wrapwidth = kwargs.get('wrapwidth', 78)
    self.encoding = kwargs.get('encoding', default_encoding)
    self.check_for_duplicates = kwargs.get('check_for_duplicates', False)
    self.header = ''
    self.metadata = {}
    self.metadata_is_fuzzy = 0
Constructor, accepts the following keyword arguments:

``pofile``
    string, the path to the po or mo file, or its content as a string.

``wrapwidth``
    integer, the wrap width, only useful when the ``-w`` option was passed
    to xgettext (optional, default: ``78``).

``encoding``
    string, the encoding to use, defaults to ``default_encoding`` global
    variable (optional).

``check_for_duplicates``
    whether to check for duplicate entries when adding entries to the file,
    (optional, default: ``False``).
https://github.com/evrenesat/ganihomes/blob/eece2d8d957989b176cc5a36d723f676862f8d17/rosetta/polib.py#L215-L251
__author__ = 'David Jean Louis <izimobil@gmail.com>' __version__ = '0.6.2' __all__ = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry', 'detect_encoding', 'escape', 'unescape', 'detect_encoding',] import array import codecs import os import re import struct import sys import textwrap import types default_encoding = 'utf-8' def _pofile_or_mofile(f, type, **kwargs): if kwargs.get('autodetect_encoding', True): enc = detect_encoding(f, type == 'mofile') else: enc = kwargs.get('encoding', default_encoding) kls = type == 'pofile' and _POFileParser or _MOFileParser parser = kls( f, encoding=enc, check_for_duplicates=kwargs.get('check_for_duplicates', False) ) instance = parser.parse() instance.wrapwidth = kwargs.get('wrapwidth', 78) return instance def pofile(pofile, **kwargs): return _pofile_or_mofile(pofile, 'pofile', **kwargs) def mofile(mofile, **kwargs): return _pofile_or_mofile(mofile, 'mofile', **kwargs) def detect_encoding(file, binary_mode=False): rx = re.compile(r'"?Content-Type:.+? charset=([\w_\-:\.]+)') def charset_exists(charset): try: codecs.lookup(charset) except LookupError: return False return True if not os.path.exists(file): match = rx.search(file) if match: enc = match.group(1).strip() if charset_exists(enc): return enc else: if binary_mode: mode = 'rb' else: mode = 'r' f = open(file, mode) for l in f.readlines(): match = rx.search(l) if match: f.close() enc = match.group(1).strip() if charset_exists(enc): return enc f.close() return default_encoding def escape(st): return st.replace('\\', r'\\') .replace('\t', r'\t') .replace('\r', r'\r') .replace('\n', r'\n') .replace('\"', r'\"') def unescape(st): def unescape_repl(m): m = m.group(1) if m == 'n': return '\n' if m == 't': return '\t' if m == 'r': return '\r' if m == '\\': return '\\' return m return re.sub(r'\\(\\|n|t|r|")', unescape_repl, st) class _BaseFile(list):
BSD 2-Clause Simplified License
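The keyword arguments documented above are normally passed through the module-level pofile() helper shown in the context; a sketch with a hypothetical catalog path:

from rosetta import polib

po = polib.pofile('locale/de/LC_MESSAGES/django.po',   # hypothetical path
                  wrapwidth=78,
                  encoding='utf-8',
                  check_for_duplicates=True)
print(po.fpath, len(po))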
mapbox/mapbox-sdk-py
mapbox/services/maps.py
Maps._validate_marker_name
python
def _validate_marker_name(self, marker_name):
    if marker_name not in self.valid_marker_names:
        raise InvalidMarkerNameError(
            "{} is not a valid marker name".format(marker_name)
        )
    return marker_name
Validates marker name, raising error if invalid.
https://github.com/mapbox/mapbox-sdk-py/blob/e56127b96eea150831c84b67a779378598790fd4/mapbox/services/maps.py#L154-L162
from re import ( compile, match ) from mapbox.errors import ( ValidationError, InvalidZoomError, InvalidColumnError, InvalidRowError, InvalidFileFormatError, InvalidPeriodError, InvalidFeatureFormatError, InvalidMarkerNameError, InvalidLabelError, InvalidColorError ) from mapbox.services.base import Service from dateutil.parser import parse from uritemplate import URITemplate class Maps(Service): api_name = "maps" api_version = "v4" valid_file_formats = [ "grid.json", "mvt", "png", "png32", "png64", "png128", "png256", "jpg70", "jpg80", "jpg90" ] valid_feature_formats = [ "json", "kml" ] valid_marker_names = [ "pin-s", "pin-l" ] @property def base_uri(self): return "https://{}/{}".format(self.host, self.api_version) def _validate_z(self, z): if (z < 0) or (z > 20): raise InvalidZoomError( "{} is not a valid value for z (zoom)".format(z) ) return z def _validate_x(self, x, z): if (x < 0) or (x > ((2**z) - 1)): raise InvalidColumnError( "{} is not a valid value for x (column)".format(x) ) return x def _validate_y(self, y, z): if (y < 0) or (y > ((2**z) - 1)): raise InvalidRowError( "{} is not a valid value for y (row)".format(y) ) return y def _validate_retina(self, retina): if retina: retina = "@2x" else: retina = "" return retina def _validate_file_format(self, file_format): if file_format not in self.valid_file_formats: raise InvalidFileFormatError( "{} is not a valid file format".format(file_format) ) return file_format def _validate_timestamp(self, timestamp): try: parse(timestamp) except: raise InvalidPeriodError( "{} is not an ISO-formatted string".format(timestamp) ) return timestamp def _validate_feature_format(self, feature_format): if feature_format not in self.valid_feature_formats: raise InvalidFeatureFormatError( "{} is not a valid feature format".format(feature_format) ) return feature_format
MIT License
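A sketch of the validator in action; the access token is a placeholder. Note that only 'pin-s' and 'pin-l' appear in valid_marker_names here.

from mapbox.errors import InvalidMarkerNameError
from mapbox.services.maps import Maps

maps = Maps(access_token='pk.placeholder')       # hypothetical token
print(maps._validate_marker_name('pin-s'))       # returns 'pin-s'
try:
    maps._validate_marker_name('pin-m')          # not in valid_marker_names
except InvalidMarkerNameError as exc:
    print(exc)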
nttcom/eclcli
eclcli/compute/client.py
check_api_version
python
def check_api_version(check_version):
    try:
        from novaclient import api_versions
    except ImportError:
        return False
    import novaclient
    global _compute_api_version
    _compute_api_version = api_versions.get_api_version(check_version)
    if _compute_api_version > api_versions.APIVersion("2.0"):
        if not _compute_api_version.matches(
            novaclient.API_MIN_VERSION,
            novaclient.API_MAX_VERSION,
        ):
            raise exceptions.CommandError(
                "versions supported by client: %s - %s" % (
                    novaclient.API_MIN_VERSION.get_string(),
                    novaclient.API_MAX_VERSION.get_string(),
                ),
            )
    return True
Validate version supplied by user

Returns:
* True if version is OK
* False if the version has not been checked and the previous plugin check
  should be performed
* throws an exception if the version is no good

TODO(dtroyer): make the exception thrown a version-related one
https://github.com/nttcom/eclcli/blob/25946165882b352c16df4077f5470d3c5e4b910e/eclcli/compute/client.py#L81-L120
import logging from eclcli.common import exceptions from eclcli.common import utils LOG = logging.getLogger(__name__) DEFAULT_API_VERSION = '2' API_VERSION_OPTION = 'os_compute_api_version' API_NAME = 'compute' API_VERSIONS = { "2": "novaclient.client", } _compute_api_version = None def make_client(instance): from novaclient import client as nova_client if _compute_api_version is not None: version = _compute_api_version else: version = instance._api_version[API_NAME] LOG.debug('Instantiating compute client for V%s', version) http_log_debug = utils.get_effective_log_level() <= logging.DEBUG extensions = [ext for ext in nova_client.discover_extensions(version) if ext.name == "list_extensions"] kwargs = utils.build_kwargs_dict('endpoint_type', instance._interface) client = nova_client.Client( version, session=instance.session, extensions=extensions, http_log_debug=http_log_debug, timings=instance.timing, region_name=instance._region_name, **kwargs ) return client def build_option_parser(parser): return parser
Apache License 2.0
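A small sketch of check_api_version guarding client construction; the microversion string is an example value.

from eclcli.compute import client as compute_client

if compute_client.check_api_version('2.20'):
    print('novaclient accepts microversion 2.20')
else:
    print('version not checked; fall back to the default plugin check')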
capitalone/particle-cloud-framework
pcf/particle/aws/s3/s3_bucket.py
S3Bucket._start
python
def _start(self):
    create_definition = pcf_util.param_filter(self.get_desired_state_definition(), S3Bucket.START_PARAMS_FILTER)
    response = self.client.create_bucket(**create_definition)
    if self.custom_config.get("Tags"):
        tags = self.custom_config.get("Tags")
        tag_set = []
        for k, v in tags.items():
            tag_set.append({
                "Key": k,
                "Value": v
            })
        self.client.put_bucket_tagging(
            Bucket=self.bucket_name,
            Tagging={
                "TagSet": tag_set
            }
        )
    return response
Creates the S3 bucket

Adds Tags to the S3 bucket if specified in custom_config

Returns:
    response of boto3 create_bucket
https://github.com/capitalone/particle-cloud-framework/blob/a05713434572d9d528d724855097854c5a56d377/pcf/particle/aws/s3/s3_bucket.py#L81-L107
from pcf.core import State from pcf.util import pcf_util from pcf.core.aws_resource import AWSResource class S3Bucket(AWSResource): flavor = "s3_bucket" state_lookup = { "missing": State.terminated, "active": State.running, "inactive": State.terminated } equivalent_states = { State.running: 1, State.stopped: 0, State.terminated: 0 } START_PARAMS_FILTER = { "ACL", "Bucket", "CreateBucketConfiguration", "GrantFullControl", "GrantRead", "GrantReadACL", "GrantReadACP", "GrantWrite", "GrantWriteACP", } UNIQUE_KEYS = ["aws_resource.Bucket"] def __init__(self, particle_definition, session=None): super().__init__(particle_definition=particle_definition, resource_name="s3", session=session) self.bucket_name = self.desired_state_definition["Bucket"] self._set_unique_keys() def _set_unique_keys(self): self.unique_keys = S3Bucket.UNIQUE_KEYS def get_status(self): bucket_object = self.resource.Bucket(self.bucket_name) if bucket_object.creation_date: return {"status":"active"} else: return {"status": "missing"} def _terminate(self): return self.client.delete_bucket(Bucket=self.bucket_name)
Apache License 2.0
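A hypothetical particle definition that exercises _start() indirectly when the particle is driven to the running state; the bucket name, tags, and the exact definition layout are assumptions.

from pcf.core import State
from pcf.particle.aws.s3.s3_bucket import S3Bucket

particle_definition = {
    "pcf_name": "example-bucket",
    "flavor": "s3_bucket",
    "aws_resource": {
        "Bucket": "my-example-bucket",
        "custom_config": {
            "Tags": {"team": "data", "env": "dev"}
        }
    }
}

bucket = S3Bucket(particle_definition)
bucket.set_desired_state(State.running)
bucket.apply()   # creates the bucket, then applies the tags via put_bucket_tagging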
eventbrite/pysoa
pysoa/server/action/status.py
StatusActionFactory
python
def StatusActionFactory(version, build=None, base_class=BaseStatusAction):
    class StatusAction(base_class):
        @property
        def _version(self):
            return version

        @property
        def _build(self):
            return build

    return StatusAction
A factory for creating a new status action class specific to a service.

:param version: The service version.
:param build: The optional service build identifier.
:param base_class: The optional base class, to override `BaseStatusAction` as the base class.

:return: A class named `StatusAction`, extending `base_class`, with `_version` and `_build`
    properties returning the corresponding `version` and `build` input parameters, respectively.
https://github.com/eventbrite/pysoa/blob/8ab18b43f533574f2b235c734da309ab21176957/pysoa/server/action/status.py#L300-L321
from __future__ import ( absolute_import, unicode_literals, ) import abc import platform import sys from typing import ( Any, Callable, Dict, Generator, Iterable, List, NamedTuple, Optional, Tuple, Type, ) import conformity from conformity import fields import six import pysoa from pysoa.server.action import Action from pysoa.server.server import Server from pysoa.server.settings import ServerSettings from pysoa.server.types import EnrichedActionRequest __all__ = ( 'BaseStatusAction', 'CheckMethodReturn', 'CheckMethodStatus', 'StatusActionFactory', 'make_default_status_action_class', ) CheckMethodStatus = NamedTuple( 'CheckMethodStatus', ( ('is_error', bool), ('code', six.text_type), ('description', six.text_type), ), ) CheckMethodReturn = Optional[Iterable[CheckMethodStatus]] class BaseStatusAction(Action): def __init__(self, settings=None): super(BaseStatusAction, self).__init__(settings) self.diagnostics = {} if six.PY2: @abc.abstractproperty def _version(self): else: @property @abc.abstractmethod def _version(self): @property def _build(self): return None description = ( 'Returns version info for the service, Python, PySOA, and Conformity. If the service has a build string, that ' 'is also returned. If the service has defined additional health check behavior and the `verbose` request ' 'attribute is not set to `False`, those additional health checks are performed and returned in the ' '`healthcheck` response attribute. If the `verbose` request attribute is set to `False`, the additional ' 'health checks are not performed and `healthcheck` is not included in the response (importantly, the `check_` ' 'methods are not invoked).' ) request_schema = fields.Dictionary( { 'verbose': fields.Boolean( description='If specified and False, this instructs the status action to return only the baseline ' 'status information (Python, service, PySOA, and other library versions) and omit any of ' 'the health check operations (no `healthcheck` attribute will be included in the ' 'response). This provides a useful way to obtain the service version very quickly without ' 'executing the often time-consuming code necessary for the full health check. 
It defaults ' 'to True, which means "return everything."', ), }, optional_keys=('verbose', ), ) response_schema = fields.Dictionary( { 'build': fields.UnicodeString(description='The version build string, if applicable.'), 'conformity': fields.UnicodeString(description='The version of Conformity in use.'), 'healthcheck': fields.Dictionary( { 'warnings': fields.List( fields.Tuple( fields.UnicodeString(description='The invariant warning code'), fields.UnicodeString(description='The readable warning description'), ), description='A list of any warnings encountered during the health checks.', ), 'errors': fields.List( fields.Tuple( fields.UnicodeString(description='The invariant error code'), fields.UnicodeString(description='The readable error description'), ), description='A list of any errors encountered during the health checks.', ), 'diagnostics': fields.SchemalessDictionary( key_type=fields.UnicodeString(), description='A dictionary containing any additional diagnostic information output by the ' 'health check operations.', ), }, optional_keys=('warnings', 'errors', 'diagnostics'), description='Information about any additional health check operations performed.', ), 'pysoa': fields.UnicodeString(description='The version of PySOA in use.'), 'python': fields.UnicodeString(description='The version of Python in use.'), 'version': fields.UnicodeString(description='The version of the responding service.'), }, optional_keys=('build', 'healthcheck', ), ) def run(self, request): status = { 'conformity': six.text_type(conformity.__version__), 'pysoa': six.text_type(pysoa.__version__), 'python': six.text_type(platform.python_version()), 'version': self._version, } if self._build: status['build'] = self._build if not request.body or request.body.get('verbose', True) is True: errors = [] warnings = [] self.diagnostics = {} check_methods = ( getattr(self, x) for x in dir(self) if x.startswith('check_') ) for check_method in check_methods: try: problems = check_method(request) except TypeError as e: raise RuntimeError( 'Status action check_* methods must accept a single argument of type EnrichedActionRequest', e, ) if problems: try: for is_error, code, description in problems: if is_error: errors.append((code, description)) else: warnings.append((code, description)) except (TypeError, ValueError) as e: raise RuntimeError( 'Status action check_* methods must return None or an iterable of bool-str-str tuples.', e, ) status['healthcheck'] = { 'errors': errors, 'warnings': warnings, 'diagnostics': self.diagnostics, } return status def _check_client_settings(self, request): if not request.client.settings: return None self.diagnostics['services'] = {} service_names = list(six.iterkeys(request.client.settings)) try: job_responses = request.client.call_jobs_parallel( [ {'service_name': service_name, 'actions': [{'action': 'status', 'body': {'verbose': False}}]} for service_name in service_names ], timeout=2, catch_transport_errors=True, raise_action_errors=False, raise_job_errors=False, ) except Exception as e: return [CheckMethodStatus(True, 'CHECK_SERVICES_UNKNOWN_ERROR', six.text_type(e))] problems = [] for i, service_name in enumerate(service_names): response = job_responses[i] if isinstance(response, Exception): problems.append(CheckMethodStatus( True, '{}_TRANSPORT_ERROR'.format(service_name.upper()), six.text_type(response) )) elif response.errors: problems.append(CheckMethodStatus( True, '{}_CALL_ERROR'.format(service_name.upper()), six.text_type(response.errors), )) elif response.actions[0].errors: 
problems.append(CheckMethodStatus( True, '{}_STATUS_ERROR'.format(service_name.upper()), six.text_type(response.actions[0].errors) )) else: self.diagnostics['services'][service_name] = response.actions[0].body return problems
Apache License 2.0
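A sketch of the factory in use: build a service-specific status action and register it in a PySOA server's action map (service name, version, and build are placeholders).

from pysoa.server.action.status import StatusActionFactory
from pysoa.server.server import Server

StatusAction = StatusActionFactory(version='7.3.1', build='7.3.1+build.1234')

class ExampleServer(Server):
    service_name = 'example'
    action_class_map = {
        'status': StatusAction,
    }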
free2er/certbot-regru
certbot_regru/dns.py
_RegRuClient.del_txt_record
python
def del_txt_record(self, record_name, record_content):
    data = self._create_params(record_name, {
        'record_type': 'TXT',
        'content': record_content
    })
    try:
        logger.debug('Attempting to delete record: %s', data)
        response = self.http.send('https://api.reg.ru/api/regru2/zone/remove_record', data)
    except requests.exceptions.RequestException as e:
        logger.warning('Encountered error deleting TXT record: %s', e)
        return
    if 'result' not in response or response['result'] != 'success':
        logger.warning('Encountered error deleting TXT record: %s', response)
        return
    logger.debug('Successfully deleted TXT record.')
Delete a TXT record using the supplied information. Note that both the record's name and content are used to ensure that similar records created concurrently (e.g., due to concurrent invocations of this plugin) are not deleted. Failures are logged, but not raised. :param str record_name: The record name (typically beginning with '_acme-challenge.'). :param str record_content: The record content (typically the challenge validation).
https://github.com/free2er/certbot-regru/blob/2b9c83491ba633b5aca6f4e17c3940db44d720fb/certbot_regru/dns.py#L98-L124
import logging import json import requests import zope.interface from certbot import errors from certbot import interfaces from certbot.plugins import dns_common logger = logging.getLogger(__name__) @zope.interface.implementer(interfaces.IAuthenticator) @zope.interface.provider(interfaces.IPluginFactory) class Authenticator(dns_common.DNSAuthenticator): description = 'Obtain certificates using a DNS TXT record (if you are using Reg.ru for DNS).' def __init__(self, *args, **kwargs): super(Authenticator, self).__init__(*args, **kwargs) self.credentials = None @classmethod def add_parser_arguments(cls, add): super(Authenticator, cls).add_parser_arguments(add, default_propagation_seconds=120) add('credentials', help='Path to Reg.ru credentials INI file', default='/etc/letsencrypt/regru.ini') def more_info(self): return 'This plugin configures a DNS TXT record to respond to a dns-01 challenge using ' + 'the Reg.ru API.' def _setup_credentials(self): self.credentials = self._configure_credentials( 'credentials', 'path to Reg.ru credentials INI file', { 'username': 'Username of the Reg.ru account.', 'password': 'Password of the Reg.ru account.', } ) def _perform(self, domain, validation_name, validation): self._get_regru_client().add_txt_record(validation_name, validation) def _cleanup(self, domain, validation_name, validation): self._get_regru_client().del_txt_record(validation_name, validation) def _get_regru_client(self): return _RegRuClient(self.credentials.conf('username'), self.credentials.conf('password')) class _RegRuClient(object): def __init__(self, username, password): self.http = _HttpClient() self.options = { 'username': username, 'password': password, 'io_encoding': 'utf8', 'show_input_params': 1, 'output_format': 'json', 'input_format': 'json', } def add_txt_record(self, record_name, record_content): data = self._create_params(record_name, {'text': record_content}) try: logger.debug('Attempting to add record: %s', data) response = self.http.send('https://api.reg.ru/api/regru2/zone/add_txt', data) except requests.exceptions.RequestException as e: logger.error('Encountered error adding TXT record: %d %s', e, e) raise errors.PluginError('Error communicating with the Reg.ru API: {0}'.format(e)) if 'result' not in response or response['result'] != 'success': logger.error('Encountered error adding TXT record: %s', response) raise errors.PluginError('Error communicating with the Reg.ru API: {0}'.format(response)) logger.debug('Successfully added TXT record')
MIT License
wavefronthq/python-client
wavefront_api_client/models/paged_maintenance_window.py
PagedMaintenanceWindow.cursor
python
def cursor(self, cursor): self._cursor = cursor
Sets the cursor of this PagedMaintenanceWindow. The id at which the current (limited) search can be continued to obtain more matching items # noqa: E501 :param cursor: The cursor of this PagedMaintenanceWindow. # noqa: E501 :type: str
https://github.com/wavefronthq/python-client/blob/e410ce0dd8a2334e995456f4f3d44e0f04664a3a/wavefront_api_client/models/paged_maintenance_window.py#L97-L106
import pprint import re import six from wavefront_api_client.configuration import Configuration class PagedMaintenanceWindow(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'cursor': 'str', 'items': 'list[MaintenanceWindow]', 'limit': 'int', 'more_items': 'bool', 'offset': 'int', 'sort': 'Sorting', 'total_items': 'int' } attribute_map = { 'cursor': 'cursor', 'items': 'items', 'limit': 'limit', 'more_items': 'moreItems', 'offset': 'offset', 'sort': 'sort', 'total_items': 'totalItems' } def __init__(self, cursor=None, items=None, limit=None, more_items=None, offset=None, sort=None, total_items=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._cursor = None self._items = None self._limit = None self._more_items = None self._offset = None self._sort = None self._total_items = None self.discriminator = None if cursor is not None: self.cursor = cursor if items is not None: self.items = items if limit is not None: self.limit = limit if more_items is not None: self.more_items = more_items if offset is not None: self.offset = offset if sort is not None: self.sort = sort if total_items is not None: self.total_items = total_items @property def cursor(self): return self._cursor @cursor.setter
Apache License 2.0
wildmeorg/wildbook-ia
wbia/web/apis.py
image_conv_feature_api
python
def image_conv_feature_api(rowid=None, model='resnet50', **kwargs):
    ibs = current_app.ibs
    gid = rowid
    assert gid is not None
    return _image_conv_feature(ibs, gid, model)
RESTful:
    Method: GET
    URL: /api/image/feature/json/<uuid>/
https://github.com/wildmeorg/wildbook-ia/blob/017057cfd3a2a7ea22f575842c9473e121c66ea4/wbia/web/apis.py#L248-L258
import logging from os.path import join, exists import zipfile import time from io import BytesIO from flask import request, current_app, send_file from wbia.control import controller_inject from wbia.web import appfuncs as appf import utool as ut import vtool as vt import uuid as uuid_module from wbia.web.app import PROMETHEUS print, rrr, profile = ut.inject2(__name__) logger = logging.getLogger('wbia') CLASS_INJECT_KEY, register_ibs_method = controller_inject.make_ibs_register_decorator( __name__ ) register_api = controller_inject.get_wbia_flask_api(__name__) register_route = controller_inject.get_wbia_flask_route(__name__) @register_api('/api/embed/', methods=['GET']) def web_embed(*args, **kwargs): ibs = current_app.ibs ut.embed() @register_route( '/api/image/src/<rowid>.jpg', methods=['GET'], __route_prefix_check__=False, __route_postfix_check__=False, __route_authenticate__=False, ) def image_src_api_ext(*args, **kwargs): return image_src_api(*args, **kwargs) @register_route( '/api/image/src/<rowid>/', methods=['GET'], __route_prefix_check__=False, __route_authenticate__=False, ) def image_src_api(rowid=None, thumbnail=False, fresh=False, **kwargs): from PIL import Image thumbnail = thumbnail or 'thumbnail' in request.args or 'thumbnail' in request.form ibs = current_app.ibs if thumbnail: gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True) fresh = fresh or 'fresh' in request.args or 'fresh' in request.form if fresh: ut.delete(gpath) gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True) else: gpath = ibs.get_image_paths(rowid) assert gpath is not None, 'image path should not be None' orient = ibs.get_image_orientation(rowid) image = vt.imread(gpath, orient=orient) image = appf.resize_via_web_parameters(image) image = image[:, :, ::-1] image_pil = Image.fromarray(image) img_io = BytesIO() image_pil.save(img_io, 'JPEG', quality=100) img_io.seek(0) return send_file(img_io, mimetype='image/jpeg') @register_route( '/api/annot/src/<rowid>/', methods=['GET'], __route_prefix_check__=False, __route_authenticate__=False, ) def annot_src_api(rowid=None, fresh=False, **kwargs): from PIL import Image ibs = current_app.ibs gpath = ibs.get_annot_chip_fpath(rowid, ensure=True) assert gpath is not None, 'image path should not be None' image = vt.imread(gpath, orient='auto') image = appf.resize_via_web_parameters(image) image = image[:, :, ::-1] image_pil = Image.fromarray(image) img_io = BytesIO() image_pil.save(img_io, 'JPEG', quality=100) img_io.seek(0) return send_file(img_io, mimetype='image/jpeg') @register_route( '/api/background/src/<rowid>/', methods=['GET'], __route_prefix_check__=False, __route_authenticate__=False, ) def background_src_api(rowid=None, fresh=False, **kwargs): from PIL import Image ibs = current_app.ibs gpath = ibs.get_annot_probchip_fpath(rowid) assert gpath is not None, 'image path should not be None' image = vt.imread(gpath, orient='auto') image = appf.resize_via_web_parameters(image) image = image[:, :, ::-1] image_pil = Image.fromarray(image) img_io = BytesIO() image_pil.save(img_io, 'JPEG', quality=100) img_io.seek(0) return send_file(img_io, mimetype='image/jpeg') @register_route( '/api/image/src/json/<uuid>/', methods=['GET'], __route_prefix_check__=False, __route_authenticate__=False, ) def image_src_api_json(uuid=None, **kwargs): ibs = current_app.ibs try: if isinstance(uuid, str): uuid = uuid_module.UUID(uuid) except Exception: from wbia.control.controller_inject import translate_wbia_webreturn return translate_wbia_webreturn( None, success=False, 
code=500, message='Invalid image UUID' ) gid = ibs.get_image_gids_from_uuid(uuid) return image_src_api(gid, **kwargs) def _image_conv_feature(ibs, gid, model): model = model.lower() model_list = ['vgg16', 'vgg19', 'resnet50', 'inception_v3'] assert model in model_list, 'model must be one of %s' % (model_list,) config = {'algo': model} gid_list = [gid] feature_list = ibs.depc_image.get_property( 'features', gid_list, 'vector', config=config ) feature = feature_list[0] byte_str = feature.tobytes() return byte_str @register_api('/api/image/feature/<rowid>/', methods=['GET'])
Apache License 2.0
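A sketch of the underlying helper; it assumes an opened WBIA controller `ibs`, a valid image rowid, and that the returned byte string holds float32 values (the dtype is an assumption).

import numpy as np

byte_str = _image_conv_feature(ibs, 1, 'resnet50')   # ibs and rowid assumed valid
feature = np.frombuffer(byte_str, dtype=np.float32)  # dtype assumed
print(feature.size)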
applitools/eyes.selenium.python
applitools/core/geometry.py
Point.distance_to
python
def distance_to(self, p): return (self - p).length()
Calculate the distance between two points. :return: The distance to p.
https://github.com/applitools/eyes.selenium.python/blob/3a09a3372a3a8915b3c97ee54fc223580c45c0a3/applitools/core/geometry.py#L74-L81
from __future__ import absolute_import import math import typing as tp from collections import OrderedDict from .errors import EyesError if tp.TYPE_CHECKING: from ..utils.custom_types import ViewPort __all__ = ('Point', 'Region',) class Point(object): __slots__ = ('x', 'y') def __init__(self, x=0, y=0): self.x = int(round(x)) self.y = int(round(y)) def __getstate__(self): return OrderedDict([("x", self.x), ("y", self.y)]) def __setstate__(self, state): self.x = state['x'] self.y = state['y'] def __add__(self, other): return Point(self.x + other.x, self.y + other.y) def __iadd__(self, other): return Point(self.x + other.x, self.y + other.y) def __sub__(self, other): return Point(self.x - other.x, self.y - other.y) def __mul__(self, scalar): return Point(self.x * scalar, self.y * scalar) def __div__(self, scalar): return Point(self.x / scalar, self.y / scalar) def __repr__(self): return "({0}, {1})".format(self.x, self.y) def __bool__(self): return self.x and self.y def __getitem__(self, item): if item not in ('x', 'y'): raise KeyError return getattr(self, item) def __eq__(self, other): return self.x == other.x and self.y == other.y @classmethod def create_top_left(cls): return cls(0, 0) def length(self): return math.sqrt(self.x ** 2 + self.y ** 2)
Apache License 2.0
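A quick check of distance_to with the 3-4-5 triangle:

from applitools.core.geometry import Point

a = Point(1, 2)
b = Point(4, 6)
print(a - b)              # (-3, -4)
print(a.distance_to(b))   # 5.0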
vshn/crmngr
crmngr/cli.py
create_command_parser
python
def create_command_parser(parent_parser, configuration, **kwargs):
    parser = parent_parser.add_parser(
        'create',
        description=(
            'Create a new environment.\n'
            '\n'
            'Unless --template/-t is specified, this command will create a new '
            'environment containing the following files (tldr. an environemnt '
            'without any modules):\n'
            '\n'
            'Puppetfile\n'
            '---\n'
            "forge 'http://forge.puppetlabs.com'\n"
            '\n'
            '---\n'
            '\n'
            'manifests/site.pp\n'
            '---\n'
            "hiera_include('classes')\n"
            '---\n'
            '\n'
            'If --template/-t is specified, the command will clone the '
            'existing ENVIRONMENT including all files and directories it '
            'contains.'
        ),
        formatter_class=KeepNewlineDescriptionHelpFormatter,
        help='create a new environment',
    )
    parser.add_argument(
        dest='environment',
        type=str,
        help='name of the new environment'
    )
    parser.add_argument(
        '-t', '--template',
        type=str,
        dest='template',
        metavar='ENVIRONMENT',
        help='name of an existing environment to clone the new environment from'
    )
    report_group = parser.add_argument_group('report options')
    report_group.add_argument(
        '--no-report',
        dest='report',
        action='store_false',
        help=('disable printing a report for the new environment '
              '(default: False)'),
    )
    version_check_group = report_group.add_mutually_exclusive_group()
    version_check_group.add_argument(
        '--version-check',
        dest='version_check',
        action='store_true',
        help=('enable check for latest version (forge modules) or latest git '
              'tag (git modules). '
              '(default: %s)' % str(configuration.version_check))
    )
    version_check_group.add_argument(
        '--no-version-check',
        dest='version_check',
        action='store_false',
        help=('disable check for latest version (forge modules) or latest '
              'git tag (git modules). '
              '(default: %s)' % str(not configuration.version_check))
    )
    wrap_group = report_group.add_mutually_exclusive_group()
    wrap_group.add_argument(
        '--wrap',
        dest='wrap',
        action='store_true',
        help=('enable wrapping of long lines. '
              '(default: %s)' % str(configuration.wrap)),
    )
    wrap_group.add_argument(
        '--no-wrap',
        dest='wrap',
        action='store_false',
        help=('disable wrapping long lines. '
              '(default: %s)' % str(not configuration.wrap)),
    )
    parser.set_defaults(
        version_check=configuration.version_check,
        wrap=configuration.wrap,
    )
    return parser
sets up the argument parser for the create command
https://github.com/vshn/crmngr/blob/966cad4d611d806f4c2230a1da09f2d672468327/crmngr/cli.py#L150-L234
import argparse import sys import textwrap from crmngr.version import __version__ def parse_cli_args(configuration): parser = argparse.ArgumentParser( description='manage a r10k-style control repository', formatter_class=argparse.ArgumentDefaultsHelpFormatter, prog='crmngr', ) parser.add_argument( '-v', '--version', action='version', version='%(prog)s ' + __version__ ) parser.add_argument( '--cache-ttl', dest='cache_ttl', type=int, metavar='TTL', help='time-to-live in seconds for version cache entries', ) parser.add_argument( '-d', '--debug', dest='debug', action='store_true', default=False, help='enable debug output' ) parser.add_argument( '-p', '--profile', dest='profile', default='default', help='crmngr configuration profile' ) parser.set_defaults( cache_ttl=configuration.cache_ttl, ) command_parser = parser.add_subparsers( title='commands', dest='command', description=('valid commands. Use -h/--help on command for usage ' 'details'), ) command_parser.required = True command_parser_defaults = { 'parent_parser': command_parser, 'configuration': configuration, } clean_command_parser(**command_parser_defaults) create_command_parser(**command_parser_defaults) delete_command_parser(**command_parser_defaults) environments_command_parser(**command_parser_defaults) profiles_command_parser(**command_parser_defaults) report_command_parser(**command_parser_defaults) update_parser = update_command_parser(**command_parser_defaults) args = parser.parse_args() try: verify_update_args(args) except CliError as exc: update_parser.print_help() sys.exit("error: %s" % exc) return args def _ensure_single_module(args): if args.modules is None or len(args.modules) < 1: raise CliError('it is not supported to specify --git/--forge ' 'without specifying a module (-m).') if args.modules is not None and len(args.modules) > 1: raise CliError('cannot operate on multiple modules when version ' 'options are set.') if args.reference is not None: raise CliError('it is not supported to specify -r/--reference ' 'in combination with version options.') if args.add and args.remove: raise CliError('it is not supported to specify both --add/--remove ' 'when working on a single module.') def _reject_git_version_parameters(args): if args.git_branch or args.git_commit or args.git_tag: raise CliError('it is not supported to specify --branch/--commit/' '--tag without --git.') def _reject_forge_version_parameter(args): if args.forge_version: raise CliError('it is not supported to specify --version without ' '--forge.') def verify_update_args(args): if args.command != 'update': return if args.git_url: _ensure_single_module(args) _reject_forge_version_parameter(args) if args.forge_version: raise CliError('--version is not supported for git modules.') elif args.forge: _ensure_single_module(args) _reject_git_version_parameters(args) if not args.remove: if '/' not in args.modules[0]: raise CliError('when adding or updating forge modules, -m ' 'has to be in author/module format') else: _reject_forge_version_parameter(args) _reject_git_version_parameters(args) if args.add: raise CliError('--add is not supported for bulk updates. Combine ' '--add with version options.') if args.remove: if args.modules is None or len(args.modules) < 1: raise CliError('it is not supported to specify --remove ' 'without specifying a module filter (-m).') def clean_command_parser(parent_parser, **kwargs): parser = parent_parser.add_parser( 'clean', description=( 'Clean version cache.\n' '\n' 'This will delete the cache directory (~/.crmngr/cache).' 
), formatter_class=KeepNewlineDescriptionHelpFormatter, help='clean version cache', ) return parser
BSD 3-Clause New or Revised License
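A sketch of wiring the sub-parser into a top-level parser and parsing a sample command line; `configuration` stands in for a loaded crmngr profile and is not constructed here.

import argparse

from crmngr.cli import create_command_parser

parser = argparse.ArgumentParser(prog='crmngr')
subparsers = parser.add_subparsers(dest='command')
create_command_parser(subparsers, configuration)   # configuration assumed to exist

args = parser.parse_args(['create', 'staging', '--template', 'production', '--no-report'])
print(args.environment, args.template, args.report)  # staging production False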
ashcrow/flagon
src/flagon/backends/google_cloud_datastore.py
DatastoreBackend._turn
python
def _turn(self, name, value):
    ft = FeatureToggle.get_by_id(name)
    if ft is None:
        raise errors.UnknownFeatureError('Unknown feature: %s' % name)
    ft.active = bool(value)
    ft.put()
Turns a feature on or off.

:param name: name of the feature.
:param value: Value to turn the feature to.
:raises: UnknownFeatureError
https://github.com/ashcrow/flagon/blob/50e6aa96854468a89399ef08573e4f814a002d26/src/flagon/backends/google_cloud_datastore.py#L76-L89
from google.appengine.ext import ndb from flagon import errors from flagon.backends import Backend class FeatureToggle(ndb.Model): enabled = ndb.BooleanProperty(indexed=False) strategy_id = ndb.StringProperty('strategyId', indexed=False) strategy_params_names = ndb.StringProperty('strategyParamsNames', indexed=False, repeated=True) strategy_params_values = ndb.StringProperty('strategyParamsValues', indexed=False, repeated=True) class DatastoreBackend(Backend): def __init__(self): pass @ndb.non_transactional def exists(self, name): if FeatureToggle.get_by_id(name) is None: return False else: return True @ndb.non_transactional def is_active(self, name): ft = FeatureToggle.get_by_id(name) if ft is None: raise errors.UnknownFeatureError('Unknown feature: %s' % name) else: return ft.enabled @ndb.non_transactional
MIT License
mozilla/pymake
pymake/data.py
mtimeislater
python
def mtimeislater(deptime, targettime):
    if deptime is None:
        return True
    if targettime is None:
        return False
    return int(1000 * deptime) > int(1000 * targettime)
Is the mtime of the dependency later than the target?
https://github.com/mozilla/pymake/blob/034ae9ea5b726e03647d049147c5dbf688e94aaf/pymake/data.py#L31-L41
import logging, re, os, sys from functools import reduce import parserdata, parser, functions, process, util, implicit import globrelative from pymake import errors try: from cStringIO import StringIO except ImportError: from io import StringIO if sys.version_info[0] < 3: str_type = basestring else: str_type = str _log = logging.getLogger('pymake.data') def withoutdups(it): r = set() for i in it: if not i in r: r.add(i) yield i
MIT License
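A small standalone check of the comparison semantics above; the helper is restated verbatim from the entry so the calls run without pymake, and the mtimes are made-up float seconds of the kind os.path.getmtime() returns.

def mtimeislater(deptime, targettime):
    if deptime is None:
        return True
    if targettime is None:
        return False
    return int(1000 * deptime) > int(1000 * targettime)

print(mtimeislater(None, 1700000000.0))          # True: unknown dependency mtime
print(mtimeislater(1700000000.5, 1700000000.0))  # True: dependency is newer than the target
print(mtimeislater(1700000000.0, 1700000000.5))  # False: target is newer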
pycontribs/pyrax
pyrax/client.py
BaseClient.create
python
def create(self, *args, **kwargs):
    return self._manager.create(*args, **kwargs)
Creates a new resource.
https://github.com/pycontribs/pyrax/blob/a0c022981f76a4cba96a22ecc19bb52843ac4fbe/pyrax/client.py#L99-L101
from __future__ import absolute_import, unicode_literals import json import logging import time import requests import six from six.moves import urllib import pyrax import pyrax.exceptions as exc def _safe_quote(val): SAFE_QUOTE_CHARS = b"/.?&=," if isinstance(val, six.text_type): val = val.encode(pyrax.get_encoding()) return urllib.parse.quote(val, safe=SAFE_QUOTE_CHARS) class BaseClient(object): user_agent = None name = "base" def __init__(self, identity, region_name=None, endpoint_type=None, management_url=None, service_name=None, timings=False, verify_ssl=True, http_log_debug=False, timeout=None): self.version = "v1.1" self.identity = identity self.region_name = region_name self.endpoint_type = endpoint_type self.service_name = service_name self.management_url = management_url self.timings = timings self.verify_ssl = verify_ssl self.http_log_debug = http_log_debug self.timeout = timeout self.times = [] self._manager = None self._configure_manager() def _configure_manager(self): raise NotImplementedError def list(self, limit=None, marker=None): return self._manager.list(limit=limit, marker=marker) def get(self, item): return self._manager.get(item)
Apache License 2.0
maldoinc/mamba
mamba/ast.py
full_eval
python
def full_eval(expr: BaseExpression):
    while isinstance(expr, BaseExpression):
        expr = expr.eval()
    return expr
Fully evaluates the passed expression, returning its value
https://github.com/maldoinc/mamba/blob/d11974b916dc672067123823dfbc66ce51c66f61/mamba/ast.py#L70-L78
import operator from types import LambdaType from mamba.exceptions import * import mamba.symbol_table symbols = mamba.symbol_table.SymbolTable() class InstructionList: def __init__(self, children=None): if children is None: children = [] self.children = children def __len__(self): return len(self.children) def __iter__(self): return iter(self.children) def __repr__(self): return '<InstructionList {0}>'.format(self.children) def eval(self): ret = [] for n in self: if isinstance(n, ExitStatement): return n res = n.eval() if isinstance(res, ExitStatement): return res elif res is not None: ret.append(res) return ret class BaseExpression: def eval(self): raise NotImplementedError() class ExitStatement(BaseExpression): def __iter__(self): return [] def eval(self): pass class ReturnStatement(ExitStatement): def __init__(self, expr: BaseExpression): self.expr = expr def __repr__(self): return '<Return expr={0}>'.format(self.expr) def eval(self): return full_eval(self.expr)
MIT License
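A minimal sketch of how the loop above collapses nested expressions; BaseExpression is restated from the entry's context, while Literal and Negate are hypothetical node types invented for illustration.

class BaseExpression:
    def eval(self):
        raise NotImplementedError

class Literal(BaseExpression):
    def __init__(self, value):
        self.value = value
    def eval(self):
        return self.value                   # evaluates to a plain Python value

class Negate(BaseExpression):
    def __init__(self, inner):
        self.inner = inner
    def eval(self):
        return Literal(-self.inner.eval())  # evaluates to another expression node

def full_eval(expr):
    while isinstance(expr, BaseExpression):
        expr = expr.eval()
    return expr

print(full_eval(Negate(Literal(7))))  # -7: two eval() rounds are needed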
drorlab/atom3d
atom3d/filters/filters.py
first_chain_filter
python
def first_chain_filter(df):
    chains = df[['structure', 'model', 'chain']].drop_duplicates()
    chains = chains.sort_values(['structure', 'model', 'chain'])
    chains['to_keep'] = ~chains['structure'].duplicated()
    chains_to_keep = chains.set_index(['structure', 'model', 'chain'])
    to_keep = chains_to_keep.loc[df.set_index(['structure', 'model', 'chain']).index]
    return df[to_keep.values]
Remove anything beyond the first model/chain in each structure.

:param df: dataframe to filter against.
:type df: atoms dataframe.
:return: same dataframe, but with only the atoms corresponding to the first chain of the first model left.
:rtype: atoms dataframe.
https://github.com/drorlab/atom3d/blob/7eacb676f56b4130fd805f4b2901a600170b88f9/atom3d/filters/filters.py#L55-L74
import Bio.PDB.Polypeptide as Poly import pandas as pd import scipy.spatial as ss import numpy as np def standard_residue_filter(df): residues = df[['structure', 'model', 'chain', 'residue', 'resname']] .drop_duplicates() sel = residues['resname'].apply( lambda x: Poly.is_aa(x, standard=True)) residues['to_keep'] = sel residues_to_keep = residues.set_index( ['structure', 'model', 'chain', 'residue', 'resname'])['to_keep'] to_keep = residues_to_keep.loc[df.set_index( ['structure', 'model', 'chain', 'residue', 'resname']).index] return df[to_keep.values] def first_model_filter(df): models = df[['structure', 'model']].drop_duplicates() models = models.sort_values(['structure', 'model']) models['to_keep'] = ~models['structure'].duplicated() models_to_keep = models.set_index(['structure', 'model']) to_keep = models_to_keep.loc[df.set_index(['structure', 'model']).index] return df[to_keep.values]
MIT License
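A hedged sketch of what the filter is meant to do, run on a toy atoms dataframe; the merge below is a simplified equivalent written for illustration, not the library's implementation, and all column values are made up.

import pandas as pd

df = pd.DataFrame({
    'structure': ['1abc'] * 4,
    'model':     [1, 1, 1, 1],
    'chain':     ['A', 'A', 'B', 'B'],
    'residue':   [1, 2, 1, 2],
})

# Per structure, keep only the rows belonging to the first (model, chain) pair.
firsts = (df[['structure', 'model', 'chain']]
          .drop_duplicates()
          .sort_values(['structure', 'model', 'chain'])
          .groupby('structure', as_index=False)
          .first())
print(df.merge(firsts, on=['structure', 'model', 'chain']))  # only the chain 'A' rows survive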
ibm-security/ibmsecurity
ibmsecurity/isam/base/management_authorization/role.py
get_all
python
def get_all(isamAppliance, check_mode=False, force=False):
    return isamAppliance.invoke_get("Get management authorization - roles",
                                    "/authorization/roles/v1")
Get management authorization - roles
https://github.com/ibm-security/ibmsecurity/blob/da098f7d555e571a99a0d7cd47a51add483feb6f/ibmsecurity/isam/base/management_authorization/role.py#L7-L12
import logging import ibmsecurity.utilities.tools logger = logging.getLogger(__name__)
Apache License 2.0
simopt-admin/simopt
simopt/base.py
Problem.response_dict_to_objectives
python
def response_dict_to_objectives(self, response_dict):
    raise NotImplementedError
Convert a dictionary with response keys to a vector of objectives.

Arguments
---------
response_dict : dictionary
    dictionary with response keys and associated values

Returns
-------
objectives : tuple
    vector of objectives
https://github.com/simopt-admin/simopt/blob/eda24b9f6a5885a37321ad7f8534bf10dec22480/simopt/base.py#L417-L432
import numpy as np from copy import deepcopy from rng.mrg32k3a import MRG32k3a class Solver(object): def __init__(self, fixed_factors): self.factors = fixed_factors for key in self.specifications: if key not in fixed_factors: self.factors[key] = self.specifications[key]["default"] def __eq__(self, other): if type(self) == type(other): if self.factors == other.factors: return True else: print("Solver factors do not match.") return False else: print("Solver types do not match.") return False def attach_rngs(self, rng_list): self.rng_list = rng_list def solve(self, problem): raise NotImplementedError def check_crn_across_solns(self): return True def check_solver_factor(self, factor_name): is_permissible = True is_permissible *= self.check_factor_datatype(factor_name) is_permissible *= self.check_factor_list[factor_name]() return is_permissible def check_solver_factors(self): return True def check_factor_datatype(self, factor_name): is_right_type = isinstance(self.factors[factor_name], self.specifications[factor_name]["datatype"]) return is_right_type def create_new_solution(self, x, problem): new_solution = Solution(x, problem) new_solution.attach_rngs(rng_list=self.solution_progenitor_rngs, copy=True) if not self.factors["crn_across_solns"]: for rng in self.solution_progenitor_rngs: for _ in range(problem.model.n_rngs): rng.advance_substream() return new_solution def rebase(self, n_reps): new_rngs = [] for rng in self.solution_progenitor_rngs: stream_index = rng.s_ss_sss_index[0] substream_index = rng.s_ss_sss_index[1] new_rngs.append(MRG32k3a(s_ss_sss_index=[stream_index, substream_index, n_reps])) self.solution_progenitor_rngs = new_rngs class Problem(object): def __init__(self, fixed_factors, model_fixed_factors): self.factors = fixed_factors for key in self.specifications: if key not in fixed_factors: self.factors[key] = self.specifications[key]["default"] for key in self.model_default_factors: if key not in model_fixed_factors: model_fixed_factors[key] = self.model_default_factors[key] self.model_fixed_factors = model_fixed_factors def __eq__(self, other): if type(self) == type(other): if self.factors == other.factors: non_decision_factors = set(self.model.factors.keys()) - self.model_decision_factors for factor in non_decision_factors: if self.model.factors[factor] != other.model.factors[factor]: print("Model factors do not match") return False return True else: print("Problem factors do not match.") return False else: print("Problem types do not match.") return False def check_initial_solution(self): return self.check_deterministic_constraints(x=self.factors["initial_solution"]) def check_budget(self): return self.factors["budget"] > 0 def check_problem_factor(self, factor_name): is_permissible = True is_permissible *= self.check_factor_datatype(factor_name) is_permissible *= self.check_factor_list[factor_name]() return is_permissible def check_problem_factors(self): return True def check_factor_datatype(self, factor_name): is_right_type = isinstance(self.factors[factor_name], self.specifications[factor_name]["datatype"]) return is_right_type def attach_rngs(self, rng_list): self.rng_list = rng_list def vector_to_factor_dict(self, vector): raise NotImplementedError def factor_dict_to_vector(self, factor_dict): raise NotImplementedError
MIT License
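A short sketch of how a concrete problem would override this hook; the base class is restated from the entry, while DemoProblem and the 'profit' response key are made up for illustration.

class Problem(object):
    def response_dict_to_objectives(self, response_dict):
        raise NotImplementedError

class DemoProblem(Problem):
    def response_dict_to_objectives(self, response_dict):
        # Map named simulation responses onto the objective vector.
        return (response_dict["profit"],)

print(DemoProblem().response_dict_to_objectives({"profit": 42.0}))  # (42.0,)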
dcbark01/pi-web-api-client-python
osisoft/pidevclub/piwebapi/api_client.py
ApiClient.__deserialize_datatime
python
def __deserialize_datatime(self, string):
    try:
        from dateutil.parser import parse
        return parse(string)
    except ImportError:
        return string
    except ValueError:
        raise rest.ApiException(
            status=0,
            reason=(
                "Failed to parse `{0}` into a datetime object"
                .format(string)
            )
        )
Deserializes string to datetime.

The string should be in iso8601 datetime format.

:param string: str.
:return: datetime.
https://github.com/dcbark01/pi-web-api-client-python/blob/b620f72f2d2551632f406df44bd409f5cc305055/osisoft/pidevclub/piwebapi/api_client.py#L459-L480
from requests.auth import HTTPBasicAuth from requests_kerberos import HTTPKerberosAuth, OPTIONAL from requests_ntlm import HttpNtlmAuth import re import json import threading from datetime import date, datetime from six import PY3, integer_types, iteritems, text_type from six.moves.urllib.parse import quote from osisoft.pidevclub.piwebapi import rest, models class ApiClient(object): PRIMITIVE_TYPES = (float, bool, bytes, text_type) + integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int, 'float': float, 'str': str, 'bool': bool, 'date': date, 'datetime': datetime, 'object': object, } def __init__(self, host, verifySsl): self.rest_client = rest.RESTClientObject(verifySsl) self.default_headers = {} self.host = host self.user_agent = 'PI-Web-API/1.0.0/python' @property def user_agent(self): return self.default_headers['User-Agent'] @user_agent.setter def user_agent(self, value): self.default_headers['User-Agent'] = value def set_kerberos_auth(self): self.rest_client.auth = HTTPKerberosAuth(force_preemptive=True, mutual_authentication=OPTIONAL, delegate=True) def set_ntlm_auth(self, username, password): self.rest_client.auth = HttpNtlmAuth(username, password) def set_basic_auth(self, username, password): self.rest_client.auth = HTTPBasicAuth(username, password) def set_default_header(self, header_name, header_value): self.default_headers[header_name] = header_value def __call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, files=None, response_type=None, callback=None, _return_http_data_only=None, collection_formats=None, _preload_content=True, _request_timeout=None): header_params = {} header_params['X-Requested-With'] = 'PIWebApiWrapper' header_params['Content-Type'] = 'application/json' header_params['Accept'] = 'application/json' if path_params: path_params = self.sanitize_for_serialization(path_params) path_params = self.parameters_to_tuples(path_params, collection_formats) for k, v in path_params: resource_path = resource_path.replace( '{%s}' % k, quote(str(v), safe='')) if query_params: query_params = self.sanitize_for_serialization(query_params) query_params = self.parameters_to_tuples(query_params, collection_formats) if body: body = self.sanitize_for_serialization(body) url = self.host + resource_path response_data = self.request(method, url, query_params=query_params, headers=header_params, body=body) return_data = response_data if _preload_content: if response_type: return_data = self.deserialize(response_data, response_type) else: return_data = None if callback: if _return_http_data_only: callback(return_data) else: callback((return_data, response_data.status, response_data.headers)) elif _return_http_data_only: return (return_data) else: return (return_data, response_data.status_code, response_data.headers) def sanitize_for_serialization(self, obj): if obj is None: return None elif isinstance(obj, self.PRIMITIVE_TYPES): return obj elif isinstance(obj, list): return [self.sanitize_for_serialization(sub_obj) for sub_obj in obj] elif isinstance(obj, tuple): return tuple(self.sanitize_for_serialization(sub_obj) for sub_obj in obj) elif isinstance(obj, (datetime, date)): return obj.isoformat() if isinstance(obj, dict): obj_dict = obj else: obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) for attr, _ in iteritems(obj.swagger_types) if getattr(obj, attr) is not None} return {key: self.sanitize_for_serialization(val) for key, val in iteritems(obj_dict)} def deserialize(self, response, response_type): 
if response_type == "file": return self.__deserialize_file(response) try: response.data = response.content.decode('utf-8') data = json.loads(response.data) except ValueError: data = response.data return self.deserialize_object(data, response_type) def deserialize_object(self, data, klass): if data is None: return None if type(klass) == str: if klass.startswith('list['): sub_kls = re.match('list\[(.*)\]', klass).group(1) return [self.deserialize_object(sub_data, sub_kls) for sub_data in data] if klass.startswith('dict('): sub_kls = re.match('dict\(([^,]*), (.*)\)', klass).group(2) return {k: self.deserialize_object(v, sub_kls) for k, v in iteritems(data)} if klass in self.NATIVE_TYPES_MAPPING: klass = self.NATIVE_TYPES_MAPPING[klass] else: klass = getattr(models, klass) if klass in self.PRIMITIVE_TYPES: return self.__deserialize_primitive(data, klass) elif klass == object: return self.__deserialize_object(data) elif klass == date: return self.__deserialize_date(data) elif klass == datetime: return self.__deserialize_datatime(data) else: return self.__deserialize_model(data, klass) def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, files=None, response_type=None, callback=None, _return_http_data_only=None, collection_formats=None, _preload_content=True, _request_timeout=None): if callback is None: return self.__call_api(resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, callback, _return_http_data_only, collection_formats, _preload_content, _request_timeout) else: thread = threading.Thread(target=self.__call_api, args=(resource_path, method, path_params, query_params, header_params, body, post_params, files, response_type, callback, _return_http_data_only, collection_formats, _preload_content, _request_timeout)) thread.start() return thread def request(self, method, url, query_params=None, headers=None, body=None): return self.rest_client.send_request(url, method, body=body, query_params=query_params, headers=headers) def parameters_to_tuples(self, params, collection_formats): new_params = [] if collection_formats is None: collection_formats = {} for k, v in iteritems(params) if isinstance(params, dict) else params: if k in collection_formats: collection_format = collection_formats[k] if collection_format == 'multi': new_params.extend((k, value) for value in v) else: if collection_format == 'ssv': delimiter = ' ' elif collection_format == 'tsv': delimiter = '\t' elif collection_format == 'pipes': delimiter = '|' else: delimiter = ',' new_params.append( (k, delimiter.join(str(value) for value in v))) else: new_params.append((k, v)) return new_params def prepare_post_parameters(self, post_params=None, files=None): params = [] if post_params: params = post_params return params def select_header_accept(self, accepts): if not accepts: return accepts = [x.lower() for x in accepts] if 'application/json' in accepts: return 'application/json' else: return ', '.join(accepts) def select_header_content_type(self, content_types): if not content_types: return 'application/json' content_types = [x.lower() for x in content_types] if 'application/json' in content_types or '*/*' in content_types: return 'application/json' else: return content_types[0] def __deserialize_primitive(self, data, klass): try: return klass(data) except UnicodeEncodeError: return None except TypeError: return data def __deserialize_object(self, value): return value def __deserialize_date(self, string): 
try: from dateutil.parser import parse return parse(string).date() except ImportError: return string except ValueError: raise rest.ApiException( status=0, reason="Failed to parse `{0}` into a date object".format(string) )
Apache License 2.0
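A standalone sketch of the same ISO 8601 handling, assuming python-dateutil is installed; the timestamp string is made up.

from dateutil.parser import parse

dt = parse("2021-03-01T12:30:00Z")
print(dt.isoformat())  # 2021-03-01T12:30:00+00:00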
google/openhtf
pylint_plugins/conf_plugin.py
transform_conf_module
python
def transform_conf_module(cls):
    global CONF_NODE
    if cls.name == 'openhtf.conf':
        cls._locals.update(cls.locals['Configuration'][0].locals)
        CONF_NODE = cls
        CONF_LOCALS.update(cls.locals)
Transform usages of the conf module by updating locals.
https://github.com/google/openhtf/blob/4646aa6b9ba67532ce7e8743ce16d7bd4369ad3d/pylint_plugins/conf_plugin.py#L62-L72
import astroid from astroid import MANAGER def __init__(self): pass CONF_NODE = None CONF_LOCALS = {} CURRENT_ROOT = None def transform_declare(node): global CURRENT_ROOT if not (isinstance(node.func, astroid.Attribute) and isinstance(node.func.expr, astroid.Name) and node.func.expr.name == 'conf' and node.func.attrname == 'declare'): return conf_key_name = None if node.args: conf_key_name = node.args[0].value else: for keyword in node.keywords: if keyword.arg == 'name': conf_key_name = keyword.value.value break assert conf_key_name != None, "Invalid conf.declare() syntax" if CONF_NODE: if not CURRENT_ROOT or CURRENT_ROOT != node.root(): CURRENT_ROOT = node.root() CONF_NODE.locals = CONF_LOCALS CONF_NODE.locals[conf_key_name] = [None] else: CONF_LOCALS[conf_key_name] = [None]
Apache License 2.0
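A hedged sketch of how such a transform is usually wired into a pylint plugin via astroid; the module-node registration and the register() stub below are assumptions, not taken from the entry, and the transform body is reduced to a placeholder.

import astroid
from astroid import MANAGER

def transform_conf_module(module):
    # Placeholder for the real transform above, which rewrites the module's locals.
    if module.name == 'openhtf.conf':
        pass

def register(linter):
    # pylint calls register() when loading the plugin; the transform is
    # installed at import time, so nothing else is needed here.
    pass

MANAGER.register_transform(astroid.Module, transform_conf_module)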
quantmind/lux
lux/ext/sockjs/rpc/channels.py
WsChannelsRpc.ws_unsubscribe
python
def ws_unsubscribe(self, wsrequest):
    channel = wsrequest.required_param('channel')
    event = wsrequest.required_param('event')
    wsrequest.check_permission('channels:%s:%s' % (channel, event), 'read')
    return self.channel_unsubscribe(wsrequest, channel, event)
Un-subscribe from an event on a channel.

From the client::

    client.rpc('unsubscribe', {'channel': 'mychannel', 'event': 'myevent'})
https://github.com/quantmind/lux/blob/7318fcd86c77616aada41d8182a04339680a554c/lux/ext/sockjs/rpc/channels.py#L42-L54
from pulsar.apps.data.channels import CallbackError class WsChannelsRpc: def channel_writer(self, ws): if not hasattr(ws, 'channel_writer'): ws.channel_writer = Writer(ws, self) return ws.channel_writer def ws_publish(self, wsrequest): channel = wsrequest.required_param('channel') event = wsrequest.required_param('event') wsrequest.check_permission('channels:%s:%s' % (channel, event), 'update') data = wsrequest.params.get('data') return self.channel_publish(wsrequest, channel, event, data) def ws_subscribe(self, wsrequest): channel = wsrequest.required_param('channel') event = wsrequest.required_param('event') wsrequest.check_permission('channels:%s:%s' % (channel, event), 'read') return self.channel_subscribe(wsrequest, channel, event)
BSD 3-Clause New or Revised License
elcorto/imagecluster
imagecluster/calc.py
cluster_stats
python
def cluster_stats(clusters):
    return np.array([[k, len(clusters[k])] for k in
                     np.sort(list(clusters.keys()))],
                    dtype=int)
Count clusters of different sizes.

Returns
-------
2d array
    Array with column 1 = csize sorted (number of images in the cluster)
    and column 2 = cnum (number of clusters with that size).

    ::

        [[csize, cnum],
         [...],
        ]
https://github.com/elcorto/imagecluster/blob/687fa357a1e164968349a5ae20a557f1e353354e/imagecluster/calc.py#L258-L274
import os from collections import OrderedDict import numpy as np from scipy.spatial import distance from scipy.cluster import hierarchy from sklearn.decomposition import PCA from tensorflow.keras.applications.vgg16 import VGG16, preprocess_input from tensorflow.keras.models import Model pj = os.path.join def get_model(layer='fc2'): base_model = VGG16(weights='imagenet', include_top=True) model = Model(inputs=base_model.input, outputs=base_model.get_layer(layer).output) return model def fingerprint(image, model): if image.shape[2] == 1: image = image.repeat(3, axis=2) arr4d = np.expand_dims(image, axis=0) arr4d_pp = preprocess_input(arr4d) return model.predict(arr4d_pp)[0,:] def fingerprints(images, model): fingerprints = {} for fn,image in images.items(): print(fn) fingerprints[fn] = fingerprint(image, model) return fingerprints def pca(fingerprints, n_components=0.9, **kwds): if 'n_components' not in kwds.keys(): kwds['n_components'] = n_components _fingerprints = OrderedDict(fingerprints) X = np.array(list(_fingerprints.values())) Xp = PCA(**kwds).fit(X).transform(X) return {k:v for k,v in zip(_fingerprints.keys(), Xp)} def cluster(fingerprints, sim=0.5, timestamps=None, alpha=0.3, method='average', metric='euclidean', extra_out=False, print_stats=True, min_csize=2): assert 0 <= sim <= 1, "sim not 0..1" assert 0 <= alpha <= 1, "alpha not 0..1" assert min_csize >= 1, "min_csize must be >= 1" files = list(fingerprints.keys()) dfps = distance.pdist(np.array(list(fingerprints.values())), metric) if timestamps is not None: set_files = set(files) set_tsfiles = set(timestamps.keys()) set_diff = set_files.symmetric_difference(set_tsfiles) assert len(set_diff) == 0, (f"files in fingerprints and timestamps do " f"not match: diff={set_diff}") tsarr = np.array([timestamps[k] for k in files])[:,None] dts = distance.pdist(tsarr, metric) dts = dts / dts.max() dfps = dfps / dfps.max() dfps = dfps * (1 - alpha) + dts * alpha Z = hierarchy.linkage(dfps, method=method, metric=metric) cut = hierarchy.fcluster(Z, t=dfps.max()*(1.0-sim), criterion='distance') cluster_dct = dict((iclus, []) for iclus in np.unique(cut)) for iimg,iclus in enumerate(cut): cluster_dct[iclus].append(files[iimg]) clusters = {} for cluster in cluster_dct.values(): csize = len(cluster) if csize >= min_csize: if not (csize in clusters.keys()): clusters[csize] = [cluster] else: clusters[csize].append(cluster) if print_stats: print_cluster_stats(clusters) if extra_out: extra = {'Z': Z, 'dfps': dfps, 'cluster_dct': cluster_dct, 'cut': cut} return clusters, extra else: return clusters
BSD 3-Clause New or Revised License
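A quick usage sketch; the function is restated from the entry so it runs without the package's heavy imports, and the clusters dict is a made-up example of the {size: [cluster, ...]} layout it expects.

import numpy as np

def cluster_stats(clusters):
    return np.array([[k, len(clusters[k])] for k in
                     np.sort(list(clusters.keys()))], dtype=int)

clusters = {
    2: [['a.png', 'b.png'], ['c.png', 'd.png']],
    3: [['e.png', 'f.png', 'g.png']],
}
print(cluster_stats(clusters))
# [[2 2]
#  [3 1]]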
openstack/futurist
futurist/_thread.py
_clean_up
python
def _clean_up():
    global _dying
    _dying = True
    threads_to_wait_for = []
    while _to_be_cleaned:
        worker, _work_val = _to_be_cleaned.popitem()
        worker.stop(soon_as_possible=True)
        threads_to_wait_for.append(worker)
    while threads_to_wait_for:
        worker = threads_to_wait_for.pop()
        try:
            join_thread(worker)
        finally:
            del worker
Ensure all threads that were created were destroyed cleanly.
https://github.com/openstack/futurist/blob/d70c7e5f4686ba0ea9235b280f8bdd07fecfeccb/futurist/_thread.py#L129-L143
import atexit import sys import threading import weakref import six from six.moves import queue as compat_queue class Threading(object): @staticmethod def event_object(*args, **kwargs): return threading.Event(*args, **kwargs) @staticmethod def lock_object(*args, **kwargs): return threading.Lock(*args, **kwargs) @staticmethod def rlock_object(*args, **kwargs): return threading.RLock(*args, **kwargs) @staticmethod def condition_object(*args, **kwargs): return threading.Condition(*args, **kwargs) _to_be_cleaned = weakref.WeakKeyDictionary() _dying = False if six.PY2: join_thread = lambda thread: thread.join(sys.maxint) else: join_thread = lambda thread: thread.join() _TOMBSTONE = object() class ThreadWorker(threading.Thread): MAX_IDLE_FOR = 1 def __init__(self, executor, work_queue): super(ThreadWorker, self).__init__() self.work_queue = work_queue self.should_stop = False self.idle = False self.daemon = True self.executor_ref = weakref.ref( executor, lambda _obj: work_queue.put(_TOMBSTONE)) @classmethod def create_and_register(cls, executor, work_queue): w = cls(executor, work_queue) _to_be_cleaned[w] = True return w def _is_dying(self): if self.should_stop or _dying: return True executor = self.executor_ref() if executor is None: return True del executor return False def _wait_for_work(self): self.idle = True work = None while work is None: try: work = self.work_queue.get(True, self.MAX_IDLE_FOR) except compat_queue.Empty: if self._is_dying(): work = _TOMBSTONE self.idle = False return work def stop(self, soon_as_possible=False): if soon_as_possible: self.should_stop = True self.work_queue.put(_TOMBSTONE) def run(self): while not self._is_dying(): work = self._wait_for_work() try: if work is _TOMBSTONE: self.work_queue.put(_TOMBSTONE) return else: work.run() finally: del work
Apache License 2.0
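A minimal sketch of the shutdown-hook pattern this function implements; the worker registry and the atexit registration below are illustrative stand-ins, not futurist's actual wiring.

import atexit
import threading

_workers = []  # illustrative stand-in for the module's weak registry of workers

def _clean_up():
    # Join every remaining worker before the interpreter tears down threading state.
    while _workers:
        _workers.pop().join()

atexit.register(_clean_up)

t = threading.Thread(target=lambda: None)
t.start()
_workers.append(t)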
ericchansen/q2mm
q2mm/simplex.py
calc_simp_var
python
def calc_simp_var(params):
    logger.log(1, '>>> params: {}'.format(params))
    logger.log(1, '>>> 1st ders.: {}'.format([x.d1 for x in params]))
    logger.log(1, '>>> 2nd ders.: {}'.format([x.d2 for x in params]))
    for param in params:
        param.simp_var = param.d2 / param.d1**2.
Simplex variable is calculated: (2nd der.) / (1st der.)**2
https://github.com/ericchansen/q2mm/blob/5adc047fba4dac3768847cc13c15ba54d98e1df4/q2mm/simplex.py#L404-L412
from __future__ import absolute_import from __future__ import division import copy import collections import logging import logging.config import numpy as np import re import sqlite3 import textwrap import calculate import compare import constants as co import datatypes import opt import parameters logger = logging.getLogger(__name__) class Simplex(opt.Optimizer): def __init__(self, direc=None, ff=None, ff_lines=None, args_ff=None, args_ref=None): super(Simplex, self).__init__( direc, ff, ff_lines, args_ff, args_ref) self._max_cycles_wo_change = None self.do_massive_contraction = True self.do_weighted_reflection = True self.max_cycles = 100 self.max_params = 3 @property def best_ff(self): if self.new_ffs: self.new_ffs = sorted(self.new_ffs, key=lambda x: x.score) if self.new_ffs[0].score < self.ff.score: best_ff = self.new_ffs[0] best_ff = restore_simp_ff(best_ff, self.ff) return best_ff else: return self.ff else: return self.ff @opt.catch_run_errors def run(self, r_data=None): if r_data is None: r_data = opt.return_ref_data(self.args_ref) if self.ff.score is None: logger.log(20, '~~ CALCULATING INITIAL FF SCORE ~~'.rjust(79, '~')) self.ff.export_ff() data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) self.ff.score = compare.compare_data(r_dict, c_dict) else: logger.log(20, ' -- Reused existing score and data for initial FF.') logger.log(20, '~~ SIMPLEX OPTIMIZATION ~~'.rjust(79, '~')) logger.log(20, 'INIT FF SCORE: {}'.format(self.ff.score)) opt.pretty_ff_results(self.ff, level=20) if self.max_params and len(self.ff.params) > self.max_params: logger.log(20, ' -- More parameters than the maximum allowed.') logger.log(5, 'CURRENT PARAMS: {}'.format(len(self.ff.params))) logger.log(5, 'MAX PARAMS: {}'.format(self.max_params)) if None in [x.d1 for x in self.ff.params]: logger.log(15, ' -- Calculating new parameter derivatives.') ffs = opt.differentiate_ff(self.ff, central=True) for ff in ffs: ff.export_ff(path=self.ff.path, lines=self.ff_lines) logger.log(20, ' -- Calculating {}.'.format(ff)) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) ff.score = compare.compare_data(r_dict, c_dict) opt.pretty_ff_results(ff) opt.param_derivs(self.ff, ffs) ffs = opt.extract_forward(ffs) logger.log(5, ' -- Keeping {} forward differentiated ' 'FFs.'.format(len(ffs))) else: logger.log(15, ' -- Reusing existing parameter derivatives.') ffs = opt.differentiate_ff(self.ff, central=False) params = select_simp_params_on_derivs( self.ff.params, max_params=self.max_params) self.new_ffs = opt.extract_ff_by_params(ffs, params) logger.log(1, '>>> len(self.new_ffs): {}'.format(len(self.new_ffs))) ff_rows = [x.mm3_row for x in params] ff_cols = [x.mm3_col for x in params] for ff in self.new_ffs: new_params = [] for param in ff.params: if param.mm3_row in ff_rows and param.mm3_col in ff_cols: new_params.append(param) ff.params = new_params ff_copy = copy.deepcopy(self.ff) new_params = [] for param in ff.params: if param.mm3_row in ff_rows and param.mm3_col in ff_cols: new_params.append(param) ff_copy.params = new_params else: self.new_ffs = opt.differentiate_ff(self.ff, central=False) logger.log(1, '>>> len(self.new_ffs): {}'.format(len(self.new_ffs))) ff_copy = copy.deepcopy(self.ff) for ff in self.new_ffs: if ff.score is None: ff.export_ff(path=self.ff.path, lines=self.ff_lines) logger.log(20, ' -- 
Calculating {}.'.format(ff)) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) ff.score = compare.compare_data(r_dict, c_dict) opt.pretty_ff_results(ff) self.new_ffs = sorted(self.new_ffs + [ff_copy], key=lambda x: x.score) self._max_cycles_wo_change = 3 * (len(self.new_ffs) - 1) wrapper = textwrap.TextWrapper(width=79) opt.pretty_ff_params(self.new_ffs) current_cycle = 0 cycles_wo_change = 0 while current_cycle < self.max_cycles and cycles_wo_change < self._max_cycles_wo_change: current_cycle += 1 last_best_ff = copy.deepcopy(self.new_ffs[0]) logger.log(20, '~~ START SIMPLEX CYCLE {} ~~'.format( current_cycle).rjust(79, '~')) logger.log(20, 'ORDERED FF SCORES:') logger.log(20, wrapper.fill('{}'.format( ' '.join('{:15.4f}'.format(x.score) for x in self.new_ffs)))) inv_ff = self.ff.__class__() if self.do_weighted_reflection: inv_ff.method = 'WEIGHTED INVERSION' else: inv_ff.method = 'INVERSION' inv_ff.params = copy.deepcopy(last_best_ff.params) ref_ff = self.ff.__class__() ref_ff.method = 'REFLECTION' ref_ff.params = copy.deepcopy(last_best_ff.params) if self.do_weighted_reflection: score_diff_sum = sum([x.score - self.new_ffs[-1].score for x in self.new_ffs[:-1]]) if score_diff_sum == 0.: logger.warning( 'No difference between force field scores. ' 'Exiting simplex.') raise opt.OptError( 'No difference between force field scores. ' 'Exiting simplex.') for i in range(0, len(last_best_ff.params)): if self.do_weighted_reflection: inv_val = ( sum([x.params[i].value * (x.score - self.new_ffs[-1].score) for x in self.new_ffs[:-1]]) / score_diff_sum) else: inv_val = ( sum([x.params[i].value for x in self.new_ffs[:-1]]) / len(self.new_ffs[:-1])) inv_ff.params[i].value = inv_val ref_ff.params[i].value = ( 2 * inv_val - self.new_ffs[-1].params[i].value) ref_ff.export_ff(path=self.ff.path, lines=self.ff.lines) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) ref_ff.score = compare.compare_data(r_dict, c_dict) opt.pretty_ff_results(ref_ff) if ref_ff.score < last_best_ff.score: logger.log(20, '~~ ATTEMPTING EXPANSION ~~'.rjust(79, '~')) exp_ff = self.ff.__class__() exp_ff.method = 'EXPANSION' exp_ff.params = copy.deepcopy(last_best_ff.params) for i in range(0, len(last_best_ff.params)): exp_ff.params[i].value = ( 3 * inv_ff.params[i].value - 2 * self.new_ffs[-1].params[i].value) exp_ff.export_ff(path=self.ff.path, lines=self.ff.lines) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) exp_ff.score = compare.compare_data(r_dict, c_dict) opt.pretty_ff_results(exp_ff) if exp_ff.score < ref_ff.score: self.new_ffs[-1] = exp_ff logger.log( 20, ' -- Expansion succeeded. Keeping expanded ' 'parameters.') else: self.new_ffs[-1] = ref_ff logger.log( 20, ' -- Expansion failed. 
Keeping reflected parameters.') elif ref_ff.score < self.new_ffs[-2].score: logger.log(20, ' -- Keeping reflected parameters.') self.new_ffs[-1] = ref_ff else: logger.log(20, '~~ ATTEMPTING CONTRACTION ~~'.rjust(79, '~')) con_ff = self.ff.__class__() con_ff.method = 'CONTRACTION' con_ff.params = copy.deepcopy(last_best_ff.params) for i in range(0, len(last_best_ff.params)): if ref_ff.score > self.new_ffs[-1].score: con_val = ( (inv_ff.params[i].value + self.new_ffs[-1].params[i].value) / 2) else: con_val = ( (3 * inv_ff.params[i].value - self.new_ffs[-1].params[i].value) / 2) con_ff.params[i].value = con_val self.ff.export_ff(params=con_ff.params) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) con_ff.score = compare.compare_data(r_dict, c_dict) opt.pretty_ff_results(con_ff) if con_ff.score < self.new_ffs[-2].score: logger.log(20, ' -- Contraction succeeded.') self.new_ffs[-1] = con_ff elif self.do_massive_contraction: logger.log( 20, '~~ DOING MASSIVE CONTRACTION ~~'.rjust(79, '~')) for ff_num, ff in enumerate(self.new_ffs[1:]): for i in range(0, len(last_best_ff.params)): ff.params[i].value = ( (ff.params[i].value + self.new_ffs[0].params[i].value) / 2) self.ff.export_ff(params=ff.params) data = calculate.main(self.args_ff) r_dict = compare.data_by_type(r_data) c_dict = compare.data_by_type(data) r_dict, c_dict = compare.trim_data(r_dict,c_dict) ff.score = compare.compare_data(r_dict, c_dict) ff.method += ' MC' opt.pretty_ff_results(ff) else: logger.log( 20, ' -- Contraction failed. Keeping parmaeters ' 'anyway.') self.new_ffs[-1] = con_ff self.new_ffs = sorted(self.new_ffs, key=lambda x: x.score) if self.new_ffs[0].score < last_best_ff.score: cycles_wo_change = 0 else: cycles_wo_change += 1 logger.log(20, ' -- {} cycles without improvement out of {} ' 'allowed.'.format( cycles_wo_change, self._max_cycles_wo_change)) logger.log(20, 'BEST:') opt.pretty_ff_results(self.new_ffs[0], level=20) logger.log(20, '~~ END SIMPLEX CYCLE {} ~~'.format( current_cycle).rjust(79, '~')) self.new_ffs = sorted(self.new_ffs, key=lambda x: x.score) best_ff = self.new_ffs[0] if best_ff.score < self.ff.score: logger.log(20, '~~ SIMPLEX FINISHED WITH IMPROVEMENTS ~~'.rjust( 79, '~')) best_ff = restore_simp_ff(best_ff, self.ff) else: logger.log(20, '~~ SIMPLEX FINISHED WITHOUT IMPROVEMENTS ~~'.rjust( 79, '~')) best_ff = self.ff opt.pretty_ff_results(self.ff, level=20) opt.pretty_ff_results(best_ff, level=20) logger.log(20, ' -- Writing best force field from simplex.') best_ff.export_ff(best_ff.path) return best_ff
MIT License
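A tiny worked example of the same formula on stand-in parameter objects; the Param class here is hypothetical, and only its d1/d2/simp_var attributes matter.

class Param:
    def __init__(self, d1, d2):
        self.d1 = d1          # first derivative of the objective w.r.t. the parameter
        self.d2 = d2          # second derivative
        self.simp_var = None

params = [Param(2.0, 8.0), Param(0.5, 1.0)]
for param in params:
    param.simp_var = param.d2 / param.d1 ** 2.0

print([param.simp_var for param in params])  # [2.0, 4.0]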
torchmd/mdgrad
nff/data/graphs.py
get_neighbor_list
python
def get_neighbor_list(xyz, cutoff=5, undirected=True):
    xyz = torch.Tensor(xyz)
    n = xyz.size(0)
    dist = (xyz.expand(n, n, 3) -
            xyz.expand(n, n, 3).transpose(0, 1)).pow(2).sum(dim=2).sqrt()
    mask = (dist <= cutoff)
    mask[np.diag_indices(n)] = 0
    nbr_list = mask.nonzero()
    if undirected:
        nbr_list = nbr_list[nbr_list[:, 1] > nbr_list[:, 0]]
    return nbr_list
Get neighbor list from xyz positions of atoms.

Args:
    xyz (torch.Tensor or np.array): (N, 3) array with positions of the atoms.
    cutoff (float): maximum distance to consider atoms as connected.

Returns:
    nbr_list (torch.Tensor): (num_edges, 2) array with the indices of connected atoms.
https://github.com/torchmd/mdgrad/blob/77bd7685b74b41acf54a9483546e1e8cb545eb01/nff/data/graphs.py#L104-L132
import numpy as np import networkx as nx import torch from ase import io import numpy as np import nff DISTANCETHRESHOLDICT_SYMBOL = { ("H", "H"): 1.00, ("H", "Li"): 1.30, ("H", "N"): 1.50, ("H", "C"): 1.30, ("H", "N"): 1.30, ("H", "O"): 1.30, ("H", "F"): 1.30, ("H", "Na"): 1.65, ("H", "Si"): 1.65, ("H", "Mg"): 1.40, ("H", "S"): 1.50, ("H", "Cl"): 1.60, ("H", "Br"): 1.60, ("Li", "C"): 0.0, ("Li", "N"): 0.0, ("Li", "O"): 0.0, ("Li", "F"): 0.0, ("Li", "Mg"): 0.0, ("B", "C"): 1.70, ("B", "N"): 1.70, ("B", "O"): 1.70, ("B", "F"): 1.70, ("B", "Na"): 1.8, ("B", "Mg"): 1.8, ("B", "Cl"): 2.1, ("B", "Br"): 2.1, ("C", "C"): 1.70, ("C", "O"): 1.70, ("C", "N"): 1.8, ("C", "F"): 1.65, ("C", "Na"): 1.80, ("C", "Mg"): 1.70, ("C", "Si"): 2.10, ("C", "S"): 2.20, ("N", "O"): 1.55, ("N", "Na"): 1.70, ("N", "S"): 2.0, ("O", "Na"): 1.70, ("O", "Mg"): 1.35, ("O", "S"): 2.00, ("O", "Cl"): 1.80, ("O", "O"): 1.70, ("O", "F"): 1.50, ("O", "Si"): 1.85, ("O", "Br"): 1.70, ("F", "Mg"): 1.35, } DISTANCETHRESHOLDICT_Z = { (1., 1.): 1.00, (1., 3.): 1.30, (1., 5.): 1.50, (1., 6.): 1.30, (1., 7.): 1.30, (1., 8.): 1.30, (1., 9.): 1.30, (1., 11.): 1.65, (1., 14.): 1.65, (1., 12.): 1.40, (1., 16.): 1.50, (1., 17.): 1.60, (1., 35.): 1.60, (3., 6.): 0.0, (3., 7.): 0.0, (3., 8.): 0.0, (3., 9.): 0.0, (3., 12.): 0.0, (5., 6.): 1.70, (5., 7.): 1.70, (5., 8.): 1.70, (5., 9.): 1.70, (5., 11.): 1.8, (5., 12.): 1.8, (5., 17.): 2.1, (5., 35.): 2.1, (6., 6.): 1.70, (6., 8.): 1.70, (6., 7.): 1.8, (6., 9.): 1.65, (6., 11.): 1.80, (6., 12.): 1.70, (6., 14.): 2.10, (6., 16.): 2.20, (7., 8.): 1.55, (7., 11.): 1.70, (7., 16.): 2.0, (8., 11.): 1.70, (8., 12.): 1.35, (8., 16.): 2.00, (8., 17.): 1.80, (8., 8.): 1.70, (8., 9.): 1.50, (8., 14.): 1.85, (8., 35.): 1.70, (9., 12.): 1.35 }
MIT License
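A small usage sketch with three collinear atoms, assuming the mdgrad/nff package (and torch) are importable; with a 1.5 Å cutoff only the two adjacent pairs should remain.

import torch
from nff.data.graphs import get_neighbor_list  # assumes the package above is on the path

xyz = torch.tensor([[0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0],
                    [2.0, 0.0, 0.0]])
print(get_neighbor_list(xyz, cutoff=1.5))
# tensor([[0, 1],
#         [1, 2]])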
antsx/antspy
ants/utils/mni2tal.py
mni2tal
python
def mni2tal(xin):
    if (not isinstance(xin, (tuple, list))) or (len(xin) != 3):
        raise ValueError('xin must be tuple/list with 3 coordinates')
    x = list(xin)
    x[0] = x[0] * (-1)
    x[1] = x[1] * (-1)
    xout = x
    if (x[2] >= 0):
        xout[0] = x[0] * 0.99
        xout[1] = x[1] * 0.9688 + 0.046 * x[2]
        xout[2] = x[1] * (-0.0485) + 0.9189 * x[2]
    if (x[2] < 0):
        xout[0] = x[0] * 0.99
        xout[1] = x[1] * 0.9688 + 0.042 * x[2]
        xout[2] = x[1] * (-0.0485) + 0.839 * x[2]
    return(xout)
mni2tal for converting from ch2/mni space to tal - very approximate.

This is a standard approach but it's not very accurate.

ANTsR function: `mni2tal`

Arguments
---------
xin : tuple
    point in mni152 space.

Returns
-------
tuple

Example
-------
>>> import ants
>>> ants.mni2tal( (10,12,14) )

References
----------
http://bioimagesuite.yale.edu/mni2tal/501_95733_More\%20Accurate\%20Talairach\%20Coordinates\%20SLIDES.pdf
http://imaging.mrc-cbu.cam.ac.uk/imaging/MniTalairach
https://github.com/antsx/antspy/blob/12ab6d831e2e44abd7eb32bd602a5db38d5a0640/ants/utils/mni2tal.py#L4-L53
__all__ = ['mni2tal']
Apache License 2.0
roclark/sportsipy
sportsipy/ncaab/player.py
AbstractPlayer.defensive_rebounds
python
def defensive_rebounds(self):
    return self._defensive_rebounds
Returns an ``int`` of the total number of defensive rebounds the player grabbed during the season.
https://github.com/roclark/sportsipy/blob/c19f545d3376d62ded6304b137dc69238ac620a9/sportsipy/ncaab/player.py#L324-L329
import pandas as pd import re from functools import wraps from pyquery import PyQuery as pq from .. import utils from .constants import PLAYER_SCHEME def _cleanup(prop): try: prop = prop.replace('%', '') prop = prop.replace(',', '') return prop.replace('+', '') except AttributeError: return '' def _int_property_decorator(func): @property @wraps(func) def wrapper(*args): index = args[0]._index prop = func(*args) value = _cleanup(prop[index]) try: return int(value) except ValueError: return None return wrapper def _float_property_decorator(func): @property @wraps(func) def wrapper(*args): index = args[0]._index prop = func(*args) value = _cleanup(prop[index]) try: return float(value) except ValueError: return None return wrapper class AbstractPlayer: def __init__(self, player_id, player_name, player_data): self._player_data = player_data self._player_id = player_id self._name = player_name self._minutes_played = None self._field_goals = None self._field_goal_attempts = None self._field_goal_percentage = None self._three_pointers = None self._three_point_attempts = None self._three_point_percentage = None self._two_pointers = None self._two_point_attempts = None self._two_point_percentage = None self._free_throws = None self._free_throw_attempts = None self._free_throw_percentage = None self._offensive_rebounds = None self._defensive_rebounds = None self._total_rebounds = None self._assists = None self._steals = None self._blocks = None self._turnovers = None self._personal_fouls = None self._points = None self._true_shooting_percentage = None self._effective_field_goal_percentage = None self._three_point_attempt_rate = None self._free_throw_attempt_rate = None self._offensive_rebound_percentage = None self._defensive_rebound_percentage = None self._total_rebound_percentage = None self._assist_percentage = None self._steal_percentage = None self._block_percentage = None self._turnover_percentage = None self._usage_percentage = None self._parse_player_data(player_data) def _parse_value(self, stats, field): if field == 'conference': value = self._parse_conference(stats) elif field == 'team_abbreviation': value = self._parse_team_abbreviation(stats) else: value = utils._parse_field(PLAYER_SCHEME, stats, field) return value def _parse_player_data(self, player_data): for field in self.__dict__: short_field = str(field)[1:] if short_field == 'player_id' or short_field == 'index' or short_field == 'most_recent_season' or short_field == 'player_data' or short_field == 'name' or short_field == 'height' or short_field == 'weight' or short_field == 'position': continue field_stats = [] if type(player_data) == dict: for year, data in player_data.items(): stats = pq(data['data']) value = self._parse_value(stats, short_field) field_stats.append(value) else: stats = pq(player_data) value = self._parse_value(stats, short_field) field_stats.append(value) setattr(self, field, field_stats) @property def player_id(self): return self._player_id @property def name(self): return self._name @_int_property_decorator def minutes_played(self): return self._minutes_played @_int_property_decorator def field_goals(self): return self._field_goals @_int_property_decorator def field_goal_attempts(self): return self._field_goal_attempts @_float_property_decorator def field_goal_percentage(self): return self._field_goal_percentage @_int_property_decorator def three_pointers(self): return self._three_pointers @_int_property_decorator def three_point_attempts(self): return self._three_point_attempts @_float_property_decorator 
def three_point_percentage(self): return self._three_point_percentage @_int_property_decorator def two_pointers(self): return self._two_pointers @_int_property_decorator def two_point_attempts(self): return self._two_point_attempts @_float_property_decorator def two_point_percentage(self): return self._two_point_percentage @_float_property_decorator def effective_field_goal_percentage(self): return self._effective_field_goal_percentage @_int_property_decorator def free_throws(self): return self._free_throws @_int_property_decorator def free_throw_attempts(self): return self._free_throw_attempts @_float_property_decorator def free_throw_percentage(self): return self._free_throw_percentage @_int_property_decorator def offensive_rebounds(self): return self._offensive_rebounds @_int_property_decorator
MIT License
googleapis/python-notebooks
google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
NotebookServiceGrpcAsyncIOTransport.delete_environment
python
def delete_environment(
    self,
) -> Callable[
    [service.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation]
]:
    if "delete_environment" not in self._stubs:
        self._stubs["delete_environment"] = self.grpc_channel.unary_unary(
            "/google.cloud.notebooks.v1beta1.NotebookService/DeleteEnvironment",
            request_serializer=service.DeleteEnvironmentRequest.serialize,
            response_deserializer=operations_pb2.Operation.FromString,
        )
    return self._stubs["delete_environment"]
r"""Return a callable for the delete environment method over gRPC. Deletes a single Environment. Returns: Callable[[~.DeleteEnvironmentRequest], Awaitable[~.Operation]]: A function that, when called, will call the underlying RPC on the server.
https://github.com/googleapis/python-notebooks/blob/40bd0e8ca07a1be91be1246e6f8b142b635365d2/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py#L756-L781
import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async from google.api_core import operations_v1 from google.auth import credentials as ga_credentials from google.auth.transport.grpc import SslCredentials import packaging.version import grpc from grpc.experimental import aio from google.cloud.notebooks_v1beta1.types import environment from google.cloud.notebooks_v1beta1.types import instance from google.cloud.notebooks_v1beta1.types import service from google.longrunning import operations_pb2 from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO from .grpc import NotebookServiceGrpcTransport class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport): _grpc_channel: aio.Channel _stubs: Dict[str, Callable] = {} @classmethod def create_channel( cls, host: str = "notebooks.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, ) -> aio.Channel: return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs, ) def __init__( self, *, host: str = "notebooks.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, ) -> None: self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} self._operations_client = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: credentials = False self._grpc_channel = channel self._ssl_channel_credentials = None else: if api_mtls_endpoint: host = api_mtls_endpoint if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: return 
self._grpc_channel @property def operations_client(self) -> operations_v1.OperationsAsyncClient: if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) return self._operations_client @property def list_instances( self, ) -> Callable[ [service.ListInstancesRequest], Awaitable[service.ListInstancesResponse] ]: if "list_instances" not in self._stubs: self._stubs["list_instances"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ListInstances", request_serializer=service.ListInstancesRequest.serialize, response_deserializer=service.ListInstancesResponse.deserialize, ) return self._stubs["list_instances"] @property def get_instance( self, ) -> Callable[[service.GetInstanceRequest], Awaitable[instance.Instance]]: if "get_instance" not in self._stubs: self._stubs["get_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/GetInstance", request_serializer=service.GetInstanceRequest.serialize, response_deserializer=instance.Instance.deserialize, ) return self._stubs["get_instance"] @property def create_instance( self, ) -> Callable[[service.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: if "create_instance" not in self._stubs: self._stubs["create_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateInstance", request_serializer=service.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_instance"] @property def register_instance( self, ) -> Callable[ [service.RegisterInstanceRequest], Awaitable[operations_pb2.Operation] ]: if "register_instance" not in self._stubs: self._stubs["register_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/RegisterInstance", request_serializer=service.RegisterInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["register_instance"] @property def set_instance_accelerator( self, ) -> Callable[ [service.SetInstanceAcceleratorRequest], Awaitable[operations_pb2.Operation] ]: if "set_instance_accelerator" not in self._stubs: self._stubs["set_instance_accelerator"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceAccelerator", request_serializer=service.SetInstanceAcceleratorRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_accelerator"] @property def set_instance_machine_type( self, ) -> Callable[ [service.SetInstanceMachineTypeRequest], Awaitable[operations_pb2.Operation] ]: if "set_instance_machine_type" not in self._stubs: self._stubs["set_instance_machine_type"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceMachineType", request_serializer=service.SetInstanceMachineTypeRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_machine_type"] @property def set_instance_labels( self, ) -> Callable[ [service.SetInstanceLabelsRequest], Awaitable[operations_pb2.Operation] ]: if "set_instance_labels" not in self._stubs: self._stubs["set_instance_labels"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/SetInstanceLabels", request_serializer=service.SetInstanceLabelsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["set_instance_labels"] 
@property def delete_instance( self, ) -> Callable[[service.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: if "delete_instance" not in self._stubs: self._stubs["delete_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/DeleteInstance", request_serializer=service.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_instance"] @property def start_instance( self, ) -> Callable[[service.StartInstanceRequest], Awaitable[operations_pb2.Operation]]: if "start_instance" not in self._stubs: self._stubs["start_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StartInstance", request_serializer=service.StartInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["start_instance"] @property def stop_instance( self, ) -> Callable[[service.StopInstanceRequest], Awaitable[operations_pb2.Operation]]: if "stop_instance" not in self._stubs: self._stubs["stop_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/StopInstance", request_serializer=service.StopInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["stop_instance"] @property def reset_instance( self, ) -> Callable[[service.ResetInstanceRequest], Awaitable[operations_pb2.Operation]]: if "reset_instance" not in self._stubs: self._stubs["reset_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ResetInstance", request_serializer=service.ResetInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["reset_instance"] @property def report_instance_info( self, ) -> Callable[ [service.ReportInstanceInfoRequest], Awaitable[operations_pb2.Operation] ]: if "report_instance_info" not in self._stubs: self._stubs["report_instance_info"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ReportInstanceInfo", request_serializer=service.ReportInstanceInfoRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["report_instance_info"] @property def is_instance_upgradeable( self, ) -> Callable[ [service.IsInstanceUpgradeableRequest], Awaitable[service.IsInstanceUpgradeableResponse], ]: if "is_instance_upgradeable" not in self._stubs: self._stubs["is_instance_upgradeable"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/IsInstanceUpgradeable", request_serializer=service.IsInstanceUpgradeableRequest.serialize, response_deserializer=service.IsInstanceUpgradeableResponse.deserialize, ) return self._stubs["is_instance_upgradeable"] @property def upgrade_instance( self, ) -> Callable[ [service.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation] ]: if "upgrade_instance" not in self._stubs: self._stubs["upgrade_instance"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstance", request_serializer=service.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance"] @property def upgrade_instance_internal( self, ) -> Callable[ [service.UpgradeInstanceInternalRequest], Awaitable[operations_pb2.Operation] ]: if "upgrade_instance_internal" not in self._stubs: self._stubs["upgrade_instance_internal"] = self.grpc_channel.unary_unary( 
"/google.cloud.notebooks.v1beta1.NotebookService/UpgradeInstanceInternal", request_serializer=service.UpgradeInstanceInternalRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["upgrade_instance_internal"] @property def list_environments( self, ) -> Callable[ [service.ListEnvironmentsRequest], Awaitable[service.ListEnvironmentsResponse] ]: if "list_environments" not in self._stubs: self._stubs["list_environments"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/ListEnvironments", request_serializer=service.ListEnvironmentsRequest.serialize, response_deserializer=service.ListEnvironmentsResponse.deserialize, ) return self._stubs["list_environments"] @property def get_environment( self, ) -> Callable[[service.GetEnvironmentRequest], Awaitable[environment.Environment]]: if "get_environment" not in self._stubs: self._stubs["get_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/GetEnvironment", request_serializer=service.GetEnvironmentRequest.serialize, response_deserializer=environment.Environment.deserialize, ) return self._stubs["get_environment"] @property def create_environment( self, ) -> Callable[ [service.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation] ]: if "create_environment" not in self._stubs: self._stubs["create_environment"] = self.grpc_channel.unary_unary( "/google.cloud.notebooks.v1beta1.NotebookService/CreateEnvironment", request_serializer=service.CreateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_environment"] @property
Apache License 2.0
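Every property in the transport code above repeats one lazy-caching idiom: check self._stubs, create the unary_unary callable on first access, and return the cached callable afterwards. The sketch below is a hypothetical distillation of that idiom, not part of the generated client; `channel` is assumed to behave like a grpc.aio.Channel.

# Hypothetical helper, for illustration only; not part of the generated transport.
class LazyStubCache:
    def __init__(self, channel):
        # `channel` is assumed to expose unary_unary(path, request_serializer=..., response_deserializer=...)
        self._channel = channel
        self._stubs = {}

    def get(self, name, path, request_serializer, response_deserializer):
        # Build the RPC callable on first use, then always return the cached one.
        if name not in self._stubs:
            self._stubs[name] = self._channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]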
alexa/alexa-apis-for-python
ask-sdk-model/ask_sdk_model/interfaces/display/list_template1.py
ListTemplate1.__init__
python
def __init__(self, token=None, back_button=None, background_image=None, title=None, list_items=None):
    self.__discriminator_value = "ListTemplate1"
    self.object_type = self.__discriminator_value
    super(ListTemplate1, self).__init__(object_type=self.__discriminator_value, token=token, back_button=back_button)
    self.background_image = background_image
    self.title = title
    self.list_items = list_items
:param token:
:type token: (optional) str
:param back_button:
:type back_button: (optional) ask_sdk_model.interfaces.display.back_button_behavior.BackButtonBehavior
:param background_image:
:type background_image: (optional) ask_sdk_model.interfaces.display.image.Image
:param title:
:type title: (optional) str
:param list_items:
:type list_items: (optional) list[ask_sdk_model.interfaces.display.list_item.ListItem]
https://github.com/alexa/alexa-apis-for-python/blob/bfe5e694daaca71bfb1a4199ca8d2514f1cac6c9/ask-sdk-model/ask_sdk_model/interfaces/display/list_template1.py#L66-L87
import pprint import re import six import typing from enum import Enum from ask_sdk_model.interfaces.display.template import Template if typing.TYPE_CHECKING: from typing import Dict, List, Optional, Union, Any from datetime import datetime from ask_sdk_model.interfaces.display.list_item import ListItem as ListItem_79a19afb from ask_sdk_model.interfaces.display.image import Image as Image_1942d978 from ask_sdk_model.interfaces.display.back_button_behavior import BackButtonBehavior as BackButtonBehavior_46c3eb02 class ListTemplate1(Template): deserialized_types = { 'object_type': 'str', 'token': 'str', 'back_button': 'ask_sdk_model.interfaces.display.back_button_behavior.BackButtonBehavior', 'background_image': 'ask_sdk_model.interfaces.display.image.Image', 'title': 'str', 'list_items': 'list[ask_sdk_model.interfaces.display.list_item.ListItem]' } attribute_map = { 'object_type': 'type', 'token': 'token', 'back_button': 'backButton', 'background_image': 'backgroundImage', 'title': 'title', 'list_items': 'listItems' } supports_multiple_types = False
Apache License 2.0
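A minimal usage sketch for the ListTemplate1 constructor above. It assumes ListItem accepts a `token` keyword argument, as suggested by the imports in the context; the serialized names noted in comments follow the attribute_map.

# Hedged usage sketch, not taken from the repository.
from ask_sdk_model.interfaces.display.list_template1 import ListTemplate1
from ask_sdk_model.interfaces.display.list_item import ListItem

template = ListTemplate1(
    token="list_token",                      # serialized as "token"
    title="Nearby coffee shops",             # serialized as "title"
    list_items=[ListItem(token="item_1")],   # serialized as "listItems"; token kwarg is an assumption
)
print(template.object_type)  # "ListTemplate1", set by the discriminator in __init__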
mozilla/firefox-flicks
vendor-local/lib/python/celery/utils/serialization.py
create_exception_cls
python
def create_exception_cls(name, module, parent=None):
    if not parent:
        parent = Exception
    return subclass_exception(name, parent, module)
Dynamically create an exception class.
https://github.com/mozilla/firefox-flicks/blob/ad19ed59aac682744badae6d19a149327037f293/vendor-local/lib/python/celery/utils/serialization.py#L94-L98
from __future__ import absolute_import import inspect import sys import types import pickle as pypickle try: import cPickle as cpickle except ImportError: cpickle = None from .encoding import safe_repr if sys.version_info < (2, 6): pickle = pypickle else: pickle = cpickle or pypickle unwanted_base_classes = (StandardError, Exception, BaseException, object) if sys.version_info < (2, 5): def subclass_exception(name, parent, unused): return types.ClassType(name, (parent,), {}) else: def subclass_exception(name, parent, module): return type(name, (parent,), {'__module__': module}) def find_nearest_pickleable_exception(exc): cls = exc.__class__ getmro_ = getattr(cls, 'mro', None) if not getmro_: if not getattr(cls, '__bases__', ()): return getmro_ = lambda: inspect.getmro(cls) for supercls in getmro_(): if supercls in unwanted_base_classes: return try: exc_args = getattr(exc, 'args', []) superexc = supercls(*exc_args) pickle.loads(pickle.dumps(superexc)) except: pass else: return superexc
BSD 3-Clause New or Revised License
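A quick illustration of create_exception_cls based only on the snippet above: the result is an ordinary Exception subclass whose __module__ is the module name you pass in.

from celery.utils.serialization import create_exception_cls

# Dynamically build an exception class named RemoteTaskError in this module.
RemoteTaskError = create_exception_cls('RemoteTaskError', __name__)

try:
    raise RemoteTaskError('worker failed')
except Exception as exc:
    assert type(exc).__name__ == 'RemoteTaskError'
    assert type(exc).__module__ == __name__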
openstack/ironic
ironic/drivers/modules/agent_power.py
AgentPower.get_supported_power_states
python
def get_supported_power_states(self, task):
    return [states.REBOOT, states.SOFT_REBOOT]
Get a list of the supported power states. Only contains REBOOT and SOFT_REBOOT.

:param task: A TaskManager instance containing the node to act on.
:returns: A list with the supported power states defined in
    :mod:`ironic.common.states`.
https://github.com/openstack/ironic/blob/a4a6f26333be31b84a9b1a874dde506e61d407d3/ironic/drivers/modules/agent_power.py#L86-L95
import time from oslo_config import cfg from oslo_log import log import tenacity from ironic.common import exception from ironic.common.i18n import _ from ironic.common import states from ironic.conductor import utils as cond_utils from ironic.drivers import base from ironic.drivers.modules import agent_client CONF = cfg.CONF LOG = log.getLogger(__name__) _POWER_WAIT = 30 class AgentPower(base.PowerInterface): def __init__(self): super(AgentPower, self).__init__() if not CONF.deploy.fast_track: raise exception.InvalidParameterValue( _('[deploy]fast_track must be True to enable the agent ' 'power interface')) self._client = agent_client.AgentClient() def get_properties(self): return {} def validate(self, task): if not CONF.deploy.fast_track: raise exception.InvalidParameterValue( _('[deploy]fast_track must be True to enable the agent ' 'power interface')) if not cond_utils.agent_is_alive(task.node): raise exception.InvalidParameterValue( _('Agent seems offline for node %s, the agent power interface ' 'cannot be used') % task.node.uuid) def supports_power_sync(self, task): return False
Apache License 2.0
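An illustrative check of the return value above; it is not from the repository. Note that instantiating AgentPower requires [deploy]fast_track to be enabled, as enforced in __init__ in the context.

# Illustration only; assumes CONF.deploy.fast_track = True so __init__ does not raise.
from ironic.common import states
from ironic.drivers.modules.agent_power import AgentPower

power = AgentPower()
# The task argument is unused by this particular method.
assert power.get_supported_power_states(task=None) == [states.REBOOT, states.SOFT_REBOOT]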
mattochal/imbalanced_fsl_public
src/models/model_template.py
ModelTemplate.net_eval
python
def net_eval(self, target_set, ptracker):
    if len(target_set) == 0:
        return torch.tensor(0.).to(self.device)
    targets_x, targets_y = target_set
    pred_y = self.backbone(targets_x)
    loss = self.strategy.apply_outer_loss(self.loss_fn, pred_y, targets_y)
    ptracker.add_task_performance(
        pred_y.detach().cpu().numpy(),
        targets_y.detach().cpu().numpy(),
        loss.detach().cpu().numpy())
    return loss
Network evaluation on the target set after the inner loop / adaptation process. Used mainly by the demo app.
https://github.com/mattochal/imbalanced_fsl_public/blob/d654a9898e19bf4278af8a4bfcebef5950c615e0/src/models/model_template.py#L103-L120
import torch.nn as nn import torch import time import sys import argparse class ModelTemplate(nn.Module): @staticmethod def get_parser(parser=None): if parser is None: parser = argparse.ArgumentParser() parser.add_argument('--seed', type=int, default=-1, help='seed, if -1 set in code') parser.add_argument('--lr', type=float, default=0.001, help='learning rate') parser.add_argument('--lr_decay', type=float, default=1.0,help='learning rate decay') parser.add_argument('--lr_decay_step', type=float, default=1, help='learning rate decay step size') return parser def __init__(self, backbone, strategy, args, device): super().__init__() self.backbone = backbone self.strategy = strategy self.args = args self.device = device self.mode = None self.epoch = -1 def setup_model(self): self.loss_fn = None self.optimizer = torch.optim.Adam(self.backbone.parameters(), lr=self.args.lr) self.lr_scheduler = torch.optim.lr_scheduler.StepLR( self.optimizer, step_size=self.args.lr_decay_step, gamma=self.args.lr_decay) def meta_train(self, task, ptracker): self.mode='train' self.train() self.net_reset() total_losses = [] for support_set, target_set in task: self.net_train(support_set) loss = self.net_eval(target_set, ptracker) total_losses.append(loss) self.optimizer.zero_grad() loss = torch.stack(total_losses).sum(0) loss.backward() self.optimizer.step() self.total_losses = [] def meta_test(self, task, ptracker): self.mode='test' self.eval() self.meta_eval(task, ptracker) def meta_val(self, task, ptracker): self.mode='val' self.eval() self.meta_eval(task, ptracker) def meta_eval(self, task, ptracker): with torch.no_grad(): self.net_reset() for support_set, target_set in task: self.net_train(support_set) self.net_eval(target_set, ptracker) self.net_post() def net_reset(self): self.strategy.reset() def net_train(self, support_set): support_set = self.strategy.update_support_set(support_set)
MIT License
siviltaram/persona-dialogue-generation
parlai/core/utils.py
TimeLogger.__init__
python
def __init__(self):
    self.timer = Timer()
    self.tot_time = 0
Set up timer.
https://github.com/siviltaram/persona-dialogue-generation/blob/3cc800ffe3c5a8d16ed26522cda839acfab8d417/parlai/core/utils.py#L224-L227
from collections import deque from functools import lru_cache import math import os import random import time import warnings import heapq try: import torch __TORCH_AVAILABLE = True except ImportError: __TORCH_AVAILABLE = False NEAR_INF = 1e20 DISPLAY_MESSAGE_DEFAULT_FIELDS = { 'episode_done', 'id', 'image', 'text', 'labels', 'eval_labels', 'label_candidates', 'text_candidates', 'reward', 'eval_labels_vec', 'text_vec', 'label_candidates_vecs' } def maintain_dialog_history(history, observation, reply='', historyLength=1, useReplies='label_else_model', dict=None, useStartEndIndices=True, splitSentences=False): def parse(txt, splitSentences): if dict is not None: if splitSentences: vec = [dict.txt2vec(t) for t in txt.split('\n')] else: vec = dict.txt2vec(txt) return vec else: return [txt] if 'dialog' not in history: history['dialog'] = deque(maxlen=historyLength) history['episode_done'] = False history['labels'] = [] if history['episode_done']: history['dialog'].clear() history['labels'] = [] useReplies = 'none' history['episode_done'] = False if useReplies != 'none': if useReplies == 'model' or (useReplies == 'label_else_model' and len(history['labels']) == 0): if reply: if useStartEndIndices: reply = dict.start_token + ' ' + reply history['dialog'].extend(parse(reply, splitSentences)) elif len(history['labels']) > 0: r = history['labels'][0] history['dialog'].extend(parse(r, splitSentences)) obs = observation if 'text' in obs: if useStartEndIndices: obs['text'] = dict.end_token + ' ' + obs['text'] history['dialog'].extend(parse(obs['text'], splitSentences)) history['episode_done'] = obs['episode_done'] labels = obs.get('labels', obs.get('eval_labels', None)) if labels is not None: if useStartEndIndices: history['labels'] = [dict.start_token + ' ' + l for l in labels] else: history['labels'] = labels return history['dialog'] def load_cands(path, lines_have_ids=False, cands_are_replies=False): if path is None: return None cands = [] cnt = 0 with open(path) as read: for line in read: line = line.strip().replace('\\n', '\n') if len(line) > 0: cnt = cnt + 1 if cnt == 1 and line[0:2] == '1 ': lines_have_ids = True if '\t' in line and not cands_are_replies: cands_are_replies = True cands = [] if lines_have_ids: space_idx = line.find(' ') line = line[space_idx + 1:] if cands_are_replies: sp = line.split('\t') if len(sp) > 1 and sp[1] != '': cands.append(sp[1]) else: cands.append(line) else: cands.append(line) return cands class Predictor(object): def __init__(self, args=None, **kwargs): from parlai.core.params import ParlaiParser from parlai.core.agents import create_agent if args is None: args = [] for k, v in kwargs.items(): args.append('--' + str(k).replace('_', '-')) args.append(str(v)) parser = ParlaiParser(True, True) self.opt = parser.parse_args(args) self.agent = create_agent(self.opt) def predict(self, observation): if 'episode_done' not in observation: observation['episode_done'] = True self.agent.observe(observation) reply = self.agent.act() return reply class Timer(object): def __init__(self): self.running = True self.total = 0 self.start = time.time() def reset(self): self.running = True self.total = 0 self.start = time.time() return self def resume(self): if not self.running: self.running = True self.start = time.time() return self def stop(self): if self.running: self.running = False self.total += time.time() - self.start return self def time(self): if self.running: return self.total + time.time() - self.start return self.total class TimeLogger():
MIT License
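TimeLogger simply wraps the Timer defined in the context above; a short sketch of how that Timer behaves (the sleep stands in for real work):

import time
from parlai.core.utils import Timer  # defined in the context above

t = Timer()                  # starts running immediately
time.sleep(0.1)              # stand-in for real work
running_total = t.time()     # elapsed seconds while still running
t.stop()
assert t.time() == t.total   # once stopped, time() reports the accumulated total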
earwig/mwparserfromhell
src/mwparserfromhell/nodes/external_link.py
ExternalLink.brackets
python
def brackets(self):
    return self._brackets
Whether to enclose the URL in brackets or display it straight.
https://github.com/earwig/mwparserfromhell/blob/abcf6298aa53f536f6626be1d8fd4a863afed878/src/mwparserfromhell/nodes/external_link.py#L79-L81
from ._base import Node from ..utils import parse_anything __all__ = ["ExternalLink"] class ExternalLink(Node): def __init__(self, url, title=None, brackets=True, suppress_space=False): super().__init__() self.url = url self.title = title self.brackets = brackets self.suppress_space = suppress_space def __str__(self): if self.brackets: if self.title is not None: if self.suppress_space is True: return "[" + str(self.url) + str(self.title) + "]" return "[" + str(self.url) + " " + str(self.title) + "]" return "[" + str(self.url) + "]" return str(self.url) def __children__(self): yield self.url if self.title is not None: yield self.title def __strip__(self, **kwargs): if self.brackets: if self.title: return self.title.strip_code(**kwargs) return None return self.url.strip_code(**kwargs) def __showtree__(self, write, get, mark): if self.brackets: write("[") get(self.url) if self.title is not None: get(self.title) if self.brackets: write("]") @property def url(self): return self._url @property def title(self): return self._title @property
MIT License
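A hedged example of how the brackets flag changes rendering, per the __str__ method in the context; it assumes the url/title setters accept plain strings (elsewhere in the module they run through parse_anything).

from mwparserfromhell.nodes.external_link import ExternalLink

# With brackets the title is rendered; without them only the URL appears.
print(str(ExternalLink("http://example.com/", title="Example", brackets=True)))
# -> [http://example.com/ Example]
print(str(ExternalLink("http://example.com/", title="Example", brackets=False)))
# -> http://example.com/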
smnorris/bcdata
bcdata/wfs.py
make_request
python
def make_request(parameters):
    r = requests.get(bcdata.WFS_URL, params=parameters)
    log.debug(r.url)
    return r.json()["features"]
Submit a GetFeature request to DataBC WFS and return the features.
https://github.com/smnorris/bcdata/blob/ba04920ef259688f3a8ac0389da14feb4d986151/bcdata/wfs.py#L138-L143
from datetime import datetime from datetime import timedelta import json import logging import math import os from pathlib import Path from urllib.parse import urlparse import sys import warnings import xml.etree.ElementTree as ET from concurrent.futures import ThreadPoolExecutor from owslib.wfs import WebFeatureService import requests import geopandas as gpd import bcdata if not sys.warnoptions: warnings.simplefilter("ignore") log = logging.getLogger(__name__) def get_sortkey(table): wfs = WebFeatureService(url=bcdata.OWS_URL, version="2.0.0") columns = list(wfs.get_schema("pub:" + table)["properties"].keys()) if "OBJECTID" in columns: return "OBJECTID" elif "SEQUENCE_ID" in columns: return "SEQUENCE_ID" else: return columns[0] def check_cache(path): if not os.path.exists(path): return True else: mod_date = datetime.fromtimestamp(os.path.getmtime(path)) if mod_date < (datetime.now() - timedelta(days=1)): return True else: return False def get_table_name(package): package = package.lower() params = {"id": package} r = requests.get(bcdata.BCDC_API_URL + "package_show", params=params) if r.status_code != 200: raise ValueError("{d} is not present in DataBC API list".format(d=package)) result = r.json()["result"] layer_urls = [r["url"] for r in result["resources"] if r["format"] == "wms"] layer_names = [urlparse(l).path.split("/")[3] for l in layer_urls] if len(layer_names) > 1: raise ValueError( "Package {} includes more than one WFS resource, specify one of the following: \n{}".format( package, "\n".join(layer_names) ) ) return layer_names[0] def validate_name(dataset): if dataset.upper() in list_tables(): return dataset.upper() else: return get_table_name(dataset.upper()) def list_tables(refresh=False, cache_file=None): if not cache_file: cache_file = os.path.join(str(Path.home()), ".bcdata") if refresh or check_cache(cache_file): wfs = WebFeatureService(url=bcdata.OWS_URL, version="2.0.0") bcdata_objects = [i.strip("pub:") for i in list(wfs.contents)] with open(cache_file, "w") as outfile: json.dump(sorted(bcdata_objects), outfile) else: with open(cache_file, "r") as infile: bcdata_objects = json.load(infile) return bcdata_objects def get_count(dataset, query=None): table = validate_name(dataset) payload = { "service": "WFS", "version": "2.0.0", "request": "GetFeature", "typeName": table, "resultType": "hits", "outputFormat": "json", } if query: payload["CQL_FILTER"] = query r = requests.get(bcdata.WFS_URL, params=payload) return int(ET.fromstring(r.text).attrib["numberMatched"])
MIT License
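A sketch of the kind of GetFeature parameter dict make_request expects, mirroring the payloads built elsewhere in this module (for example in get_count). The layer name and the `count` key are illustrative assumptions, not values taken from the snippet.

from bcdata.wfs import make_request

params = {
    "service": "WFS",
    "version": "2.0.0",
    "request": "GetFeature",
    "typeName": "WHSE_BASEMAPPING.FWA_STREAM_NETWORKS_SP",  # hypothetical layer name
    "outputFormat": "json",
    "count": 10,  # assumption: standard WFS 2.0 paging parameter, not used by the snippet itself
}
features = make_request(params)  # list of GeoJSON feature dicts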
karmab/kcli
kvirt/providers/sampleprovider.py
Kbase.info
python
def info(self, name, output='plain', fields=[], values=False, vm=None, debug=False):
    print("not implemented")
    return {'result': 'success'}
:param name:
:param output:
:param fields:
:param values:
:return:
https://github.com/karmab/kcli/blob/e53cc1c9a7ece7ad5dbc71b79e995629e7259385/kvirt/providers/sampleprovider.py#L231-L241
class Kbase(object): def __init__(self, host='127.0.0.1', port=None, user='root', debug=False): self.conn = 'base' return def close(self): print("not implemented") return def exists(self, name): return def net_exists(self, name): print("not implemented") return def disk_exists(self, pool, name): print("not implemented") def create(self, name, virttype=None, profile='', flavor=None, plan='kvirt', cpumodel='Westmere', cpuflags=[], cpupinning=[], numcpus=2, memory=512, guestid='guestrhel764', pool='default', image=None, disks=[{'size': 10}], disksize=10, diskthin=True, diskinterface='virtio', nets=['default'], iso=None, vnc=False, cloudinit=True, reserveip=False, reservedns=False, reservehost=False, start=True, keys=None, cmds=[], ips=None, netmasks=None, gateway=None, nested=True, dns=None, domain=None, tunnel=False, files=[], enableroot=True, alias=[], overrides={}, tags=[], storemetadata=False, sharedfolders=[], kernel=None, initrd=None, cmdline=None, cpuhotplug=False, memoryhotplug=False, numamode=None, numa=[], pcidevices=[], tpm=False, placement=[], autostart=False, rng=False, metadata={}, securitygroups=[]): print("not implemented") return {'result': 'success'} def start(self, name): print("not implemented") return {'result': 'success'} def stop(self, name): print("not implemented") return {'result': 'success'} def snapshot(self, name, base, revert=False, delete=False, listing=False): print("not implemented") return def restart(self, name): print("not implemented") return {'result': 'success'} def report(self): print("not implemented") return def status(self, name): print("not implemented") return def list(self): print("not implemented") return [] def console(self, name, tunnel=False, web=False): print("not implemented") return def serialconsole(self, name, web=False): print("not implemented") return
Apache License 2.0
qubole/qds-sdk-py
qds_sdk/cluster.py
Cluster.reassign_label
python
def reassign_label(cls, destination_cluster, label):
    conn = Qubole.agent(version=Cluster.api_version)
    data = {
        "destination_cluster": destination_cluster,
        "label": label
    }
    return conn.put(cls.rest_entity_path + "/reassign-label", data)
Reassign a label from one cluster to another.

Args:
    `destination_cluster`: id/label of the cluster to move the label to
    `label`: label to be moved from the source cluster
https://github.com/qubole/qds-sdk-py/blob/fc18ceb4f8889ef1810b1f311f6d507b75cb1ddc/qds_sdk/cluster.py#L573-L587
from qds_sdk.qubole import Qubole from qds_sdk.resource import Resource from argparse import ArgumentParser from qds_sdk import util import logging import json log = logging.getLogger("qds_cluster") def str2bool(v): return v.lower() in ("yes", "true", "t", "1") class Cluster(Resource): rest_entity_path = "clusters" api_version = "v1.2" @classmethod def _parse_list(cls, args): argparser = ArgumentParser(prog="cluster list") group = argparser.add_mutually_exclusive_group() group.add_argument("--id", dest="cluster_id", help="show cluster with this id") group.add_argument("--label", dest="label", help="show cluster with this label") group.add_argument("--state", dest="state", action="store", choices=['up', 'down', 'pending', 'terminating'], help="list only clusters in the given state") pagination_group = group.add_argument_group() pagination_group.add_argument("--page", dest="page", action="store", type=int, help="page number") pagination_group.add_argument("--per-page", dest="per_page", action="store", type=int, help="number of clusters to be retrieved per page") arguments = argparser.parse_args(args) return vars(arguments) @classmethod def list(cls, state=None, page=None, per_page=None): conn = Qubole.agent() params = {} if page: params['page'] = page if per_page: params['per_page'] = per_page if (params.get('page') or params.get('per_page')) and Qubole.version == 'v1.2': log.warn("Pagination is not supported with API v1.2. Fetching all clusters.") params = None if not params else params cluster_list = conn.get(cls.rest_entity_path, params=params) if state is None: return cluster_list elif state is not None: result = [] if Qubole.version == 'v1.2': for cluster in cluster_list: if state.lower() == cluster['cluster']['state'].lower(): result.append(cluster) elif Qubole.version == 'v1.3': cluster_list = cluster_list['clusters'] for cluster in cluster_list: if state.lower() == cluster['state'].lower(): result.append(cluster) return result @classmethod def show(cls, cluster_id_label): conn = Qubole.agent() return conn.get(cls.element_path(cluster_id_label)) @classmethod def status(cls, cluster_id_label): conn = Qubole.agent(version=Cluster.api_version) return conn.get(cls.element_path(cluster_id_label) + "/state") @classmethod def master(cls, cluster_id_label): cluster_status = cls.status(cluster_id_label) if cluster_status.get("state") == 'UP': return list(filter(lambda x: x["role"] == "master", cluster_status.get("nodes")))[0] else: return cluster_status @classmethod def start(cls, cluster_id_label, api_version=None): conn = Qubole.agent(version=api_version) data = {"state": "start"} return conn.put(cls.element_path(cluster_id_label) + "/state", data) @classmethod def terminate(cls, cluster_id_label): conn = Qubole.agent(version=Cluster.api_version) data = {"state": "terminate"} return conn.put(cls.element_path(cluster_id_label) + "/state", data) @classmethod def _parse_create_update(cls, args, action, api_version): argparser = ArgumentParser(prog="cluster %s" % action) create_required = False label_required = False if action == "create": create_required = True elif action == "update": argparser.add_argument("cluster_id_label", help="id/label of the cluster to update") elif action == "clone": argparser.add_argument("cluster_id_label", help="id/label of the cluster to update") label_required = True argparser.add_argument("--label", dest="label", nargs="+", required=(create_required or label_required), help="list of labels for the cluster" + " (atleast one label is required)") ec2_group = 
argparser.add_argument_group("ec2 settings") ec2_group.add_argument("--access-key-id", dest="aws_access_key_id", help="access key id for customer's aws" + " account. This is required while" + " creating the cluster",) ec2_group.add_argument("--secret-access-key", dest="aws_secret_access_key", help="secret access key for customer's aws" + " account. This is required while" + " creating the cluster",) ec2_group.add_argument("--aws-region", dest="aws_region", choices=["us-east-1", "us-west-2", "ap-northeast-1", "sa-east-1", "eu-west-1", "ap-southeast-1", "us-west-1"], help="aws region to create the cluster in",) ec2_group.add_argument("--aws-availability-zone", dest="aws_availability_zone", help="availability zone to" + " create the cluster in",) ec2_group.add_argument("--subnet-id", dest="subnet_id", help="subnet to create the cluster in",) ec2_group.add_argument("--vpc-id", dest="vpc_id", help="vpc to create the cluster in",) ec2_group.add_argument("--master-elastic-ip", dest="master_elastic_ip", help="elastic ip to attach to master",) ec2_group.add_argument("--bastion-node-public-dns", dest="bastion_node_public_dns", help="public dns name of the bastion node. Required only if cluster is in private subnet of a EC2-VPC",) ec2_group.add_argument("--role-instance-profile", dest="role_instance_profile", help="IAM Role instance profile to attach on cluster",) hadoop_group = argparser.add_argument_group("hadoop settings") node_config_group = argparser.add_argument_group("node configuration") if (api_version >= 1.3) else hadoop_group node_config_group.add_argument("--master-instance-type", dest="master_instance_type", help="instance type to use for the hadoop" + " master node",) node_config_group.add_argument("--slave-instance-type", dest="slave_instance_type", help="instance type to use for the hadoop" + " slave nodes",) node_config_group.add_argument("--initial-nodes", dest="initial_nodes", type=int, help="number of nodes to start the" + " cluster with",) node_config_group.add_argument("--max-nodes", dest="max_nodes", type=int, help="maximum number of nodes the cluster" + " may be auto-scaled up to") node_config_group.add_argument("--slave-request-type", dest="slave_request_type", choices=["ondemand", "spot", "hybrid", "spotblock"], help="purchasing option for slave instaces",) node_config_group.add_argument("--root-volume-size", dest="root_volume_size", type=int, help="size of root volume in GB") hadoop_group.add_argument("--custom-config", dest="custom_config_file", help="location of file containg custom" + " hadoop configuration overrides") hadoop_group.add_argument("--use-hbase", dest="use_hbase", action="store_true", default=None, help="Use hbase on this cluster",) hadoop_group.add_argument("--is-ha", dest="is_ha", action="store_true", default=None, help="Enable HA config for cluster") if api_version >= 1.3: qubole_placement_policy_group = hadoop_group.add_mutually_exclusive_group() qubole_placement_policy_group.add_argument("--use-qubole-placement-policy", dest="use_qubole_placement_policy", action="store_true", default=None, help="Use Qubole Block Placement policy" + " for clusters with spot nodes",) qubole_placement_policy_group.add_argument("--no-use-qubole-placement-policy", dest="use_qubole_placement_policy", action="store_false", default=None, help="Do not use Qubole Block Placement policy" + " for clusters with spot nodes",) fallback_to_ondemand_group = node_config_group.add_mutually_exclusive_group() fallback_to_ondemand_group.add_argument("--fallback-to-ondemand", 
dest="fallback_to_ondemand", action="store_true", default=None, help="Fallback to on-demand nodes if spot nodes" + " could not be obtained. Valid only if slave_request_type is spot",) fallback_to_ondemand_group.add_argument("--no-fallback-to-ondemand", dest="fallback_to_ondemand", action="store_false", default=None, help="Dont Fallback to on-demand nodes if spot nodes" + " could not be obtained. Valid only if slave_request_type is spot",) node_cooldown_period_group = argparser.add_argument_group("node cooldown period settings") node_cooldown_period_group.add_argument("--node-base-cooldown-period", dest="node_base_cooldown_period", type=int, help="Cooldown period for on-demand nodes" + " unit: minutes") node_cooldown_period_group.add_argument("--node-spot-cooldown-period", dest="node_spot_cooldown_period", type=int, help="Cooldown period for spot nodes" + " unit: minutes") ebs_volume_group = argparser.add_argument_group("ebs volume settings") ebs_volume_group.add_argument("--ebs-volume-count", dest="ebs_volume_count", type=int, help="Number of EBS volumes to attach to" + " each instance of the cluster",) ebs_volume_group.add_argument("--ebs-volume-type", dest="ebs_volume_type", choices=["standard", "gp2"], help=" of the EBS volume. Valid values are " + "'standard' (magnetic) and 'gp2' (ssd).",) ebs_volume_group.add_argument("--ebs-volume-size", dest="ebs_volume_size", type=int, help="Size of each EBS volume, in GB",) enable_rubix_group = hadoop_group.add_mutually_exclusive_group() enable_rubix_group.add_argument("--enable-rubix", dest="enable_rubix", action="store_true", default=None, help="Enable rubix for cluster", ) enable_rubix_group.add_argument("--no-enable-rubix", dest="enable_rubix", action="store_false", default=None, help="Do not enable rubix for cluster", ) hadoop2 = hadoop_group.add_mutually_exclusive_group() hadoop2.add_argument("--use-hadoop2", dest="use_hadoop2", action="store_true", default=None, help="Use hadoop2 instead of hadoop1") hadoop2.add_argument("--use-hadoop1", dest="use_hadoop2", action="store_false", default=None, help="Use hadoop1 instead of hadoop2. 
This is the default.") hadoop2.add_argument("--use-spark", dest="use_spark", action="store_true", default=None, help="Turn on spark for this cluster") spot_group = argparser.add_argument_group("spot instance settings" + " (valid only when slave-request-type is hybrid or spot)") spot_group.add_argument("--maximum-bid-price-percentage", dest="maximum_bid_price_percentage", type=float, help="maximum value to bid for spot instances" + " expressed as a percentage of the base" + " price for the slave node instance type",) spot_group.add_argument("--timeout-for-spot-request", dest="timeout_for_request", type=int, help="timeout for a spot instance request" + " unit: minutes") spot_group.add_argument("--maximum-spot-instance-percentage", dest="maximum_spot_instance_percentage", type=int, help="maximum percentage of instances that may" + " be purchased from the aws spot market," + " valid only when slave-request-type" + " is 'hybrid'",) stable_spot_group = argparser.add_argument_group("stable spot instance settings") stable_spot_group.add_argument("--stable-maximum-bid-price-percentage", dest="stable_maximum_bid_price_percentage", type=float, help="maximum value to bid for stable node spot instances" + " expressed as a percentage of the base" + " price for the master and slave node instance types",) stable_spot_group.add_argument("--stable-timeout-for-spot-request", dest="stable_timeout_for_request", type=int, help="timeout for a stable node spot instance request" + " unit: minutes") stable_spot_group.add_argument("--stable-allow-fallback", dest="stable_allow_fallback", default=None, type=str2bool, help="whether to fallback to on-demand instances for stable nodes" + " if spot instances aren't available") spot_block_group = argparser.add_argument_group("spot block settings") spot_block_group.add_argument("--spot-block-duration", dest="spot_block_duration", type=int, help="spot block duration" + " unit: minutes") fairscheduler_group = argparser.add_argument_group( "fairscheduler configuration options") fairscheduler_group.add_argument("--fairscheduler-config-xml", dest="fairscheduler_config_xml_file", help="location for file containing" + " xml with custom configuration" + " for the fairscheduler",) fairscheduler_group.add_argument("--fairscheduler-default-pool", dest="default_pool", help="default pool for the" + " fairscheduler",) security_group = argparser.add_argument_group("security setttings") ephemerals = security_group.add_mutually_exclusive_group() ephemerals.add_argument("--encrypted-ephemerals", dest="encrypted_ephemerals", action="store_true", default=None, help="encrypt the ephemeral drives on" + " the instance",) ephemerals.add_argument("--no-encrypted-ephemerals", dest="encrypted_ephemerals", action="store_false", default=None, help="don't encrypt the ephemeral drives on" + " the instance",) security_group.add_argument("--customer-ssh-key", dest="customer_ssh_key_file", help="location for ssh key to use to" + " login to the instance") security_group.add_argument("--persistent-security-group", dest="persistent_security_group", help="a security group to associate with each" + " node of the cluster. 
Typically used" + " to provide access to external hosts") presto_group = argparser.add_argument_group("presto settings") enabling_presto = presto_group.add_mutually_exclusive_group() enabling_presto.add_argument("--enable-presto", dest="enable_presto", action="store_true", default=None, help="Enable presto for this cluster",) enabling_presto.add_argument("--disable-presto", dest="enable_presto", action="store_false", default=None, help="Disable presto for this cluster",) presto_group.add_argument("--presto-custom-config", dest="presto_custom_config_file", help="location of file containg custom" + " presto configuration overrides") termination = argparser.add_mutually_exclusive_group() termination.add_argument("--disallow-cluster-termination", dest="disallow_cluster_termination", action="store_true", default=None, help="don't auto-terminate idle clusters," + " use this with extreme caution",) termination.add_argument("--allow-cluster-termination", dest="disallow_cluster_termination", action="store_false", default=None, help="auto-terminate idle clusters,") ganglia = argparser.add_mutually_exclusive_group() ganglia.add_argument("--enable-ganglia-monitoring", dest="enable_ganglia_monitoring", action="store_true", default=None, help="enable ganglia monitoring for the" + " cluster",) ganglia.add_argument("--disable-ganglia-monitoring", dest="enable_ganglia_monitoring", action="store_false", default=None, help="disable ganglia monitoring for the" + " cluster",) argparser.add_argument("--node-bootstrap-file", dest="node_bootstrap_file", help="""name of the node bootstrap file for this cluster. It should be in stored in S3 at <account-default-location>/scripts/hadoop/NODE_BOOTSTRAP_FILE """,) argparser.add_argument("--custom-ec2-tags", dest="custom_ec2_tags", help="""Custom ec2 tags to be set on all instances of the cluster. Specified as JSON object (key-value pairs) e.g. 
--custom-ec2-tags '{"key1":"value1", "key2":"value2"}' """,) env_group = argparser.add_argument_group("environment settings") env_group.add_argument("--env-name", dest="env_name", default=None, help="name of Python and R environment") env_group.add_argument("--python-version", dest="python_version", default=None, help="version of Python in environment") env_group.add_argument("--r-version", dest="r_version", default=None, help="version of R in environment") arguments = argparser.parse_args(args) return arguments @classmethod def create(cls, cluster_info, version=None): conn = Qubole.agent(version=version) return conn.post(cls.rest_entity_path, data=cluster_info) @classmethod def update(cls, cluster_id_label, cluster_info, version=None): conn = Qubole.agent(version=version) return conn.put(cls.element_path(cluster_id_label), data=cluster_info) @classmethod def clone(cls, cluster_id_label, cluster_info, version=None): conn = Qubole.agent(version=version) return conn.post(cls.element_path(cluster_id_label) + '/clone', data=cluster_info) @classmethod def _parse_cluster_manage_command(cls, args, action): argparser = ArgumentParser(prog="cluster_manage_command") group = argparser.add_mutually_exclusive_group(required=True) group.add_argument("--id", dest="cluster_id", help="execute on cluster with this id") group.add_argument("--label", dest="label", help="execute on cluster with this label") if action == "remove" or action == "update": argparser.add_argument("--private_dns", help="the private_dns of the machine to be updated/removed", required=True) if action == "update": argparser.add_argument("--command", help="the update command to be executed", required=True, choices=["replace"]) arguments = argparser.parse_args(args) return arguments @classmethod def _parse_reassign_label(cls, args): argparser = ArgumentParser(prog="cluster reassign_label") argparser.add_argument("destination_cluster", metavar="destination_cluster_id_label", help="id/label of the cluster to move the label to") argparser.add_argument("label", help="label to be moved from the source cluster") arguments = argparser.parse_args(args) return arguments @classmethod
Apache License 2.0
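A hedged usage sketch for reassign_label. Qubole.configure is the SDK's usual credential setup call; the token and identifiers below are placeholders.

from qds_sdk.qubole import Qubole
from qds_sdk.cluster import Cluster

Qubole.configure(api_token='YOUR_API_TOKEN')  # placeholder token
# Move the label "spark-prod" onto cluster 1234 (an id or a label both work).
Cluster.reassign_label(destination_cluster='1234', label='spark-prod')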
facebookresearch/fastmri
banding_removal/fastmri/base_trainer.py
BaseTrainer.preprocess_data_tensor
python
def preprocess_data_tensor(self, t):
    return t.to(self.device, non_blocking=True)
Override to cast or relocate the input tensor; the default moves it to ``self.device`` without changing its dtype.
https://github.com/facebookresearch/fastmri/blob/3f9acefc6f740c789e1b720f944ab7821c319226/banding_removal/fastmri/base_trainer.py#L250-L252
import logging import os import sys import pdb import pickle import math import datetime import random import functools from collections import OrderedDict, namedtuple from timeit import default_timer as timer import torch from torch import nn import torch.backends.cudnn as cudnn from torch.utils.data import DataLoader from torch.utils.data.sampler import RandomSampler, SequentialSampler from fastmri.common.utils import CallbackDataset from . import model from . import optimizer from .data.mri_data import SliceData class BaseTrainer(object): def __init__(self, args): self.args = args self.exp_dir = args.exp_dir self.exp_dir.mkdir(exist_ok=True, parents=True) self.presetup(args) self.initial_setup(args) self.transform_setup(args) self.data_setup(args) self.loader_setup(args) self.model_setup(args) self.parameter_groups_setup(args) self.optimizer_setup(args) self.loss_setup(args) self.runinfo_setup(args) def presetup(self, args): pass def transform_setup(self, args): pass def initial_setup(self, args): logging.info(f"run pid: {os.getpid()} parent: {os.getppid()}") logging.info("#########") logging.info(args.__dict__) logging.info(f"Rank: {args.rank} World_size: {args.world_size}, Run {args.run_name}") args.cuda = torch.cuda.is_available() logging.info(f"Pytorch version: {torch.__version__}") logging.info("Using CUDA: {} CUDA AVAIL: {} #DEVICES: {} VERSION: {}".format( args.cuda, torch.cuda.is_available(), torch.cuda.device_count(), torch.version.cuda)) if not args.cuda: self.device = 'cpu' else: self.device = 'cuda' cudnn.benchmark = True cudnn.enabled = True random.seed(args.seed) torch.manual_seed(args.seed) torch.cuda.manual_seed_all(args.seed) def data_setup(self, args): logging.info("Creating data objects") self.train_data = SliceData( root=self.args.data_path / f'{args.challenge}_train', transform=self.train_transform, args=self.args, ) val_args = self.args val_args.max_kspace_width = None val_args.min_kspace_width = None val_args.max_kspace_height = None val_args.min_kspace_height = None val_args.start_slice = None val_args.end_slice = None val_args.acquisition_types = None val_args.acquisition_systems = None self.dev_data = SliceData( root=self.args.data_path / f'{args.challenge}_val', transform=self.dev_transform, args=val_args, ) if self.args.resize_type == "none": display_size, indices = list(self.dev_data.slice_indices_by_size.items())[0] self.display_data = CallbackDataset( callback=functools.partial(data_for_index, self.dev_data, indices), start=0, end=len(indices), increment=len(indices) // args.display_count) else: ndev = len(self.dev_data) indices = range(0, ndev) self.display_data = CallbackDataset( callback=functools.partial(data_for_index, self.dev_data, indices), start=0, end=ndev, increment=args.display_count) def loader_setup(self, args): logging.info("Creating samplers ...") train_sampler = RandomSampler(self.train_data) dev_sampler = RandomSampler(self.dev_data) logging.info("Creating data loaders ...") self.train_loader = DataLoader( dataset=self.train_data, batch_size=args.batch_size, num_workers=args.workers, pin_memory=args.pin_memory, sampler=train_sampler, ) self.dev_loader = DataLoader( dataset=self.dev_data, batch_size=args.batch_size, num_workers=args.workers, pin_memory=args.pin_memory, sampler=dev_sampler, ) self.display_loader = DataLoader( dataset=self.display_data, batch_size=args.batch_size, num_workers=args.workers, pin_memory=args.pin_memory, drop_last=False, ) logging.debug("Determining batches ...") self.nbatches = len(self.train_loader) 
logging.info("Train Loader created, batches: {}".format(self.nbatches)) def model_setup(self, args): if not args.gan: self.model = model.load(args.architecture, args) self.model.to(self.device) def parameter_groups_setup(self, args): self.parameter_groups = self.model.parameters() def optimizer_setup(self, args): self.optimizer = optimizer.load(self.parameter_groups, args) def loss_setup(self, args): pass def runinfo_setup(self, args): self.runinfo = {} self.runinfo["args"] = args self.runinfo["at_epoch"] = 0 self.runinfo["seed"] = args.seed self.runinfo["best_dev_loss"] = 1e9 self.runinfo["epoch"] = [] self.runinfo["train_losses"] = [] self.runinfo["train_fnames"] = [] self.runinfo["dev_losses"] = [] def serialize(self): return { 'runinfo': self.runinfo, 'epoch': self.runinfo["at_epoch"], 'args': self.args, 'model': self.model.state_dict(), 'optimizer': self.optimizer.state_dict(), 'best_dev_loss': self.runinfo["best_dev_loss"], 'exp_dir': self.exp_dir } def train(self): beginning = timer() args = self.args for epoch in range(self.runinfo["at_epoch"], args.epochs): self.runinfo["at_epoch"] = epoch logging.info("Starting epoch {}".format(epoch)) if self.args.is_distributed: self.train_loader.sampler.set_epoch(epoch) seed = self.runinfo["seed"] + 1031*epoch torch.manual_seed(seed) torch.cuda.manual_seed_all(seed) self.start_of_epoch_hook(epoch) start = timer() self.run(epoch) sys.stdout.flush() end = timer() logging.info(f"TRAIN Epoch took: {datetime.timedelta(seconds=end-start)}") logging.info("") self.end_of_epoch_hook(epoch) end = timer() logging.info(f"Run took: {datetime.timedelta(seconds=end-beginning)}") logging.info("FINISHED") self.postrun() def backwards(self, loss): loss.backward() def start_of_epoch_hook(self, epoch): if self.args.eval_at_start: dev_loss = self.stats(epoch, self.dev_loader, "Dev") logging.info(f"EVAL Loss: {dev_loss}") def end_of_epoch_hook(self, epoch): self.end_of_epoch_eval_hook(epoch) def end_of_epoch_eval_hook(self, epoch): logging.info("Starting evaluation") start = timer() dev_loss = self.stats(epoch, self.dev_loader, "Dev") end = timer() logging.info(f"EVAL Loss: {dev_loss} time: {datetime.timedelta(seconds=end-start)}") if math.isnan(dev_loss) or math.isinf(dev_loss): logging.info("NaN or Inf detected, ending training") self.postrun() sys.exit(1) is_new_best = dev_loss < self.runinfo["best_dev_loss"] self.runinfo["best_dev_loss"] = min(self.runinfo["best_dev_loss"], dev_loss) def postrun(self): pass
MIT License
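The docstring above says "override to cast", so a subclass wanting, say, half precision might look like the hypothetical sketch below (not part of the repository).

from fastmri.base_trainer import BaseTrainer

class HalfPrecisionTrainer(BaseTrainer):
    def preprocess_data_tensor(self, t):
        # Still move to the trainer's device, but also cast floating tensors to fp16.
        t = t.to(self.device, non_blocking=True)
        return t.half() if t.is_floating_point() else t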
mcs07/chemdataextractor
chemdataextractor/parse/cem.py
standardize_role
python
def standardize_role(role):
    role = role.lower()
    if any(c in role for c in {'synthesis', 'give', 'yield', 'afford', 'product', 'preparation of'}):
        return 'product'
    return role
Convert role text into standardized form.
https://github.com/mcs07/chemdataextractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/parse/cem.py#L279-L284
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import logging import re from lxml import etree from ..model import Compound from .actions import join, fix_whitespace from .common import roman_numeral, cc, nnp, hyph, nns, nn, cd, ls, optdelim, bcm, icm, rbrct, lbrct, sym, jj, hyphen, quote, dt from .base import BaseParser from .elements import I, R, W, T, ZeroOrMore, Optional, Not, Group, End, Start, OneOrMore, Any log = logging.getLogger(__name__) alphanumeric = R('^(d-)?(\d{1,2}[A-Za-z]{1,2}[′″‴‶‷⁗]?)(-d)?$') numeric = R('^\d{1,3}$') letter_number = R('^(H\d)?[LSNM]{1,2}\d\d?$') cm_blacklist = (W('in') | W(':') + R('^m\.?p\.?$', re.I) | W(':') + Any() + R('^N\.?M\.?R\.?\(?$', re.I)) exclude_prefix = Start() + (lbrct + roman_numeral + rbrct + Not(hyphen) | (R('^\d{1,3}(\.\d{1,3}(\.\d{1,3}(\.\d{1,3})?)?)?$') + Not(hyphen)) | (I('stage') | I('step') | I('section') | I('part')) + (alphanumeric | numeric | roman_numeral | R('^[A-Z]$'))) cm = (exclude_prefix.hide() + OneOrMore(Not(cm_blacklist) + icm)) | (bcm + ZeroOrMore(Not(cm_blacklist) + icm)) comma = (W(',') | T(',')).hide() colon = (W(':') | T(':')).hide() include_prefix = Not(bcm) + R('^(deuterated|triflated|butylated|brominated|acetylated|twisted)$', re.I) label_type = (Optional(I('reference') | I('comparative')) + R('^(compound|ligand|chemical|dye|derivative|complex|example|intermediate|product|formulae?|preparation)s?$', re.I))('role').add_action(join) + Optional(colon).hide() synthesis_of = ((I('synthesis') | I('preparation') | I('production') | I('data')) + (I('of') | I('for')))('role').add_action(join) to_give = (I('to') + (I('give') | I('yield') | I('afford')) | I('afforded') | I('affording') | I('yielded'))('role').add_action(join) label_blacklist = R('^(31P|[12]H|[23]D|15N|14C|[4567890]\d+)$') prefixed_label = R('^(cis|trans)-((d-)?(\d{1,2}[A-Za-z]{0,2}[′″‴‶‷⁗]?)(-d)?|[LS]\d\d?)$') strict_chemical_label = Not(label_blacklist) + (alphanumeric | roman_numeral | letter_number | prefixed_label)('label') lenient_chemical_label = numeric('label') | R('^([A-Z]\d{1,3})$')('label') | strict_chemical_label chemical_label = ((label_type + lenient_chemical_label + ZeroOrMore((T('CC') | comma) + lenient_chemical_label)) | (Optional(label_type.hide()) + strict_chemical_label + ZeroOrMore((T('CC') | comma) + strict_chemical_label))) chemical_label_phrase1 = (Optional(synthesis_of) + label_type + lenient_chemical_label + ZeroOrMore((T('CC') | comma) + lenient_chemical_label)) chemical_label_phrase2 = (synthesis_of + Optional(label_type) + lenient_chemical_label + ZeroOrMore((T('CC') | comma) + lenient_chemical_label)) chemical_label_phrase3 = (to_give + Optional(dt) + Optional(label_type) + lenient_chemical_label + Optional(lbrct + OneOrMore(Not(rbrct) + Any()) + rbrct).hide() + (End() | I('as') | colon | comma).hide()) chemical_label_phrase = Group(chemical_label_phrase1 | chemical_label_phrase2 | chemical_label_phrase3)('chemical_label_phrase') element_name = 
R('^(actinium|aluminium|aluminum|americium|antimony|argon|arsenic|astatine|barium|berkelium|beryllium|bismuth|bohrium|boron|bromine|cadmium|caesium|calcium|californium|carbon|cerium|cesium|chlorine|chromium|cobalt|copernicium|copper|curium|darmstadtium|dubnium|dysprosium|einsteinium|erbium|europium|fermium|flerovium|fluorine|francium|gadolinium|gallium|germanium|hafnium|hassium|helium|holmium|hydrargyrum|hydrogen|indium|iodine|iridium|iron|kalium|krypton|lanthanum|laIrencium|lithium|livermorium|lutetium|magnesium|manganese|meitnerium|mendelevium|mercury|molybdenum|natrium|neodymium|neon|neptunium|nickel|niobium|nitrogen|nobelium|osmium|oxygen|palladium|phosphorus|platinum|plumbum|plutonium|polonium|potassium|praseodymium|promethium|protactinium|radium|radon|rhenium|rhodium|roentgenium|rubidium|ruthenium|rutherfordium|samarium|scandium|seaborgium|selenium|silicon|silver|sodium|stannum|stibium|strontium|sulfur|tantalum|technetium|tellurium|terbium|thallium|thorium|thulium|tin|titanium|tungsten|ununoctium|ununpentium|ununseptium|ununtrium|uranium|vanadium|Iolfram|xenon|ytterbium|yttrium|zinc|zirconium)$', re.I) element_symbol = R('^(Ag|Au|Br|Cd|Cl|Cu|Fe|Gd|Ge|Hg|Mg|Pb|Pd|Pt|Ru|Sb|Si|Sn|Ti|Xe|Zn|Zr)$') registry_number = R('^BRN-?\d+$') | R('^CHEMBL-?\d+$') | R('^GSK-?\d{3-7}$') | R('^\[?(([1-9]\d{2,7})|([5-9]\d))-\d\d-\d\]?$') amino_acid = R('^((Ala|Arg|Asn|Asp|Cys|Glu|Gln|Gly|Ile|Leu|Lys|Met|Phe|Pro|Ser|Thr|Trp|Tyr|Val)-?)+$') amino_acid_name = ( R('^(histidine|isoleucine|leucine|lysine|methionine|phenylalanine|threonine|tryptophan|valine|selenocysteine|serine|tyrosine|alanine|arginine|asparagine|cysteine|glutamine|glycine|proline)$', re.I) | I('aspartic') + I('acid') | I('glutamic') + I('acid') ) formula = ( R('^C\(?\d{1,3}\)?(([HNOP]|Cl)\(?\d\d?\)?)+(\(?\d?[\+\-]\d?\)?)?$') | R('^((\(?\d{2,3}\)?)?(Fe|Ti|Mg|Ru|Cd|Se)\(?(\d\d?|[IV]+)?\)?((O|Hg)\(?\d?\d?\)?)?)+(\(?\d?[\+\-]\d?\)?)?$') | R('(NaOH|CaCl\d?\d?|EtOH|EtAc|MeOH|CF\d|C\d?\d?H\d\d?)+$') | R('(NO\d|BH4|Ca\(2\+\)|Ti\(0\)2|\(CH3\)2CHOH|\(CH3\)2CO|\(CH3\)2NCOH|C2H5CN|CH2ClCH2Cl|CH3C6H5|CH3CN|CH3CO2H|CH3COCH3|CH3COOH|CH3NHCOH|CH3Ph|CH3SOCH3|Cl2CH2|ClCH2CH2Cl)') | R('^(\(CD3\)2CO|\(CDCl2\)2|C6D6|C2D5CN|CD2Cl2|CD3CN|CD3COCD3|CD3OD|CD3SOCD3|CDCl3|CH3OD|D2O|EtOD|MeOD)$') | R('^[\[\{\(].*(NH\d|H2O|NO\d|C\d?H\d|C–H|NBu4|CF3|CD3|CO2|[bp]i?py|\(CO\)|\d,\d[\'′]?-|BF4|PF6|Cl\d|Fe\d|Ph\d).*[\]\}\)]$') | R('^[\[\{\(]{1,2}(Ru|Ph|Py|Cu|Ir|Pt|Et\d).*[\]\}\)]$') | R('^(GABA|NO|\(\d\)H|KCl)$') ) solvent_formula = ( W('CCl4') | W('(CH3)2CHOH') | W('(CH3)2CO') | W('(CH3)2NCOH') | W('C2H4Cl2') | W('C2H5CN') | W('C2H5OH') | W('C5H5N') | W('C6H12') | W('C6H14') | W('C6H5CH3') | W('C6H5Cl') | W('C6H6') | W('C7H8') | W('CH2Cl2') | W('CH2ClCH2Cl') | W('CH3C6H5') | W('CH3Cl') | W('CH3CN') | W('CH3CO2H') | W('CH3COCH3') | W('CH3COOH') | W('CH3NHCOH') | W('CH3NO2') | W('CH3OH') | W('CH3Ph') | W('CH3SOCH3') | W('CHCl2') | W('CHCl3') | W('Cl2CH2') | W('ClCH2CH2Cl') ) nmr_solvent = ( I('THF') + W('-') + I('d8') | I('d8') + W('-') + I('THF') | I('acetone') + W('-') + I('d6') | I('d6') + W('-') + I('acetone') | I('chloroform') + W('-') + I('d') | I('d') + W('-') + I('chloroform') | I('methanol') + W('-') + I('d4') | I('d4') + W('-') + I('methanol') | I('pyridine') + W('-') + I('d5') | I('d5') + W('-') + I('pyridine') | I('DMSO') + W('-') + I('d6') | I('d6') + W('-') + I('DMSO') | I('dimethylsulfoxide') + W('-') + I('d6') | I('d6') + W('-') + I('dimethylsulfoxide') | W('MeOH') + W('-') + I('d4') | I('d4') + W('-') + W('MeOH') | I('benzene-d6') + W('-') + I('d6') | I('d6') + 
W('-') + I('benzene') | I('d2') + W('-') + I('tetrachloroethane') | I('tetrachloroethane') + W('-') + I('d2') | I('(CD3)2CO') | I('(CDCl2)2') | I('C6D6') | I('C2D5CN') | I('CD2Cl2') | I('CD3CN') | I('CD3COCD3') | I('CD3OD') | I('CD3SOCD3') | I('CDCl3') | I('CH3OD') | I('D2O') | W('EtOD') | W('MeOD') | I('THF-d8') | I('d8-THF') | I('acetone-d6') | I('d6-acetone') | I('chloroform-d') | I('d-chloroform') | I('methanol-d4') | I('d4-methanol') | I('pyridine-d5') | I('d5-pyridine') | I('DMSO-d6') | I('d6-DMSO') | I('dimethylsulfoxide-d6') | W('C7D8') | I('d6-dimethylsulfoxide') | W('MeOH-d4') | W('d4-MeOH') | I('DMSO') | I('benzene-d6') | I('d6-benzene') | I('1,1,2,2-tetrachloroethane-d2') | I('tetrachloroethane-d2') | I('d2-tetrachloroethane') ) other_solvent = ( I('1-butanol') | I('1-butylimidazole') | I('1-cyclohexanol') | I('1-decanol') | I('1-heptanol') | I('1-hexanol') | I('1-methylethyl') + I('acetate') | I('1-octanol') | I('1-pentanol') | I('1-phenylethanol') | I('1-propanol') | I('1-undecanol') | I('1,1,1-trifluoroethanol') | I('1,1,1,3,3,3-hexafluoro-2-propanol') | I('1,1,1,3,3,3-hexafluoropropan-2-ol') | I('1,1,2-trichloroethane') | I('1,2-c2h4cl2') | I('1,2-dichloroethane') | I('1,2-dimethoxyethane') | I('1,2-dimethylbenzene') | I('1,2-ethanediol') | I('1,2,4-trichlorobenzene') | I('1,4-dimethylbenzene') | I('1,4-dioxane') | I('2-(n-morpholino)ethanesulfonic') + I('acid') | I('2-butanol') | I('2-butanone') | I('2-me-thf') | I('2-methf') | I('2-methoxy-2-methylpropane') | I('2-methyl') + I('tetrahydrofuran') | I('2-methylpentane') | I('2-methylpropan-1-ol') | I('2-methylpropan-2-ol') | I('2-methyltetrahydrofuran') | I('2-proh') | I('2-propanol') | I('2-propyl') + I('acetate') | I('2-pyrrolidone') | I('2,2,2-trifluoroethanol') | I('2,2,4-trimethylpentane') | I('2Me-THF') | I('2MeTHF') | I('3-methyl-pentane') | I('4-methyl-1,3-dioxolan-2-one') | I('acetic') + I('acid') | I('aceto-nitrile') | I('acetone') | I('acetonitrile') | I('acetononitrile') | I('AcOEt') | I('AcOH') | I('AgNO3') | I('aniline') | I('anisole') | I('benzene') | I('benzonitrile') | I('benzyl') + I('alcohol') | I('bromoform') | I('Bu2O') | I('Bu4NBr') | I('Bu4NClO4') | I('Bu4NPF6') | I('BuCN') | I('BuOH') | I('butan-1-ol') | I('butan-2-ol') | I('butan-2-one') | I('butane') | I('butanol') | I('butanone') | I('butene') | I('butyl') + I('acetate') | I('butyl') + I('acetonitrile') | I('butyl') + I('alcohol') | I('butyl') + I('amine') | I('butyl') + I('chloride') | I('butyl') + I('imidazole') | I('butyronitrile') | I('c-hexane') | I('carbon') + I('disulfide') | I('carbon') + I('tetrachloride') | I('chlorobenzene') | I('chloroform') | I('chloromethane') | I('chlorotoluene') | I('CHX') | I('cumene') | I('cyclohexane') | I('cyclohexanol') | I('cyclopentyl') + I('methyl') + I('ether') | I('DCE') | I('DCM') | I('decalin') | I('decan-1-ol') | I('decane') | I('decanol') | I('DEE') | I('di-isopropyl') + I('ether') | I('di-n-butyl') + I('ether') | I('di-n-hexyl') + I('ether') | I('dibromoethane') | I('dibutoxymethane') | I('dibutyl') + I('ether') | I('dichloro-methane') | I('dichlorobenzene') | I('dichloroethane') | I('dichloromethane') | I('diethoxymethane') | I('diethyl') + I('carbonate') | I('diethyl') + I('ether') | I('diethylamine') | I('diethylether') | I('diglyme') | I('dihexyl') + I('ether') | I('diiodomethane') | I('diisopropyl') + I('ether') | I('diisopropylamine') | I('dimethoxyethane') | I('dimethoxymethane') | I('dimethyl') + I('acetamide') | I('dimethyl') + I('acetimide') | I('dimethyl') + I('benzene') | I('dimethyl') + 
I('carbonate') | I('dimethyl') + I('ether') | I('dimethyl') + I('formamide') | I('dimethyl') + I('sulfoxide') | I('dimethylacetamide') | I('dimethylbenzene') | I('dimethylformamide') | I('dimethylformanide') | I('dimethylsulfoxide') | I('dioctyl') + I('sodium') + I('sulfosuccinate') | I('dioxane') | I('dioxolane') | I('dipropyl') + I('ether') | I('DMAc') | I('DMF') | I('DMSO') | I('Et2O') | I('EtAc') | I('EtAcO') | I('EtCN') | I('ethane') + I('diol') | I('ethane-1,2-diol') | I('ethanol') | I('ethyl') + I('(S)-2-hydroxypropanoate') | I('ethyl') + I('acetate') | I('ethyl') + I('benzoate') | I('ethyl') + I('formate') | I('ethyl') + I('lactate') | I('ethyl') + I('propionate') | I('ethylacetamide') | I('ethylacetate') | I('ethylene') + I('carbonate') | I('ethylene') + I('glycol') | I('ethyleneglycol') | I('ethylhexan-1-ol') | I('EtOAc') | I('EtOH') | I('eucalyptol') | I('F3-ethanol') | I('F3-EtOH') | I('formamide') | I('formic') + I('acid') | I('glacial') + I('acetic') + I('acid') | I('glycerol') | I('H2O') | I('H2O2') | I('H2SO4') | I('HBF4') | I('HCl') | I('HClO4') | I('HCO2H') | I('HCONH2') | I('heptan-1-ol') | I('heptane') | I('heptanol') | I('heptene') | I('HEX') | I('hexadecylamine') | I('hexafluoroisopropanol') | I('hexafluoropropanol') | I('hexan-1-ol') | I('hexane') | I('hexanes') | I('hexanol') | I('hexene') | I('hexyl') + I('ether') | I('HFIP') | I('HFP') | I('HNO3') | I('hydrochloric') + I('acid') | I('hydrogen') + I('peroxide') | I('iodobenzene') | I('isohexane') | I('isooctane') | I('isopropanol') | I('isopropyl') + I('benzene') | I('KBr') | I('LiCl') | I('ligroine') | I('limonene') | I('Me-THF') | I('Me2CO') | I('MeCN') | I('MeCO2Et') | I('MeNO2') | I('MeOH') | I('mesitylene') | I('methanamide') | I('methanol') | I('MeTHF') | I('methoxybenzene') | I('methoxyethylamine') | I('methyl') + I('acetamide') | I('methyl') + I('acetoacetate') | I('methyl') + I('benzene') | I('methyl') + I('butane') | I('methyl') + I('cyclohexane') | I('methyl') + I('ethyl') + I('ketone') | I('methyl') + I('formamide') | I('methyl') + I('formate') | I('methyl') + I('isobutyl') + I('ketone') | I('methyl') + I('laurate') | I('methyl') + I('methanoate') | I('methyl') + I('naphthalene') | I('methyl') + I('pentane') | I('methyl') + I('propan-1-ol') | I('methyl') + I('propan-2-ol') | I('methyl') + I('propionate') | I('methyl') + I('pyrrolidin-2-one') | I('methyl') + I('pyrrolidine') | I('methyl') + I('pyrrolidinone') | I('methyl') + I('t-butyl') + I('ether') | I('methyl') + I('tetrahydrofuran') | I('methyl-2-pyrrolidone') | I('methylbenzene') | I('methylcyclohexane') | I('methylene') + I('chloride') | I('methylformamide') | I('methyltetrahydrofuran') | I('MIBK') | I('morpholine') | I('mTHF') | I('n-butanol') | I('n-butyl') + I('acetate') | I('n-decane') | I('n-heptane') | I('n-HEX') | I('n-hexane') | I('n-methylformamide') | I('n-methylpyrrolidone') | I('n-nonane') | I('n-octanol') | I('n-pentane') | I('n-propanol') | I('n,n-dimethylacetamide') | I('n,n-dimethylformamide') | I('n,n-DMF') | I('Na2SO4') | I('NaCl') | I('NaClO4') | I('NaHCO3') | I('NaOH') | I('nBu4NBF4') | I('nitric') + I('acid') | I('nitrobenzene') | I('nitromethane') | I('nonane') | I('nujol') | I('o-dichlorobenzene') | I('o-xylene') | I('octan-1-ol') | I('octane') | I('octanol') | I('octene') | I('ODCB') | I('p-xylene') | I('pentan-1-ol') | I('pentane') | I('pentanol') | I('pentanone') | I('pentene') | I('PeOH') | I('perchloric') + I('acid') | I('PhCH3') | I('PhCl') | I('PhCN') | I('phenoxyethanol') | I('phenyl') + I('acetylene') | I('Phenyl') + 
I('ethanol') | I('phenylamine') | I('phenylethanolamine') | I('phenylmethanol') | I('PhMe') | I('phosphate') | I('phosphate') + I('buffered') + I('saline') | I('pinane') | I('piperidine') | I('polytetrafluoroethylene') | I('potassium') + I('bromide') | I('potassium') + I('phosphate') + I('buffer') | I('PrCN') | I('PrOH') | I('propan-1-ol') | I('propan-2-ol') | I('propane') | I('propane-1,2-diol') | I('propane-1,2,3-triol') | I('propanol') | I('propene') | I('propionic') + I('acid') | I('propionitrile') | I('propyl') + I('acetate') | I('propyl') + I('amine') | I('propylene') + I('carbonate') | I('propylene') + I('glycol') | I('pyridine') | I('pyrrolidone') | I('quinoline') | I('silver') + I('nitrate') | I('SNO2') | I('sodium') + I('chloride') | I('sodium') + I('hydroxide') | I('sodium') + I('perchlorate') | I('sulfuric') + I('acid') | I('t-butanol') | I('TBABF4') | I('TBAF') | I('TBAH') | I('TBAOH') | I('TBAP') | I('TBAPF6') | I('TEAP') | I('TEOA') | I('tert-butanol') | I('tert-butyl') + I('alcohol') | I('tetrabutylammonium') + I('hexafluorophosphate') | I('tetrabutylammonium') + I('hydroxide') | I('tetrachloroethane') | I('tetrachloroethylene') | I('tetrachloromethane') | I('tetrafluoroethylene') | I('tetrahydrofuran') | I('tetralin') | I('tetramethylsilane') | I('tetramethylurea') | I('tetrapiperidine') | I('TFA') | I('TFE') | I('THF') | I('tin') + I('dioxide') | I('titanium') + I('dioxide') | I('toluene') | I('tri-n-butyl') + I('phosphate') | I('triacetate') | I('triacetin') | I('tribromomethane') | I('tributyl') + I('phosphate') | I('trichlorobenzene') | I('trichloroethene') | I('trichloromethane') | I('triethyl') + I('amine') | I('triethyl') + I('phosphate') | I('triethylamine') | I('trifluoroacetic') + I('acid') | I('trifluoroethanol') | I('trimethyl') + I('benzene') | I('trimethyl') + I('pentane') | I('tris') | I('undecan-1-ol') | I('undecanol') | I('valeronitrile') | I('water') | I('xylene') | I('xylol') | I('[nBu4N][BF4]') | I('BCN') | I('ACN') | I('BTN') | I('BHDC') | I('AOT') | I('DMA') | I('Triton X-100') | I('MOPS') | I('TX-100') | I('H2O') + I('+') + I('TX') | I('H2O-Triton X') | I('MES') | I('HDA') | I('PIPES') | I('heavy') + I('water') | I('IPA') | I('KPB') | I('MCH') | I('NPA') | I('NMP') | I('PBS') | I('HEPES') | I('SDS') | I('TBP') | I('TEA') ) solvent_name_options = (nmr_solvent | solvent_formula | other_solvent) solvent_name = (Optional(include_prefix) + solvent_name_options)('name').add_action(join).add_action(fix_whitespace) chemical_name_options = ( cm | element_name | element_symbol | registry_number | amino_acid | amino_acid_name | formula ) chemical_name = (Optional(include_prefix) + chemical_name_options)('name').add_action(join).add_action(fix_whitespace) label_name_cem = (chemical_label + optdelim + chemical_name)('cem') labelled_as = (R('^labell?ed$') + W('as')).hide() optquote = Optional(quote.hide()) label_before_name = Optional(synthesis_of | to_give) + label_type + optdelim + label_name_cem + ZeroOrMore(optdelim + cc + optdelim + label_name_cem) likely_abbreviation = (Optional(include_prefix + Optional(hyphen)) + R('^([A-Z]{2,6}(\-[A-Z]{1,6})?|[A-Z](\-[A-Z]{2,6}))$'))('name').add_action(join).add_action(fix_whitespace) name_with_optional_bracketed_label = (Optional(synthesis_of | to_give) + chemical_name + Optional(lbrct + Optional(labelled_as + optquote) + (chemical_label | lenient_chemical_label | likely_abbreviation) + optquote + rbrct))('cem') lenient_name = OneOrMore(Not(rbrct) + (bcm | icm | jj | nn | nnp | nns | hyph | cd | ls | 
W(',')))('name').add_action(join).add_action(fix_whitespace) lenient_name_with_bracketed_label = (Start() + Optional(synthesis_of) + lenient_name + lbrct + label_type.hide() + lenient_chemical_label + rbrct)('cem') name_with_comma_within = Start() + Group(Optional(synthesis_of) + (cm + W(',') + cm + Not(cm) + Not(I('and')))('name').add_action(join).add_action(fix_whitespace))('cem') cem = (lenient_name_with_bracketed_label | label_before_name | name_with_comma_within | name_with_optional_bracketed_label) cem_phrase = Group(cem)('cem_phrase') r_equals = R('^[R]$') + W('=') + OneOrMore(Not(rbrct) + (bcm | icm | nn | nnp | nns | hyph | cd | ls)) of_table = (I('of') | I('in')) + Optional(dt) + I('table') bracketed_after_name = Optional(comma) + lbrct + Optional(labelled_as + optquote) + (chemical_label | lenient_chemical_label | likely_abbreviation) + optquote + Optional(Optional(comma) + r_equals | of_table) + rbrct comma_after_name = comma + Optional(labelled_as + optquote) + (chemical_label | likely_abbreviation) compound_heading_ending = (Optional(comma) + ((lbrct + (chemical_label | lenient_chemical_label | lenient_name) + Optional(Optional(comma) + r_equals | of_table) + rbrct) | chemical_label) + Optional(R('^[:;]$')).hide() | comma + (chemical_label | lenient_chemical_label)) + Optional(W('.')) + End() section_no = Optional(I('stage') | I('step') | I('section') | I('part')) + (T('CD') | R('^\d{1,3}(\.\d{1,3}(\.\d{1,3}(\.\d{1,3})?)?)?$') | (Optional(lbrct) + roman_numeral + rbrct)) compound_heading_style1 = Start() + Optional(section_no.hide()) + Optional(synthesis_of) + OneOrMore(Not(compound_heading_ending) + (bcm | icm | jj | nn | nnp | nns | hyph | sym | cd | ls | W(',')))('name').add_action(join).add_action(fix_whitespace) + compound_heading_ending + End() compound_heading_style2 = chemical_name + Optional(bracketed_after_name) compound_heading_style3 = synthesis_of + (lenient_name | chemical_name) + Optional(bracketed_after_name | comma_after_name) compound_heading_style4 = label_type + lenient_chemical_label + ZeroOrMore((T('CC') | comma) + lenient_chemical_label) + (lenient_name | chemical_name) + Optional(bracketed_after_name | comma_after_name) compound_heading_phrase = Group(compound_heading_style1 | compound_heading_style2 | compound_heading_style3 | compound_heading_style4 | chemical_label)('cem')
MIT License
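Behaviour of standardize_role as written above, shown as assertions: any role text mentioning synthesis, yield, product, and so on collapses to 'product'; everything else is simply lower-cased.

from chemdataextractor.parse.cem import standardize_role

assert standardize_role('Synthesis of') == 'product'
assert standardize_role('to give') == 'product'
assert standardize_role('Intermediate') == 'intermediate'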