Dataset columns:
repository_name: string (lengths 7 to 107)
function_path: string (lengths 4 to 190)
function_identifier: string (lengths 1 to 236)
language: string (1 class)
function: string (lengths 9 to 647k)
docstring: string (lengths 5 to 488k)
function_url: string (lengths 71 to 285)
context: string (lengths 0 to 2.51M)
license: string (5 classes)
jfarmer08/ha-sengledapi
custom_components/sengledapi/switch.py
SengledSwitch.async_turn_on
python
async def async_turn_on(self, **kwargs):
    await self._switch.async_turn_on()
Instruct the switch to turn on.
https://github.com/jfarmer08/ha-sengledapi/blob/3a99cb494f788f49f2c0e944a786ebd58328f2a0/custom_components/sengledapi/switch.py#L73-L75
import logging from .sengledapi.sengledapi import SengledApi from . import DOMAIN import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchDevice _LOGGER = logging.getLogger(__name__) ATTRIBUTION = "Data provided by Sengled" async def async_setup_platform(hass, config, add_entities, discovery_info=None): _LOGGER.debug("""Creating new SengledApi switch component""") add_entities( SengledSwitch(switch) for switch in await hass.data[DOMAIN]["sengledapi_account"].async_list_switch() ) class SengledSwitch(SwitchDevice): def __init__(self, switch): self._switch = switch self._name = switch._friendly_name self._state = switch._state self._avaliable = True self._device_mac = switch._device_mac self._device_model = switch._device_model @property def name(self): return self._name @property def available(self): return self._avaliable @property def is_on(self): return self._state @property def unique_id(self): return self._device_mac @property def device_state_attributes(self): return { ATTR_ATTRIBUTION: ATTRIBUTION, "state": self._state, "available": self._avaliable, "device model": self._device_model, "mac": self._device_mac, }
Apache License 2.0
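For illustration, a minimal sketch of the delegation pattern used by SengledSwitch.async_turn_on; the stub classes are hypothetical and Home Assistant itself is not required to run it:

import asyncio

class StubSengledSwitch:
    # Hypothetical stand-in for the SengledApi switch object wrapped by the entity.
    async def async_turn_on(self):
        print("turn-on command sent (stubbed)")

class StubEntity:
    # Mirrors the delegation shown in SengledSwitch.async_turn_on above.
    def __init__(self, switch):
        self._switch = switch

    async def async_turn_on(self, **kwargs):
        await self._switch.async_turn_on()

asyncio.run(StubEntity(StubSengledSwitch()).async_turn_on())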
directgroup/direct
direct/utils/__init__.py
detach_dict
python
def detach_dict(data: Dict[str, torch.Tensor], keys: Optional[Union[List, Tuple, KeysView]] = None) -> Dict:
    if keys is None:
        keys = data.keys()
    return {k: v.detach() for k, v in data.items() if k in keys if isinstance(v, torch.Tensor)}
Return a detached copy of a dictionary. Only torch.Tensor values are detached.

Parameters
----------
data : Dict[str, torch.Tensor]
keys : List, Tuple
    Subselection of keys to detach.

Returns
-------
Dict
    Dictionary with the selected tensors detached.
https://github.com/directgroup/direct/blob/961989bfac0177988de04e8a3ff563db850575e2/direct/utils/__init__.py#L190-L206
import abc import ast import functools import importlib import logging import pathlib import random import subprocess import sys from collections import OrderedDict from typing import Any, Callable, Dict, KeysView, List, Optional, Tuple, Union import numpy as np import torch logger = logging.getLogger(__name__) def is_complex_data(data: torch.Tensor, complex_last: bool = True) -> bool: if 2 not in data.shape: return False if complex_last: if data.size(-1) != 2: return False else: if data.ndim == 6: if data.size(2) != 2 and data.size(-1) != 2: return False elif data.ndim == 5: if data.size(1) != 2 and data.size(2) != 2 and data.size(-1) != 2: return False elif data.ndim == 4: if data.size(1) != 2 and data.size(-1) != 2: return False elif data.ndim == 3: if data.size(-1) != 2: return False else: raise ValueError(f"Not compatible number of dimensions for complex data. Got {data.ndim}.") return True def is_power_of_two(number: int) -> bool: return number != 0 and ((number & (number - 1)) == 0) def ensure_list(data: Any) -> List: if data is None: return [] if not isinstance(data, (list, tuple)): return [data] return list(data) def cast_as_path(data: Optional[Union[pathlib.Path, str]]) -> Optional[pathlib.Path]: if data is None: return None return pathlib.Path(data) def str_to_class(module_name: str, function_name: str) -> Callable: tree = ast.parse(function_name) func_call = tree.body[0].value args = [ast.literal_eval(arg) for arg in func_call.args] if hasattr(func_call, "args") else [] kwargs = ( {arg.arg: ast.literal_eval(arg.value) for arg in func_call.keywords} if hasattr(func_call, "keywords") else {} ) module = importlib.import_module(module_name) if not args and not kwargs: return getattr(module, function_name) return functools.partial(getattr(module, func_call.func.id), *args, **kwargs) def dict_to_device( data: Dict[str, torch.Tensor], device: Union[torch.device, str, None], keys: Union[List, Tuple, KeysView, None] = None, ) -> Dict: if keys is None: keys = data.keys() return {k: v.to(device) if isinstance(v, torch.Tensor) else v for k, v in data.items() if k in keys}
Apache License 2.0
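A hedged usage sketch for detach_dict (assumes PyTorch and the direct package are installed; the key names are arbitrary):

import torch
from direct.utils import detach_dict

data = {
    "image": torch.randn(2, 3, requires_grad=True),
    "target": torch.randn(2, 3, requires_grad=True),
    "filename": "slice_001",   # non-tensor values are filtered out
}
detached = detach_dict(data, keys=["image", "target"])
assert all(not v.requires_grad for v in detached.values())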
airtestproject/airtest
airtest/core/api.py
double_click
python
def double_click(v):
    if isinstance(v, Template):
        pos = loop_find(v, timeout=ST.FIND_TIMEOUT)
    else:
        try_log_screen()
        pos = v
    G.DEVICE.double_click(pos)
    delay_after_operation()
    return pos
Perform a double click.

:param v: target to touch, either a ``Template`` instance or absolute coordinates (x, y)
:return: final position to be clicked

:Example:

    >>> double_click((100, 100))
    >>> double_click(Template(r"tpl1606730579419.png"))
https://github.com/airtestproject/airtest/blob/c29d0462fe29db5c04cda31de1c05bcae5991061/airtest/core/api.py#L366-L384
import os import time from six.moves.urllib.parse import parse_qsl, urlparse from airtest.core.cv import Template, loop_find, try_log_screen from airtest.core.error import TargetNotFoundError from airtest.core.settings import Settings as ST from airtest.utils.compat import script_log_dir from airtest.core.helper import (G, delay_after_operation, import_device_cls, logwrap, set_logdir, using, log) def init_device(platform="Android", uuid=None, **kwargs): cls = import_device_cls(platform) dev = cls(uuid, **kwargs) G.add_device(dev) return dev def connect_device(uri): d = urlparse(uri) platform = d.scheme host = d.netloc uuid = d.path.lstrip("/") params = dict(parse_qsl(d.query)) if host: params["host"] = host.split(":") dev = init_device(platform, uuid, **params) return dev def device(): return G.DEVICE def set_current(idx): dev_dict = {dev.uuid: dev for dev in G.DEVICE_LIST} if idx in dev_dict: current_dev = dev_dict[idx] elif isinstance(idx, int) and idx < len(G.DEVICE_LIST): current_dev = G.DEVICE_LIST[idx] else: raise IndexError("device idx not found in: %s or %s" % ( list(dev_dict.keys()), list(range(len(G.DEVICE_LIST))))) G.DEVICE = current_dev def auto_setup(basedir=None, devices=None, logdir=None, project_root=None, compress=None): if basedir: if os.path.isfile(basedir): basedir = os.path.dirname(basedir) if basedir not in G.BASEDIR: G.BASEDIR.append(basedir) if devices: for dev in devices: connect_device(dev) if logdir: logdir = script_log_dir(basedir, logdir) set_logdir(logdir) if project_root: ST.PROJECT_ROOT = project_root if compress: ST.SNAPSHOT_QUALITY = compress @logwrap def shell(cmd): return G.DEVICE.shell(cmd) @logwrap def start_app(package, activity=None): G.DEVICE.start_app(package, activity) @logwrap def stop_app(package): G.DEVICE.stop_app(package) @logwrap def clear_app(package): G.DEVICE.clear_app(package) @logwrap def install(filepath, **kwargs): return G.DEVICE.install_app(filepath, **kwargs) @logwrap def uninstall(package): return G.DEVICE.uninstall_app(package) @logwrap def snapshot(filename=None, msg="", quality=None, max_size=None): if not quality: quality = ST.SNAPSHOT_QUALITY if not max_size and ST.IMAGE_MAXSIZE: max_size = ST.IMAGE_MAXSIZE if filename: if not os.path.isabs(filename): logdir = ST.LOG_DIR or "." filename = os.path.join(logdir, filename) screen = G.DEVICE.snapshot(filename, quality=quality, max_size=max_size) return try_log_screen(screen, quality=quality, max_size=max_size) else: return try_log_screen(quality=quality, max_size=max_size) @logwrap def wake(): G.DEVICE.wake() @logwrap def home(): G.DEVICE.home() @logwrap def touch(v, times=1, **kwargs): if isinstance(v, Template): pos = loop_find(v, timeout=ST.FIND_TIMEOUT) else: try_log_screen() pos = v for _ in range(times): G.DEVICE.touch(pos, **kwargs) time.sleep(0.05) delay_after_operation() return pos click = touch @logwrap
Apache License 2.0
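An illustrative call sequence (assumes an Android device reachable over adb and an existing template image; the device URI is an example):

from airtest.core.api import connect_device, double_click, Template

connect_device("Android:///")                    # first device reported by adb
double_click((100, 100))                         # absolute screen coordinates
double_click(Template(r"tpl1606730579419.png"))  # locate the template on screen first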
jazzband/django-queued-storage
queued_storage/backends.py
QueuedStorage.get_accessed_time
python
def get_accessed_time(self, name):
    return self.get_storage(name).get_accessed_time(name)
Django 1.10+. Returns the last accessed time (as a datetime object) of the file specified by name.

:param name: file name
:type name: str
:rtype: :class:`~python:datetime.datetime`
https://github.com/jazzband/django-queued-storage/blob/541faad8f5d03e7e066d02b2a2d5b89d45b77819/queued_storage/backends.py#L348-L358
import six from packaging import version import django from django.core.cache import cache from django.core.exceptions import ImproperlyConfigured from django.utils.functional import SimpleLazyObject from django.utils.http import urlquote from .conf import settings from .utils import import_attribute DJANGO_VERSION = django.get_version() if version.parse(DJANGO_VERSION) <= version.parse('1.7'): from django.utils.deconstruct import deconstructible class LazyBackend(SimpleLazyObject): def __init__(self, import_path, options): backend = import_attribute(import_path) super(LazyBackend, self).__init__(lambda: backend(**options)) class QueuedStorage(object): local = None local_options = None remote = None remote_options = None task = 'queued_storage.tasks.Transfer' delayed = False cache_prefix = settings.QUEUED_STORAGE_CACHE_PREFIX def __init__(self, local=None, remote=None, local_options=None, remote_options=None, cache_prefix=None, delayed=None, task=None): self.local_path = local or self.local self.local_options = local_options or self.local_options or {} self.local = self._load_backend(backend=self.local_path, options=self.local_options) self.remote_path = remote or self.remote self.remote_options = remote_options or self.remote_options or {} self.remote = self._load_backend(backend=self.remote_path, options=self.remote_options) self.task = self._load_backend(backend=task or self.task, handler=import_attribute) if delayed is not None: self.delayed = delayed if cache_prefix is not None: self.cache_prefix = cache_prefix def _load_backend(self, backend=None, options=None, handler=LazyBackend): if backend is None: raise ImproperlyConfigured("The QueuedStorage class '%s' " "doesn't define a needed backend." % (self)) if not isinstance(backend, six.string_types): raise ImproperlyConfigured("The QueuedStorage class '%s' " "requires its backends to be " "specified as dotted import paths " "not instances or classes" % self) return handler(backend, options) def get_storage(self, name): cache_result = cache.get(self.get_cache_key(name)) if cache_result: return self.remote elif cache_result is None and self.remote.exists(name): cache.set(self.get_cache_key(name), True) return self.remote else: return self.local def get_cache_key(self, name): return '%s_%s' % (self.cache_prefix, urlquote(name)) def using_local(self, name): return self.get_storage(name) is self.local def using_remote(self, name): return self.get_storage(name) is self.remote def open(self, name, mode='rb'): return self.get_storage(name).open(name, mode) def save(self, name, content, max_length=None): cache_key = self.get_cache_key(name) cache.set(cache_key, False) name = self.get_available_name(name) try: name = self.local.save(name, content, max_length=max_length) except TypeError: name = self.local.save(name, content) if not self.delayed: self.result = self.transfer(name, cache_key=cache_key) return name def transfer(self, name, cache_key=None): if cache_key is None: cache_key = self.get_cache_key(name) return self.task.delay(name, cache_key, self.local_path, self.remote_path, self.local_options, self.remote_options) def get_valid_name(self, name): return self.get_storage(name).get_valid_name(name) def get_available_name(self, name): local_available_name = self.local.get_available_name(name) remote_available_name = self.remote.get_available_name(name) if remote_available_name > local_available_name: return remote_available_name return local_available_name def path(self, name): return self.get_storage(name).path(name) def delete(self, 
name): return self.get_storage(name).delete(name) def exists(self, name): return self.get_storage(name).exists(name) def listdir(self, name): return self.get_storage(name).listdir(name) def size(self, name): return self.get_storage(name).size(name) def url(self, name): return self.get_storage(name).url(name) def accessed_time(self, name): return self.get_storage(name).accessed_time(name) def created_time(self, name): return self.get_storage(name).created_time(name) def modified_time(self, name): return self.get_storage(name).modified_time(name)
BSD 3-Clause New or Revised License
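A hedged usage sketch (a configured Django project is assumed and the backend dotted paths are examples): both backends are given as import strings, and get_accessed_time() is proxied to whichever backend currently holds the file:

from queued_storage.backends import QueuedStorage

storage = QueuedStorage(
    local='django.core.files.storage.FileSystemStorage',
    remote='storages.backends.s3boto3.S3Boto3Storage',
)
last_access = storage.get_accessed_time('uploads/report.pdf')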
panoptes/pocs
src/panoptes/pocs/mount/mount.py
AbstractMount.state
python
def state(self):
    return self._state
str: Current mount state (e.g. 'Parked').
https://github.com/panoptes/pocs/blob/fd8c4034c6d2d9a4da46e6a1d5aa5a7b9ed68112/src/panoptes/pocs/mount/mount.py#L196-L198
from abc import abstractmethod import time from pathlib import Path from astropy import units as u from astropy.coordinates import EarthLocation from astropy.coordinates import SkyCoord from panoptes.pocs.base import PanBase from panoptes.utils.serializers import from_yaml from panoptes.utils.time import current_time from panoptes.utils import error from panoptes.utils.time import CountdownTimer class AbstractMount(PanBase): def __init__(self, location, commands=None, *args, **kwargs): super(AbstractMount, self).__init__(*args, **kwargs) assert isinstance(location, EarthLocation) self.mount_settings = self.get_config('mount.settings', dict()) self.logger.debug(f"Mount settings: {self.mount_settings}") self.logger.debug("Setting up commands for mount") self._setup_commands(commands) self.logger.debug("Mount commands set up") self._location = location self.non_sidereal_available = self.mount_settings.setdefault('non_sidereal_available', False) self.PEC_available = self.mount_settings.setdefault('PEC_available', False) self._is_connected = False self._is_initialized = False self._is_slewing = False self._is_parked = True self._at_mount_park = True self._is_tracking = False self._is_home = False self._state = 'Parked' self.sidereal_rate = ((360 * u.degree).to(u.arcsec) / (86164 * u.second)) self.ra_guide_rate = 0.9 self.dec_guide_rate = 0.9 self._tracking_rate = 1.0 self._tracking = 'Sidereal' self.min_tracking_threshold = self.mount_settings.setdefault('min_tracking_threshold', 100) self.max_tracking_threshold = self.mount_settings.setdefault('max_tracking_threshold', 99999) self._movement_speed = '' self._status_lookup = dict() self._target_coordinates = None self._current_coordinates = None self._park_coordinates = None self.brand = self.get_config('mount.brand', '') self.model = self.get_config('mount.model', '') self.port = self.get_config('mount.serial.port') def __str__(self): mount_str = f'{self.brand} {self.model}' if self.port is not None: mount_str = f'{mount_str} {self.port}' return mount_str @abstractmethod def connect(self): raise NotImplementedError @abstractmethod def initialize(self, *arg, **kwargs): raise NotImplementedError def disconnect(self): self.logger.info('Disconnecting mount') if not self.is_parked: self.park() self._is_connected = False @property def status(self): status = {} try: status['tracking_rate'] = f'{self.tracking_rate:0.04f}' status['ra_guide_rate'] = self.ra_guide_rate status['dec_guide_rate'] = self.dec_guide_rate status['movement_speed'] = self.movement_speed current_coord = self.get_current_coordinates() if current_coord is not None: status['current_ra'] = current_coord.ra status['current_dec'] = current_coord.dec if self.has_target: target_coord = self.get_target_coordinates() status['mount_target_ra'] = target_coord.ra status['mount_target_dec'] = target_coord.dec except Exception as e: self.logger.debug(f'Problem getting mount status: {e!r}') status.update(self._update_status()) return status @property def location(self): return self._location @location.setter def location(self, location): self._location = location self._setup_location_for_mount() @property def is_connected(self): return self._is_connected @property def is_initialized(self): return self._is_initialized @property def is_parked(self): return self._is_parked @property def at_mount_park(self): return self._at_mount_park @property def is_home(self): return self._is_home @property def is_tracking(self): return self._is_tracking @property def is_slewing(self): return self._is_slewing 
@property
MIT License
josuebrunel/myql
myql/myql.py
YQL.multi_query
python
def multi_query(self, queries):
    pass
Allow multi-query execution (not yet implemented).
https://github.com/josuebrunel/myql/blob/891bad29cc83a81b3f5ebc4d0401d6f2c22f119e/myql/myql.py#L274-L277
from __future__ import absolute_import import re import logging import requests from myql import errors logging.basicConfig(level=logging.DEBUG,format="[%(asctime)s %(levelname)s] [%(name)s.%(module)s.%(funcName)s] %(message)s \n") logger = logging.getLogger('mYQL') logging.getLogger('requests').disabled = True class YQL(object): PUBLIC_URL = 'https://query.yahooapis.com/v1/public/yql' PRIVATE_URL = 'https://query.yahooapis.com/v1/yql' COMMUNITY_DATA = "env 'store://datatables.org/alltableswithkeys'; " FUNC_FILTERS = ['sort', 'tail', 'truncate', 'reverse', 'unique', 'sanitize'] def __init__(self, community=True, format='json', jsonCompact=True, crossProduct=None, debug=False, diagnostics=False, oauth=None): self.community = community self.format = format self._table = None self._query = None self._payload = {} self._vars = {} self.diagnostics = diagnostics self._limit = None self._offset = None self.crossProduct = crossProduct self.jsonCompact = jsonCompact self.debug = debug if oauth: self.oauth = oauth def __repr__(self): return "<Community>: {0} - <Format>: {1} ".format(self.community, self.format) def _payload_builder(self, query, format=None): if self.community : query = self.COMMUNITY_DATA + query if vars(self).get('yql_table_url') : query = "use '{0}' as {1}; ".format(self.yql_table_url, self.yql_table_name) + query if vars(self).get('_func'): query = '| '.join((query, self._func)) self._query = query self._query = self._add_limit() self._query = self._add_offset() logger.info("QUERY = %s" %(self._query,)) payload = { 'q': self._query, 'callback': '', 'diagnostics': self.diagnostics, 'format': format if format else self.format, 'debug': self.debug, 'jsonCompact': 'new' if self.jsonCompact else '' } if vars(self).get('_vars'): payload.update(self._vars) if self.crossProduct: payload['crossProduct'] = 'optimized' self._payload = payload logger.info("PAYLOAD = %s " %(payload, )) return payload def raw_query(self, query, format=None, pretty=False): if format: format = format else: format = self.format payload = self._payload_builder(query, format=format) response = self.execute_query(payload) if pretty: response = self.response_builder(response) return response def execute_query(self, payload): if vars(self).get('oauth'): if not self.oauth.token_is_valid(): self.oauth.refresh_token() response = self.oauth.session.get(self.PRIVATE_URL, params= payload, header_auth=True) else: response = requests.get(self.PUBLIC_URL, params= payload) self._response = response return response def _clause_formatter(self, cond): if len(cond) == 2 : cond = ' '.join(cond) return cond if 'in' in cond[1].lower() : if not isinstance(cond[2], (tuple, list)): raise TypeError('("{0}") must be of type <type tuple> or <type list>'.format(cond[2])) if 'select' not in cond[2][0].lower() : cond[2] = "({0})".format(','.join(map(str,["'{0}'".format(e) for e in cond[2]]))) else: cond[2] = "({0})".format(','.join(map(str,["{0}".format(e) for e in cond[2]]))) cond = " ".join(cond) else: if isinstance(cond[2], str) and cond[2].startswith('@'): cond[2] = "{0}".format(cond[2]) else : cond[2] = "'{0}'".format(cond[2]) cond = ' '.join(cond) return cond def response_builder(self, response): try: r = response.json() result = r['query']['results'] response = { 'num_result': r['query']['count'] , 'result': result } except (Exception,) as e: print(e) return response.content return response def _func_filters(self, filters): if not isinstance(filters, (list,tuple)): raise TypeError('func_filters must be a <type list> or <type tuple>') 
for i, func in enumerate(filters) : if isinstance(func, str) and func == 'reverse': filters[i] = 'reverse()' elif isinstance(func, tuple) and func[0] in YQL.FUNC_FILTERS: filters[i] = '{:s}(count={:d})'.format(*func) elif isinstance(func, dict) : func_stmt = '' func_name = list(func.keys())[0] values = [ "{0}='{1}'".format(v[0], v[1]) for v in func[func_name] ] func_stmt = ','.join(values) func_stmt = '{0}({1})'.format(func_name, func_stmt) filters[i] = func_stmt else: raise TypeError('{0} is neither a <str>, a <tuple> or a <dict>'.format(func)) return '| '.join(filters) def _add_limit(self,): return ''.join((self._query," LIMIT {0} ".format(self._limit))) if self._limit else self._query def _add_offset(self,): return ''.join((self._query," OFFSET {0} ".format(self._offset))) if self._offset else self._query def use(self, url, name='mytable'): self.yql_table_url = url self.yql_table_name = name return {'table url': url, 'table name': name} def set(self, myvars): self._vars.update(myvars) return True def desc(self, table): query = "desc {0}".format(table) response = self.raw_query(query) return response def get(self, *args, **kwargs): self = self.select(*args, **kwargs) payload = self._payload_builder(self._query) response = self.execute_query(payload) return response def select(self, table, items=None, limit=None, offset=None, remote_filter=None, func_filters=None): self._table = table if remote_filter: if not isinstance(remote_filter, tuple): raise TypeError("{0} must be of type <type tuple>".format(remote_filter)) table = "%s(%s)" %(table, ','.join(map(str, remote_filter))) if not items: items = ['*'] self._query = "SELECT {1} FROM {0} ".format(table, ','.join(items)) if func_filters: self._func = self._func_filters(func_filters) self._limit = limit self._offset = offset return self
MIT License
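Because multi_query above is only a stub, a hedged sketch of issuing a single query through the same class is shown instead (Yahoo has since retired the YQL endpoints, so this is illustrative only):

from myql.myql import YQL

yql = YQL(format='json')
response = yql.raw_query("select name from geo.countries limit 3")  # returns a requests.Response
print(response.json())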
sphinx-contrib/restbuilder
sphinxcontrib/builders/rst.py
RstBuilder.get_outdated_docs
python
def get_outdated_docs(self):
    for docname in self.env.found_docs:
        if docname not in self.env.all_docs:
            yield docname
            continue
        sourcename = path.join(self.env.srcdir, docname + self.file_suffix)
        targetname = path.join(self.outdir, self.file_transform(docname))
        try:
            targetmtime = path.getmtime(targetname)
        except Exception:
            targetmtime = 0
        try:
            srcmtime = path.getmtime(sourcename)
            if srcmtime > targetmtime:
                yield docname
        except EnvironmentError:
            pass
Return an iterable of input files that are outdated.
https://github.com/sphinx-contrib/restbuilder/blob/1492b304b958ba687da8ab148f23f4aa0b699479/sphinxcontrib/builders/rst.py#L56-L81
from __future__ import (print_function, unicode_literals, absolute_import) import codecs from os import path from docutils.io import StringOutput from sphinx.builders import Builder from sphinx.util.osutil import ensuredir, SEP from ..writers.rst import RstWriter class RstBuilder(Builder): name = 'rst' format = 'rst' file_suffix = '.rst' link_suffix = None def init(self): if self.config.rst_file_suffix is not None: self.file_suffix = self.config.rst_file_suffix if self.config.rst_link_suffix is not None: self.link_suffix = self.config.rst_link_suffix elif self.link_suffix is None: self.link_suffix = self.file_suffix def file_transform(docname): return docname + self.file_suffix def link_transform(docname): return docname + self.link_suffix if self.config.rst_file_transform is not None: self.file_transform = self.config.rst_file_transform else: self.file_transform = file_transform if self.config.rst_link_transform is not None: self.link_transform = self.config.rst_link_transform else: self.link_transform = link_transform
BSD 2-Clause Simplified License
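For context, a hedged conf.py sketch showing how this builder is typically wired up; the option values are illustrative and only the option names come from the init() code shown above:

# conf.py (sketch); build with: sphinx-build -b rst <sourcedir> <outdir>
extensions = ['sphinxcontrib.restbuilder']

rst_file_suffix = '.rst'   # read by RstBuilder.init() as self.file_suffix
rst_link_suffix = '.rst'   # read by RstBuilder.init() as self.link_suffix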
pygae/clifford
clifford/tools/g3c/__init__.py
val_midpoint_of_line_cluster
python
def val_midpoint_of_line_cluster(array_line_cluster):
    accumulator_matrix = np.zeros((32, 32))
    for i in range(array_line_cluster.shape[0]):
        L_i_l = get_left_gmt_matrix(array_line_cluster[i, :])
        L_i_r = get_right_gmt_matrix(array_line_cluster[i, :])
        accumulator_matrix += L_i_r @ L_i_l
    power_mat = np.linalg.matrix_power(accumulator_matrix / array_line_cluster.shape[0], 256)
    pp_val = imt_func(array_line_cluster[0, :], eo.value)
    p_start = val_normalise_n_minus_1(project_val(gmt_func(gmt_func(pp_val, ninf.value), pp_val), 1))
    p_end = project_val(power_mat @ p_start, 1)
    final_point = val_normalise_n_minus_1(project_val(gmt_func(gmt_func(p_end, ninf.value), p_end), 1))
    return final_point
Gets a center point of a line cluster, following Hadfield and Lasenby, AGACSE 2018.
https://github.com/pygae/clifford/blob/e63f8564d64d6a5dad5fbb415772eb8aecbc2d8f/clifford/tools/g3c/__init__.py#L713-L742
import math import numba import numpy as np from clifford.tools.g3 import quaternion_to_rotor, random_euc_mv, random_rotation_rotor, generate_rotation_rotor from clifford.g3c import * import clifford as cf from clifford import NUMBA_PARALLEL, MVArray from scipy.interpolate import interp1d try: from pyganja import draw pyganja_available = True except ImportError: pyganja_available = False ninf = einf no = -eo E = ninf ^ (no) I5 = e12345 I3 = e123 E0 = ninf ^ -no niono = ninf ^ no unit_scalar_mv = 1.0 + 0.0*e1 adjoint_func = layout.adjoint_func gmt_func = layout.gmt_func omt_func = layout.omt_func imt_func = layout.imt_func epsilon = 10*10**(-6) mask0 = layout.get_grade_projection_matrix(0) mask1 = layout.get_grade_projection_matrix(1) mask2 = layout.get_grade_projection_matrix(2) mask3 = layout.get_grade_projection_matrix(3) mask4 = layout.get_grade_projection_matrix(4) mask5 = layout.get_grade_projection_matrix(5) mask_2minus4 = mask2 - mask4 def _defunct_wrapper(f): return f def interpret_multivector_as_object(mv): g_pres = mv.grades(eps=0.00000001) if len(g_pres) != 1: return -1 grade, = g_pres if grade == 1: if mv(e123) == mv: return 1 elif np.sum(np.abs((mv**2).value)) < 0.00000001: return 2 else: return 0 elif mv.isBlade(): if grade == 2: return 3 elif grade == 3: if abs(mv[e123]) > epsilon: return 4 else: return 5 elif grade == 4: if abs(((mv*I5)|no)[()]) > epsilon: return 7 else: return 6 else: return -1 @numba.njit @_defunct_wrapper def val_sphere_line_intersect(s, l): return sphere_line_intersect( layout.MultiVector(s), layout.MultiVector(l), ) @numba.njit def sphere_line_intersect(s, l): mv = meet(s, l) return (mv | mv).value[0] > 0 def sphere_in_sphere(S1, S2, tolerance=10**-6): return (unsign_sphere(S1)|unsign_sphere(S2))[()] <= -1 + tolerance def sphere_beyond_plane(sphere, plane): no_intersection = ((meet(sphere, plane) ** 2)[()] < 0) return no_intersection and point_beyond_plane(normalise_n_minus_1((sphere * einf * sphere)(1)), plane) def sphere_behind_plane(sphere, plane): no_intersection = ((meet(sphere, plane) ** 2)[()] < 0) return no_intersection and not point_beyond_plane(normalise_n_minus_1((sphere * einf * sphere)(1)), plane) def point_beyond_plane(point, plane): return (point|(I5*plane))[()] < 0 @numba.njit def unsign_sphere(S): return (S*(-(fast_dual(S) | ninf).value[0])).normal() @numba.njit @_defunct_wrapper def val_unsign_sphere(S): return unsign_sphere(layout.MultiVector(S)).value def join_spheres(S1in, S2in): s1 = unsign_sphere(S1in) s2 = unsign_sphere(S2in) L = (((s1 * I5) ^ (s2 * I5) ^ einf)(3)).normal() pp1 = (meet(s1, L)(2)).normal() pp2 = (meet(s2, L)(2)).normal() p1 = point_pair_to_end_points(pp1)[0] p2 = point_pair_to_end_points(pp2)[1] if (p1|(s2*I5))[()] > 0.0: opt_sphere = s2(4) elif (p2|(s1*I5))[()] > 0.0: opt_sphere = s1(4) else: p12 = p1 ^ p2 L2 = (p12 * (p12 ^ einf)).normal() opt_sphere = (L2*I5)(4) return unsign_sphere(opt_sphere) def enclosing_sphere(spheres): nspheres = len(spheres) if nspheres == 1: return spheres[0] elif nspheres == 2: return join_spheres(spheres[0], spheres[1]) mins = spheres[0] for i in range(1, nspheres): mins = join_spheres(mins, spheres[i]) return mins def project_points_to_plane(point_list, plane): projected_list = [] for point in point_list: proj_point = ((point|plane)*plane) proj_point = normalise_n_minus_1((proj_point*einf*proj_point)(1)) projected_list.append(proj_point) return projected_list def project_points_to_sphere(point_list, sphere, closest=True): if closest: point_index = 1 else: point_index = 0 
projected_list = [] C = sphere*einf*sphere for point in point_list: proj_point = point_pair_to_end_points(meet((point^C^einf).normal(), sphere))[point_index] projected_list.append(proj_point) return projected_list def project_points_to_circle(point_list, circle, closest=True): circle_plane = (circle^einf).normal() planar_points = project_points_to_plane(point_list, circle_plane) circle_points = project_points_to_sphere(planar_points, -circle*circle_plane*I5, closest=closest) return circle_points def project_points_to_line(point_list, line): projected_list = [] for point in point_list: pp = point|line proj_point = normalise_n_minus_1((pp*einf*pp)(1)) projected_list.append(proj_point) return projected_list def iterative_closest_points_on_circles(C1, C2, niterations=20): cav = average_objects([C1, C2]) cav2 = average_objects([C1, -C2]) PP = meet(cav, cav2^einf).normal() P_list = point_pair_to_end_points(PP) dmin = np.inf for Ptest in P_list: d = -(project_points_to_circle([Ptest], C1)[0](1)|project_points_to_circle([Ptest], C2)[0](1))[()] if d < dmin: dmin = d P2 = Ptest P1 = project_points_to_circle([P2], C1)[0](1) P2 = project_points_to_circle([P1], C2)[0](1) for i in range(niterations): P1 = project_points_to_circle([P2], C1)[0](1) P2 = project_points_to_circle([P1], C2)[0](1) return P1, P2 def closest_point_on_line_from_circle(C, L, eps=1E-6): return project_points_to_line([closest_point_on_circle_from_line(C, L, eps=eps)], L)[0] def closest_point_on_circle_from_line(C, L, eps=1E-6): phi = (C^einf).normal() B = meet(L, phi) A = normalise_n_minus_1((C * einf * C)(1)) bound_sphere = ((C * phi) * I5).normal() if abs((B**2)[()]) < eps: Lpln = (L.normal() + (phi*L*phi)(3).normal()).normal() X = normalise_n_minus_1((A|Lpln)*einf*(A|Lpln)) if sphere_in_sphere(X*I5, bound_sphere): PP = meet(Lpln, bound_sphere) return point_pair_to_end_points(PP)[0] else: L2 = A ^ X ^ einf PP = meet(L2, bound_sphere) return point_pair_to_end_points(PP)[1] P = intersect_line_and_plane_to_point(L, phi) Adash = L*A*L E = up(down(A)*0.5 + down(Adash)*0.5) Edash = normalise_n_minus_1((phi*E*phi)(1)) Y = up(down(E)*0.5 + down(Edash)*0.5) if sphere_in_sphere(Y*I5, bound_sphere): if abs((A | P)[()]) < eps: L2 = (L.normal() + (phi * L * phi)(3).normal()) if abs(L2) < eps: L2 = (A ^ project_points_to_circle([random_conformal_point()], C)[0] ^ einf).normal() else: L2 = L2.normal() elif abs((P | Y)[()]) < eps: L2 = A ^ Y ^ einf else: L2 = P ^ Y ^ einf else: L2 = A ^ Y ^ einf PP = meet(L2, bound_sphere) Xs = point_pair_to_end_points(PP) return max(Xs, key=lambda x: (x | P)[()]) def iterative_closest_points_circle_line(C, L, niterations=20): cav = average_objects([C, L]) cav2 = average_objects([C, -L]) PP = meet(cav, cav2^einf).normal() P_list = point_pair_to_end_points(PP) dmin = np.inf for Ptest in P_list: d = -(project_points_to_circle([Ptest], C)[0](1)|project_points_to_line([Ptest], L)[0](1))[()] if d < dmin: dmin = d P2 = Ptest P1 = project_points_to_circle([P2], C)[0](1) P2 = project_points_to_line([P1], L)[0](1) for i in range(niterations): P1 = project_points_to_circle([P2], C)[0](1) P2 = project_points_to_line([P1], L)[0](1) P1 = normalise_n_minus_1(P1) P2 = normalise_n_minus_1(P2) return P1, P2 def iterative_furthest_points_on_circles(C1, C2, niterations=20): P2 = random_conformal_point() P1 = project_points_to_circle([P2], C1, closest=False)[0](1) P2 = project_points_to_circle([P1], C2, closest=False)[0](1) for i in range(niterations): P1 = project_points_to_circle([P2], C1, closest=False)[0](1) P2 = 
project_points_to_circle([P1], C2, closest=False)[0](1) return P1, P2 def normalise_TR_to_unit_T(TR): R_only = TR(e123) T_only = (TR*~R_only).normal() t = -2*(T_only|no) scale = abs(t) t = t/scale new_TR = (generate_translation_rotor(t)*R_only).normal() return new_TR, scale def scale_TR_translation(TR, scale): R_only = TR(e123) T_only = (TR*~R_only).normal() t = -2*(T_only|no)*scale new_TR = (generate_translation_rotor(t)*R_only).normal() return new_TR def left_gmt_generator(mt=layout.gmt): k_list, l_list, m_list = mt.coords mult_table_vals = mt.data gaDims = mt.shape[1] val_get_left_gmt_matrix = cf._numba_val_get_left_gmt_matrix @numba.njit def get_left_gmt(x_val): return val_get_left_gmt_matrix( x_val, k_list, l_list, m_list, mult_table_vals, gaDims) return get_left_gmt def right_gmt_generator(mt=layout.gmt): return left_gmt_generator(mt.T) get_left_gmt_matrix = left_gmt_generator() get_right_gmt_matrix = right_gmt_generator() def get_line_reflection_matrix(lines, n_power=1): line_array = np.array([l.value for l in lines]) return val_get_line_reflection_matrix(line_array, n_power) @numba.njit def val_get_line_reflection_matrix(line_array: np.ndarray, n_power: int) -> np.ndarray: mat2solve = np.zeros((32, 32), dtype=np.float64) for i in range(line_array.shape[0]): LiMat = get_left_gmt_matrix(line_array[i, :]) tmat = (LiMat @ mask_2minus4) @ LiMat mat2solve += tmat mat = mask1 @ mat2solve/line_array.shape[0] return np.linalg.matrix_power(mat, n_power) @numba.njit def val_truncated_get_line_reflection_matrix(line_array: np.ndarray, n_power: int) -> np.ndarray: mat2solve = np.zeros((32, 32), dtype=np.float64) for i in range(line_array.shape[0]): LiMat = get_left_gmt_matrix(line_array[i, :]) tmat = (LiMat @ mask_2minus4) @ LiMat mat2solve += tmat mat_val = mask1 @ mat2solve mat_val = mat_val[1:6, 1:6].copy()/line_array.shape[0] return np.linalg.matrix_power(mat_val, n_power) @numba.njit @_defunct_wrapper def val_get_line_intersection(L3_val, Ldd_val): return get_line_intersection( layout.MultiVector(L3_val), layout.MultiVector(Ldd_val) ).value @numba.njit def get_line_intersection(L3, Ldd): Xdd = Ldd * no * Ldd + no Xddd = L3 * Xdd * L3 Pd = 0.5*(Xdd+Xddd) P = -(Pd * ninf * Pd) imt = Pd | ninf P_denominator = 2*(imt * imt).value[0] return (P/P_denominator)(1) @numba.njit @_defunct_wrapper def val_midpoint_between_lines(L1_val, L2_val): return midpoint_between_lines(layout.MultiVector(L1_val, L2_val)).value @numba.njit def midpoint_between_lines(L1, L2): L3 = (L1 + L2).normal() Ldd = (L1 - L2).normal() S = get_line_intersection(L3, Ldd) return normalise_n_minus_1((S * ninf * S)(1)) @numba.njit def midpoint_of_line_cluster(line_cluster): return layout.MultiVector(val_midpoint_of_line_cluster(MVArray(line_cluster).value)) @numba.njit
BSD 3-Clause New or Revised License
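A hedged usage sketch (assumes clifford's G3C tools are importable as in the context above; the sample lines are arbitrary, and layout.MultiVector is used only to wrap the returned 32-component value array):

import numpy as np
from clifford.g3c import up, einf, e1, e2, e3, layout
from clifford.tools.g3c import val_midpoint_of_line_cluster

# Build a small cluster of conformal lines through pairs of Euclidean points.
point_pairs = [(e1, 2 * e1 + e2), (e2, e1 + 2 * e2), (e3, e1 + e3)]
line_values = [(up(a) ^ up(b) ^ einf).normal().value for a, b in point_pairs]

midpoint_val = val_midpoint_of_line_cluster(np.array(line_values))
midpoint = layout.MultiVector(midpoint_val)  # back to a multivector for inspection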
ltalirz/asetk
asetk/format/cube.py
Plane.__init__
python
def __init__(self, data=None, origin=None, dx=None, dy=None, extent=None):
    self.data = data
    self.origin = origin
    if extent is not None and (dx is not None or dy is not None):
        print("Error: Please specify either extent or dx, dy")
    elif extent is not None:
        self.dx = [(extent[1]-extent[0])/self.nx, 0]
        self.dy = [0, (extent[3]-extent[2])/self.ny]
    else:
        self.dx = dx
        self.dy = dy
Standard constructor; all parameters default to None.
https://github.com/ltalirz/asetk/blob/bdb31934a5eb49d601e492fc98078d27f5dd2ebd/asetk/format/cube.py#L582-L594
from __future__ import division import numpy as np import copy as cp import asetk.atomistic.fundamental as fu import asetk.atomistic.constants as constants import matplotlib.mlab as mlab class Cube(object): dir_indices = { 'x': 0, 'y': 1, 'z': 2, } def __init__(self, filename=None, title=None, comment=None, origin=None, atoms=None, data=None): self.filename = filename self.title = title self.comment = comment self.origin = origin self.atoms = atoms self.data = data self._shape = None @classmethod def from_cube(cls, cube): tmp = cp.deepcopy(cube) return tmp @classmethod def from_file(cls, fname, read_data=False): tmp = Cube() tmp.read_cube_file(fname, read_data=read_data) return tmp @property def cell(self): return self.atoms.cell @cell.setter def cell(self, c): self.atoms.cell = c @property def shape(self): if self.data is not None: return self.data.shape elif self.shape_ is not None: return self.shape_ else: return None @property def nx(self): return self.shape[0] @property def ny(self): return self.shape[1] @property def nz(self): return self.shape[2] @property def dx(self): return self.cell[0] / self.nx @property def dy(self): return self.cell[1] / self.ny @property def dz(self): return self.cell[2] / self.nz @property def dv(self): return np.dot(self.dx, np.cross(self.dy, self.dz)) def __str__(self): text = "Spectrum containing {} spins\n".format(len(self.energylevels)) for i in range(len(self.energylevels)): e = self.energylevels[i] s = self.spins[i] text += 'spin {} : {}\n'.format(s, e.__str__()) return text def read_cube_file(self, fname, read_data=False, v=1): self.filename = fname f = open(fname, 'r') readline = f.readline self.title = readline() self.comment = readline() axes = [0, 1, 2] line = readline().split() natoms = int(line[0]) b2A = constants.a0 / constants.Angstrom self.origin = np.array(line[1:], dtype=float) * b2A shape = np.empty(3,dtype=int) cell = np.empty((3, 3)) for i in range(3): n, x, y, z = [float(s) for s in readline().split()] shape[i] = int(n) cell[i] = n * np.array([x, y, z]) self.shape_ = shape cell = cell * b2A numbers = np.empty(natoms, int) positions = np.empty((natoms, 3)) for i in range(natoms): line = readline().split() numbers[i] = int(line[0]) positions[i] = [float(s) for s in line[2:]] positions *= b2A self.atoms = fu.Atoms(numbers=numbers, positions=positions, cell=cell) if read_data: self.data = np.array(f.read().split(), dtype=float) self.data = self.data.reshape(shape) f.close() def resize(self, shape): shape_old = self.shape for dim in range(3): self.cell[dim] = shape[dim] * self.cell[dim] / shape_old[dim] tmp = np.zeros(shape) m = np.min([shape, self.shape], axis=0) tmp[:m[0], :m[1],:m[2]] = self.data[:m[0], :m[1],:m[2]] self.data = tmp def write_cube_file(self, fname=None): if fname is None: fname = self.filename f = open(fname, 'w') f.write(self.title) f.write(self.comment) A2b = constants.Angstrom / constants.a0 o = self.origin * A2b f.write('{:5d}{:12.6f}{:12.6f}{:12.6f}\n' .format(len(self.atoms), o[0], o[1], o[2])) c = self.cell * A2b for i in range(3): n = self.shape[i] d = c[i] / self.shape[i] f.write('{:5d}{:12.6f}{:12.6f}{:12.6f}\n'.format(n, d[0], d[1], d[2])) positions = self.atoms.get_positions() * A2b numbers = self.atoms.get_atomic_numbers() for Z, (x, y, z) in zip(numbers, positions): f.write('{:5d}{:12.6f}{:12.6f}{:12.6f}{:12.6f}\n'.format(Z, 0.0, x, y, z) ) self.data.tofile(f, sep='\n', format='%12.6e') f.close() def get_index(self, dir, d): c = self.cell if dir is 'x': dmax = c[0][0] step = np.linalg.norm(self.dx) elif dir 
is 'y': dmax = c[1][1] step = np.linalg.norm(self.dy) elif dir is 'z': dmax = c[2][2] step = np.linalg.norm(self.dz) else: raise ValueError("Did not recognize direction '{}'.".format(dir)) if d > dmax: raise ValueError("Distance {} exceeds maximum distance {} \ along direction {}".format(d,dmax,dir)) index = int(round(d / step)) return index def get_index_above_atoms(self, d, from_below=False, verbose=False): if from_below: zmin = np.min(self.atoms.positions[:,2]) zplane = zmmin - d else: zmax = np.max(self.atoms.positions[:,2]) zplane = zmax + d dz = np.linalg.norm(self.dz) iplane = int(round(zplane / dz)) zplanereal = iplane * dz if verbose: if from_below: delta = zmin - zplanereal else: delta = zplanereal - zmax print("Precise height above atoms: {} Angstroms".format(delta)) return iplane def get_plane_above_atoms(self, d, verbose=False, return_object=None, replica=None, resample=None, from_below=False): iplane = self.get_index_above_atoms(d, from_below, verbose=verbose) return self.get_plane('z', iplane, return_object=return_object, replica=replica, resample=resample) def get_isosurface_above_atoms(self, v, from_below=False, zcut=None, on_grid=False, return_object=None, replica=None, resample=None): plane = np.empty(self.shape[0:2]) missed = 0 dz = np.linalg.norm(self.dz) nz = self.nz if zcut is None: zcut = dz*nz if from_below else 0.0 if from_below: zmax = zcut else: self.data = self.data[:,:,::-1] zmax = nz*dz - zcut for i in range(self.nx): for j in range(self.ny): itmp = np.argmax(self.data[i,j,:] > v) if itmp == 0 or itmp * dz > zmax: plane[i,j] = zmax missed = missed + 1 elif on_grid: plane[i,j] = itmp * dz else: greater = self.data[i,j,itmp] smaller = self.data[i,j,itmp-1] plane[i,j] = dz * (itmp - (greater - v)/(greater-smaller)) if not from_below: self.data = self.data[:,:,::-1] plane = dz*nz - plane print("{} z-values replaced by zcut = {}".format(missed,zcut)) pextent, pdx, pdy = self.get_plane_extent('z', return_vectors=True) plane = Plane(data=plane, origin=self.origin, dx=pdx, dy=pdy) if replica: plane.replicate(replica) if resample: plane.resample(resample) if return_object: return plane else: return plane.data def get_plane(self, dir, i, return_object=False, replica=None, resample=None): shape = self.data.shape dvs = self.atoms.cell / shape ls = [ np.linalg.norm(v) for v in self.atoms.cell ] o = self.origin if dir is 'x' and i < shape[0]: plane = self.data[i, :, :] elif dir is 'y' and i < shape[1]: plane = self.data[:, i, :] elif dir is 'z' and i < shape[2]: plane = self.data[:, :, i] else: msg = "Direction {} not recognized or index {} out of bounds" .format(dir,i) msg += "\nDirection must be 'x', 'y' or 'z'." 
raise ValueError(msg) pextent, pdx, pdy = self.get_plane_extent(dir, return_vectors=True) plane = Plane(data=plane, origin=o, dx=pdx, dy=pdy) if replica: plane.replicate(replica) if resample: plane.resample(resample) if return_object: return plane else: return plane.data def get_plane_extent(self, dir, return_vectors=False): dvs = self.atoms.cell / self.data.shape ls = [ np.linalg.norm(v) for v in self.atoms.cell ] o = self.origin if dir is 'x': dum, pdx, pdy = dvs pextent = [o[1], o[1]+ls[1], o[2], o[2]+ls[2]] elif dir is 'y': pdy, dum, pdx = dvs pextent = [o[2], o[2]+ls[2], o[0], o[0]+ls[0]] elif dir is 'z': pdx, pdy, dum = dvs pextent = [o[0], o[0]+ls[0], o[1], o[1]+ls[1]] else: print("Cannot recognize direction '{}'".format(dir)) print("Expected 'x', 'y' or 'z'.") if return_vectors: return [pextent, pdx, pdy] else: return pextent def set_plane(self, dir, i, plane): nx, ny, nz = self.nx, self.ny, self.nz npx, npy = plane.shape if dir is 'x' and npx == ny and npy == nz: self.data[i, :, :] = plane elif dir is 'y' and npx == nz and npy == nx: self.data[:, i, :] = plane elif dir is 'z' and npx == nx and npy == ny: self.data[:, :, i] = plane else: print("Direction '{}' and shape '{}' are not compatible." .format(dir, plane.shape)) print("Direction must be 'x', 'y' or 'z'.") return False return True def get_avg(self, dir): if dir is 'x': return np.mean(self.data, axis=0) elif dir is 'y': return np.mean(self.data, axis=1) elif dir is 'z': return np.mean(self.data, axis=2) else: print("Cannot recognize direction '{}'".format(dir)) print("Direction must be 'x', 'y' or 'z'.") def __iadd__(self, c): if self.data.shape != c.data.shape: raise ValueError("Shape of cube files do not agree") self.data += c.data return self def roll(self, dir, shift=None, distance=None): dir_index = self.dir_indices[dir] step = np.linalg.norm(self.cell[dir_index] / self.data.shape[dir_index]) if shift: dist_exact = shift * step elif distance: shift = int(distance / step) dist_exact = shift * step else: raise IOError("Please provide either shift (integer) or distance (float)") print("Rolling cube file by {:.3f} Angstroms along {}." .format(dist_exact,dir)) self.data = np.roll(self.data, shift=shift, axis=dir_index) v = np.zeros(3) v[dir_index] = dist_exact self.atoms.translate(v) class Plane(object):
MIT License
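A minimal construction sketch for Plane (array shape, origin, and spacings are arbitrary placeholders; asetk is assumed to be importable):

import numpy as np
from asetk.format.cube import Plane

data = np.zeros((10, 20))
plane = Plane(data=data, origin=[0.0, 0.0, 0.0], dx=[0.5, 0.0], dy=[0.0, 0.25])
# Alternatively pass extent=[xmin, xmax, ymin, ymax] instead of dx and dy.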
madcowfred/evething
thing/tasks/apitask.py
APITask.parse_xml
python
def parse_xml(self, data):
    return ET.fromstring(data.encode('utf-8'))
Parse XML and return an ElementTree.
https://github.com/madcowfred/evething/blob/2926ce0afe656f932140a944e07cbc99c6ef7143/thing/tasks/apitask.py#L367-L371
import datetime import hashlib import requests import time try: import xml.etree.cElementTree as ET except ImportError: import xml.etree.ElementTree as ET from billiard import current_process from celery import Task from celery.task.control import broadcast from celery.utils.log import get_task_logger from django.conf import settings from django.core.cache import cache from django.db import connections from django.db.models import Max from urlparse import urljoin from thing.models import APIKey, APIKeyFailure, Event, TaskState from thing.stuff import total_seconds PENALTY_TIME = 12 * 60 * 60 PENALTY_MULT = 0.2 KEY_ERRORS = set([ '202', '203', '204', '205', '207', '210', '211', '212', '220', '222', '223', ]) this_process = None class APITask(Task): abstract = True _logger = get_task_logger(__name__) _session = requests.Session() _session.headers.update({ 'User-Agent': 'EVEthing-tasks (keep-alive)', }) _session.mount('http://', requests.adapters.HTTPAdapter(pool_connections=1, pool_maxsize=1)) _session.mount('https://', requests.adapters.HTTPAdapter(pool_connections=1, pool_maxsize=1)) def init(self, taskstate_id=None, apikey_id=None): global this_process if this_process is None: this_process = int(current_process()._name.split('-')[1]) if settings.STAGGER_APITASK_STARTUP: sleep_for = (this_process - 1) * 2 self._logger.warning('Worker #%d staggered startup: sleeping for %d seconds', this_process, sleep_for) time.sleep(sleep_for) if settings.DEBUG: for db in settings.DATABASES.keys(): connections[db].queries = [] self._started = time.time() self._api_log = [] self._cache_delta = None self._taskstate = None self.apikey = None self.root = None if taskstate_id is not None: try: self._taskstate = TaskState.objects.get(pk=taskstate_id) except TaskState.DoesNotExist: self.log_error('Task not starting: TaskState %d has gone missing', taskstate_id) return False if apikey_id is not None: try: self.apikey = APIKey.objects.get(pk=apikey_id) except APIKey.DoesNotExist: return False else: if not self.apikey.valid: return False if self.apikey.needs_apikeyinfo and getattr(self, 'name') != 'thing.api_key_info': return False def on_failure(self, exc, task_id, args, kwargs, einfo): if self._taskstate is not None: self._taskstate_ready() def on_success(self, retval, task_id, args, kwargs): if self._taskstate is not None: self._taskstate_ready() if retval is True: if settings.DEBUG: total_api = sum(a[1] for a in self._api_log) self.log_warn('[API] %.3fs %d requests', total_api, len(self._api_log)) for url, runtime in self._api_log: self.log_warn('%.3fs %s', runtime, url) for db in sorted(settings.DATABASES.keys()): self.log_warn( '[%s] %.3fs %d queries', db, sum(float(q['time']) for q in connections[db].queries), len(connections[db].queries), ) for query in connections[db].queries: if len(query['sql']) > 500: self.log_warn('%02.3fs %s...', float(query['time']), query['sql'][:500]) else: self.log_warn('%02.3fs %s', float(query['time']), query['sql']) def _taskstate_ready(self): utcnow = datetime.datetime.utcnow() self._taskstate.state = TaskState.READY_STATE self._taskstate.mod_time = utcnow if self._cache_delta is not None: self._taskstate.next_time = utcnow + self._cache_delta + datetime.timedelta(seconds=20) else: self._taskstate.next_time = utcnow + datetime.timedelta(minutes=30) self._taskstate.save(update_fields=('state', 'mod_time', 'next_time')) def fetch_api(self, url, params, use_auth=True, log_error=True): utcnow = datetime.datetime.utcnow() if use_auth: params['keyID'] = self.apikey.keyid cache_key = 
self._get_cache_key(url, params) cached_data = cache.get(cache_key) if cached_data is None: sleep_for = self._get_backoff() if sleep_for > 0: time.sleep(sleep_for) if use_auth: params['vCode'] = self.apikey.vcode full_url = urljoin(settings.API_HOST, url) start = time.time() try: if params: r = self._session.post(full_url, params) else: r = self._session.get(full_url) data = r.text except Exception, e: self._increment_backoff(e) return False self._api_log.append((url, time.time() - start)) is_apikeyinfo = (getattr(self, 'name') == 'thing.api_key_info') if not r.status_code == requests.codes.ok: if r.status_code == '403' or r.status_code == 403: self._cache_delta = datetime.timedelta(hours=4) self.log_warn('403 error, caching for 4 hours') if is_apikeyinfo and self.apikey: self.apikey.apikeyinfo_errors += 1 if self.apikey.apikeyinfo_errors >= 3: self.invalidate_key('Too many 403 errors from APIKeyInfo') else: self.apikey.save(update_fields=('apikeyinfo_errors',)) else: self.apikey.needs_apikeyinfo = True self.apikey.save(update_fields=('needs_apikeyinfo',)) else: self._increment_backoff('Bad status code: %s' % (r.status_code)) return False elif is_apikeyinfo and self.apikey: self.apikey.apikeyinfo_errors = 0 self.apikey.needs_apikeyinfo = False self.apikey.save(update_fields=('apikeyinfo_errors', 'needs_apikeyinfo')) else: data = cached_data if data: try: self.root = self.parse_xml(data) except Exception: return False current = self.parse_api_date(self.root.find('currentTime').text) until = self.parse_api_date(self.root.find('cachedUntil').text) self._cache_delta = until - current if cached_data is None: if self.apikey is None: cache_expires = total_seconds(self._cache_delta) + 10 else: last_seen = APIKey.objects.filter(keyid=self.apikey.keyid, vcode=self.apikey.vcode).aggregate(m=Max('user__profile__last_seen'))['m'] secs = max(0, total_seconds(utcnow - last_seen)) mult = 1 + (min(20, max(0, secs / PENALTY_TIME)) * PENALTY_MULT) cache_expires = max(0, total_seconds(self._cache_delta) * mult) + 10 self._cache_delta = datetime.timedelta(seconds=cache_expires) if cache_expires >= 0: cache.set(cache_key, data, cache_expires) error = self.root.find('error') if error is not None: if log_error: self.log_error('%s: %s | %s -> %s', error.attrib['code'], error.text, current, until) if error.attrib['code'] in KEY_ERRORS: reason = '%s %s' % (error.attrib['code'], error.text) self.invalidate_key(reason) elif error.attrib['code'].startswith('5') or error.attrib['code'] in ('901', '902', '1001'): self._increment_backoff('API server seems broken') elif error.attrib['code'] == '904': self.log_error('Received 904 error, killing workers!') broadcast('shutdown') return False return True def fetch_url(self, url, params): start = time.time() try: if params: r = self._session.post(url, params) else: r = self._session.get(url) data = r.text except Exception: return False self._api_log.append((url, time.time() - start)) if not r.status_code == requests.codes.ok: return False return data
BSD 2-Clause Simplified License
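Since parse_xml is a thin wrapper around ElementTree, here is a self-contained sketch of the same parsing step applied to an EVE-API-style response (the XML snippet is invented for illustration):

import xml.etree.ElementTree as ET

data = u"""<eveapi version="2">
  <currentTime>2013-05-01 12:00:00</currentTime>
  <result><name>Example</name></result>
  <cachedUntil>2013-05-01 13:00:00</cachedUntil>
</eveapi>"""

root = ET.fromstring(data.encode('utf-8'))
print(root.find('currentTime').text)
print(root.find('cachedUntil').text)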
arise-initiative/robomimic
robomimic/algo/algo.py
Algo.deserialize
python
def deserialize(self, model_dict):
    self.nets.load_state_dict(model_dict)
Load model from a checkpoint.

Args:
    model_dict (dict): a dictionary saved by self.serialize()
        that contains the same keys as @self.network_classes
https://github.com/arise-initiative/robomimic/blob/2804dd97dd1625ec861298a35cb677129d3bfacc/robomimic/algo/algo.py#L269-L277
import textwrap from copy import deepcopy from collections import OrderedDict import torch.nn as nn import robomimic.utils.tensor_utils as TensorUtils import robomimic.utils.torch_utils as TorchUtils import robomimic.utils.obs_utils as ObsUtils REGISTERED_ALGO_FACTORY_FUNCS = OrderedDict() def register_algo_factory_func(algo_name): def decorator(factory_func): REGISTERED_ALGO_FACTORY_FUNCS[algo_name] = factory_func return decorator def algo_name_to_factory_func(algo_name): return REGISTERED_ALGO_FACTORY_FUNCS[algo_name] def algo_factory(algo_name, config, modality_shapes, ac_dim, device): assert algo_name == config.algo_name factory_func = algo_name_to_factory_func(algo_name) algo_cls, algo_kwargs = factory_func(config.algo) return algo_cls( algo_config=config.algo, obs_config=config.observation, global_config=config, modality_shapes=modality_shapes, ac_dim=ac_dim, device=device, **algo_kwargs ) class Algo(object): def __init__( self, algo_config, obs_config, global_config, modality_shapes, ac_dim, device ): self.optim_params = deepcopy(algo_config.optim_params) self.algo_config = algo_config self.obs_config = obs_config self.global_config = global_config self.ac_dim = ac_dim self.device = device self.modality_shapes = modality_shapes self.nets = nn.ModuleDict() self._create_shapes(obs_config.modalities, modality_shapes) self._create_networks() self._create_optimizers() assert isinstance(self.nets, nn.ModuleDict) def _create_shapes(self, modalities, modality_shapes): self.obs_shapes = OrderedDict() self.goal_shapes = OrderedDict() self.subgoal_shapes = OrderedDict() for k in modality_shapes: if "obs" in self.obs_config.modalities and k in (self.obs_config.modalities.obs.low_dim + self.obs_config.modalities.obs.image): self.obs_shapes[k] = modality_shapes[k] if "goal" in self.obs_config.modalities and k in (self.obs_config.modalities.goal.low_dim + self.obs_config.modalities.goal.image): self.goal_shapes[k] = modality_shapes[k] if "subgoal" in self.obs_config.modalities and k in (self.obs_config.modalities.subgoal.low_dim + self.obs_config.modalities.subgoal.image): self.subgoal_shapes[k] = modality_shapes[k] def _create_networks(self): raise NotImplementedError def _create_optimizers(self): self.optimizers = dict() self.lr_schedulers = dict() for k in self.optim_params: if k in self.nets: if isinstance(self.nets[k], nn.ModuleList): self.optimizers[k] = [ TorchUtils.optimizer_from_optim_params(net_optim_params=self.optim_params[k], net=self.nets[k][i]) for i in range(len(self.nets[k])) ] self.lr_schedulers[k] = [ TorchUtils.lr_scheduler_from_optim_params(net_optim_params=self.optim_params[k], net=self.nets[k][i], optimizer=self.optimizers[k][i]) for i in range(len(self.nets[k])) ] else: self.optimizers[k] = TorchUtils.optimizer_from_optim_params( net_optim_params=self.optim_params[k], net=self.nets[k]) self.lr_schedulers[k] = TorchUtils.lr_scheduler_from_optim_params( net_optim_params=self.optim_params[k], net=self.nets[k], optimizer=self.optimizers[k]) def process_batch_for_training(self, batch): return batch def train_on_batch(self, batch, epoch, validate=False): assert validate or self.nets.training return OrderedDict() def log_info(self, info): log = OrderedDict() for k in self.optimizers: for i, param_group in enumerate(self.optimizers[k].param_groups): log["Optimizer/{}{}_lr".format(k, i)] = param_group["lr"] return log def on_epoch_end(self, epoch): for k in self.lr_schedulers: if self.lr_schedulers[k] is not None: self.lr_schedulers[k].step() def set_eval(self): self.nets.eval() def 
set_train(self): self.nets.train() def serialize(self): return self.nets.state_dict()
MIT License
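A hedged checkpoint round-trip sketch; algo stands for an already-constructed Algo subclass instance (hypothetical here), and the pattern simply mirrors PyTorch's state_dict save/load:

import torch

# `algo` is assumed to be an existing Algo subclass instance.
torch.save(algo.serialize(), "model.pth")   # serialize() returns self.nets.state_dict()
algo.deserialize(torch.load("model.pth"))   # feeds it back through load_state_dict()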
ducksboard/libsaas
libsaas/services/googleanalytics/resources.py
Account.webproperty
python
def webproperty(self, webproperty_id):
    return WebProperty(self, webproperty_id)
Return the resource corresponding to a single property
https://github.com/ducksboard/libsaas/blob/615981a3336f65be9d51ae95a48aed9ad3bd1c3c/libsaas/services/googleanalytics/resources.py#L155-L159
from libsaas import http, parsers from libsaas.services import base def translate_param(val): return val.replace('_', '-') class QuotaResource(base.RESTResource): @base.apimethod def get(self, userIp=None, quotaUser=None): params = base.get_params(None, locals(), translate_param=translate_param) request = http.Request('GET', self.get_url(), params) return request, parsers.parse_json def create(self, *args, **kwargs): raise base.MethodNotSupported() def update(self, *args, **kwargs): raise base.MethodNotSupported() def delete(self, *args, **kwargs): raise base.MethodNotSupported() class PaginatedQuotaResource(QuotaResource): @base.apimethod def get(self, max_results=None, start_index=None, userIp=None, quotaUser=None): params = base.get_params(None, locals(), translate_param=translate_param) request = http.Request('GET', self.get_url(), params) return request, parsers.parse_json class GoalBase(QuotaResource): path = 'goals' class Goal(GoalBase): pass class Goals(GoalBase, PaginatedQuotaResource): pass class ViewBase(QuotaResource): path = 'profiles' class Views(ViewBase, PaginatedQuotaResource): pass class View(ViewBase): @base.resource(Goals) def goals(self): return Goals(self) @base.resource(Goal) def goal(self, goal_id): return Goal(self, goal_id) class WebPropertyBase(QuotaResource): path = 'webproperties' class WebProperties(WebPropertyBase, PaginatedQuotaResource): pass class WebProperty(WebPropertyBase): @base.resource(Views) def views(self): return Views(self) @base.resource(View) def view(self, profile_id): return View(self, profile_id) class AccountBase(QuotaResource): path = 'accounts' class Accounts(AccountBase, PaginatedQuotaResource): pass class Account(AccountBase): @base.resource(WebProperties) def webproperties(self): return WebProperties(self) @base.resource(WebProperty)
MIT License
derricw/asciisciit
asciisciit/conversions.py
ascii_seq_to_gif
python
def ascii_seq_to_gif(seq, output_path, fps=15.0, font_size=10, font_path=None):
    images = []

    status = StatusBar(len(seq), text="Generating frames: ",)

    for index, ascii_img in enumerate(seq):
        if type(ascii_img) == str:
            text = ascii_img
        else:
            text = ascii_img.data
        images.append(
            ascii_to_pil(text,
                         font_size=font_size,
                         font_path=font_path)
        )
        status.update(index)

    status.complete()

    duration = 1.0/fps
    images_np = [np.array(img) for img in images]
    imageio.mimsave(output_path, images_np, duration=duration)
Creates a gif from a sequence of ascii images.

Parameters
----------
output_path : str
    Path for gif output.
fps : float
    FPS for gif playback.
font_size : int
    Font size for ascii.
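A minimal usage sketch, assuming the module is importable as asciisciit.conversions as laid out in the context below; the frame image paths and parameter values are made up.

from asciisciit import conversions as conv

# Build a few ascii frames from (hypothetical) image files, then write them as a gif.
frames = [conv.image_to_ascii("frame_%02d.png" % i, scalefactor=0.15)
          for i in range(10)]
conv.ascii_seq_to_gif(frames, "out.gif", fps=10, font_size=8)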
https://github.com/derricw/asciisciit/blob/05923c555146730afe19d6c629044c037051d44a/asciisciit/conversions.py#L189-L226
from bisect import bisect import random import os from PIL import Image, ImageOps, ImageDraw, ImageFont import numpy as np import cv2 import imageio from asciisciit.misc import * from asciisciit.lut import get_lut, relative_width DEFAULT_ASPECT_CORRECTION_FACTOR = 6.0/11.0 RESOURCE_DIR = os.path.join(os.path.dirname(__file__),'res') def image_to_ascii(img, scalefactor=0.2, invert=False, equalize=True, lut='simple', aspect_correction_factor=None): if type(img) == str: img = open_pil_img(img) elif type(img) == np.ndarray: img = numpy_to_pil(img) try: text = pil_to_ascii(img, scalefactor, invert, equalize, lut, aspect_correction_factor) except: raise TypeError("That image type doesn't work. Try PIL, Numpy, or file path...") return text def pil_to_ascii(img, scalefactor=0.2, invert=False, equalize=True, lut='simple', aspect_correction_factor=None ): lookup = get_lut(lut) if aspect_correction_factor is None: aspect_correction_factor = get_aspect_correction_factor(lookup.exemplar) img = img.resize( (int(img.size[0]*scalefactor), int(img.size[1]*scalefactor*aspect_correction_factor)), Image.BILINEAR) img = img.convert("L") if equalize: img = ImageOps.equalize(img) if invert: img = ImageOps.invert(img) img = np.array(img, dtype=np.uint8) return u"\n" + u"".join(lookup.apply(img).flatten().tolist()) def ascii_to_pil(text, font_size=10, bg_color=(20, 20, 20), fg_color=(255, 255, 255), font_path=None): font = get_font(font_path, font_size) if relative_width(text[1]) == 2: font_width, font_height = font.getsize(u"\u3000") else: font_width, font_height = font.getsize(u" ") img_height, img_width = get_ascii_image_size(text) y_padding = 1 out_img = np.zeros(((font_height+y_padding)*img_height, font_width*img_width, 3), dtype=np.uint8) out_img[:, :, 0] += bg_color[0] out_img[:, :, 1] += bg_color[1] out_img[:, :, 2] += bg_color[2] img = Image.fromarray(out_img) draw = ImageDraw.Draw(img) for index, line in enumerate(text.split("\n")): y = (font_height+y_padding)*index draw.text((0, y), line, fg_color, font=font) return img
MIT License
ecdavis/pants
pants/http/websocket.py
WebSocket.local_address
python
def local_address(self):
    if self._local_address is not None:
        return self._local_address
    elif self._connection:
        return self._connection.local_address
    else:
        return None
The address of the WebSocket on the local machine.

By default, this will be the value of ``socket.getsockname`` or None. It
is possible for user code to override the default behaviour and set the
value of the property manually. In order to return the property to its
default behaviour, user code then has to delete the value. Example::

    # default behaviour
    channel.local_address = custom_value
    # channel.local_address will return custom_value now
    del channel.local_address
    # default behaviour
https://github.com/ecdavis/pants/blob/88129d24020e95b71e8d0260a111dc0b457b0676/pants/http/websocket.py#L614-L635
import base64 import hashlib import re import struct import sys if sys.platform == "win32": from time import clock as time else: from time import time from pants.stream import StreamBufferOverflow from pants.http.utils import log try: from netstruct import NetStruct as _NetStruct except ImportError: class _NetStruct(object): def __init__(self, *a, **kw): raise NotImplementedError CLOSE_REASONS = { 1000: 'Normal Closure', 1001: 'Endpoint Going Away', 1002: 'Protocol Error', 1003: 'Unacceptable Data Type', 1005: 'No Status Code', 1006: 'Abnormal Close', 1007: 'Invalid UTF-8 Data', 1008: 'Message Violates Policy', 1009: 'Message Too Big', 1010: 'Extensions Not Present', 1011: 'Unexpected Condition Prevented Fulfillment', 1015: 'TLS Handshake Error' } WEBSOCKET_KEY = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" WEBSOCKET_VERSIONS = (13, 8, 0) FRAME_CONTINUATION = 0 FRAME_TEXT = 1 FRAME_BINARY = 2 FRAME_CLOSE = 8 FRAME_PING = 9 FRAME_PONG = 10 EntireMessage = object() RegexType = type(re.compile("")) Struct = struct.Struct STRUCT_H = Struct("!H") STRUCT_Q = Struct("!Q") class WebSocket(object): protocols = None allow_old_handshake = False def __init__(self, request, *arguments): self._connection = request.connection self.engine = self._connection.engine request.auto_finish = False self._arguments = arguments self.fileno = self._connection.fileno self._remote_address = None self._local_address = None self._pings = {} self._last_ping = 0 self._read_delimiter = EntireMessage self._recv_buffer_size_limit = self._buffer_size self._recv_buffer = "" self._read_buffer = None self._rb_type = None self._frag_frame = None self.connected = False self._closed = False self.is_secure = request.is_secure fail = False headers = {} if not request.headers.get('Connection','').lower() == 'upgrade' and not request.headers.get('Upgrade','').lower() == 'websocket': fail = True elif not self._safely_call(self.on_handshake, request, headers): fail = True if 'Sec-WebSocket-Version' in request.headers: if not 'Sec-WebSocket-Key' in request.headers: fail = True else: accept = base64.b64encode(hashlib.sha1( request.headers['Sec-WebSocket-Key'] + WEBSOCKET_KEY ).digest()) headers['Upgrade'] = 'websocket' headers['Connection'] = 'Upgrade' headers['Sec-WebSocket-Accept'] = accept self.version = int(request.headers['Sec-WebSocket-Version']) if self.version not in WEBSOCKET_VERSIONS: headers['Sec-WebSocket-Version'] = False fail = True elif not self.allow_old_handshake: fail = True else: self.version = 0 self._headers = headers self._request = request self._connection.on_read = self._finish_handshake self._connection.on_close = self._con_close self._connection.on_write = self._con_write self._connection.read_delimiter = 8 return if fail: if 'Sec-WebSocket-Version' in headers: request.send_status(400) request.send_headers({ 'Content-Type': 'text/plain', 'Content-Length': 15, 'Sec-WebSocket-Version': ', '.join(str(x) for x in WEBSOCKET_VERSIONS) }) request.send('400 Bad Request') else: request.send_status(426) headers = { 'Content-Type': 'text/plain', 'Content-Length': '20', 'Sec-WebSocket-Version': ', '.join(str(x) for x in WEBSOCKET_VERSIONS) } request.send_headers(headers) request.send("426 Upgrade Required") request.finish() return request.send_status(101) request.send_headers(headers) self._connection.on_read = self._con_read self._connection.on_close = self._con_close self._connection.on_write = self._con_write self._connection.read_delimiter = None self.connected = True self._safely_call(self.on_connect, *self._arguments) del 
self._arguments def _finish_handshake(self, key3): self._connection.read_delimiter = None request = self._request headers = self._headers del self._headers del self._request scheme = 'wss' if self.is_secure else 'ws' request.send_status(101) headers.update({ 'Upgrade': 'WebSocket', 'Connection': 'Upgrade', 'Sec-WebSocket-Origin': request.headers['Origin'], 'Sec-WebSocket-Location': '%s://%s%s' % ( scheme, request.host, request.url) }) request.send_headers(headers) try: request.send(challenge_response( request.headers, key3)) except ValueError: log.warning("Malformed WebSocket challenge to %r." % self) self.close(False) return self._expect_frame = True self.connected = True self._connection.on_read = self._con_old_read self._safely_call(self.on_connect, *self._arguments) del self._arguments @property def remote_address(self): if self._remote_address is not None: return self._remote_address elif self._connection: return self._connection.remote_address else: return None @remote_address.setter def remote_address(self, val): self._remote_address = val @remote_address.deleter def remote_address(self): self._remote_address = None @property
Apache License 2.0
aspuru-guzik-group/janus
NN.py
size_ring_counter
python
def size_ring_counter(ring_ls):
    ring_counter = []

    if ring_ls == (None, None):
        return [0 for i in range(19)]

    mol_ring_ls = [Chem.MolFromSmiles(smi) for smi in ring_ls]

    conseq_dbl_bnd_in_ring = 0
    for item in mol_ring_ls:
        conseq_dbl_bnd_in_ring += count_conseq_double(item)
    ring_counter.append(conseq_dbl_bnd_in_ring)

    for i in range(3, 21):
        count = 0
        for mol_ring in mol_ring_ls:
            if mol_ring.GetNumAtoms() == i:
                count += 1
        ring_counter.append(count)
    return ring_counter
Get the number of rings of sizes 3 to 20 and the number of consecutive
double bonds in a ring.

Parameters:
    ring_ls (list) : list of rings of a molecule

Returns (list):
    Of size 19 (1 for the number of consecutive double bonds,
    18 for the number of rings of sizes 3 to 20)
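A short usage sketch, relying on the obtain_rings helper defined in the same module (see the context below); the SMILES string is made up for illustration.

# Cyclohexane and benzene joined through rotatable bonds to a cyclopropane tail.
smi = "C1CCCCC1c1ccccc1CC2CC2"
rings = obtain_rings(smi)          # e.g. ['C1CCCCC1', 'c1ccccc1', 'C1CC1']
counts = size_ring_counter(rings)

print(len(counts))   # 19: one consecutive-double-bond count + ring counts for sizes 3..20
print(counts[1])     # how many 3-membered rings were found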
https://github.com/aspuru-guzik-group/janus/blob/426ddf29898664f9c444f333562ba58185701e59/NN.py#L163-L193
import os import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import rdkit from rdkit import Chem from rdkit import RDLogger from rdkit.Chem import Descriptors RDLogger.DisableLog('rdApp.*') import inspect from collections import OrderedDict import multiprocessing manager = multiprocessing.Manager() lock = multiprocessing.Lock() def get_rot_bonds_posn(mol): RotatableBond = Chem.MolFromSmarts('*-&!@*') rot = mol.GetSubstructMatches(RotatableBond) return rot def get_bond_indeces(mol, rot): bonds_idx = [] for i in range(len(rot)): bond = mol.GetBondBetweenAtoms(rot[i][0],rot[i][1]) bonds_idx.append(bond.GetIdx()) return bonds_idx def obtain_rings(smi): mol = Chem.MolFromSmiles(smi) rot = get_rot_bonds_posn(mol) if len(rot) == 0: return None, None bond_idx = get_bond_indeces(mol, rot) new_mol = Chem.FragmentOnBonds(mol, bond_idx, addDummies=False) new_smile = Chem.MolToSmiles(new_mol) smile_split_list = new_smile.split(".") rings = [] for item in smile_split_list: if '1' in item: rings.append(item) return rings def count_atoms(mol, atomic_num): pat = Chem.MolFromSmarts("[#{}]".format(atomic_num)) return len(mol.GetSubstructMatches(pat)) def get_num_bond_types(mol): bonds = mol.GetBonds() num_bonds = 0 num_double = 0 num_triple = 0 num_single = 0 num_aromatic = 0 for b in bonds: num_bonds += 1 if b.GetBondType() == rdkit.Chem.rdchem.BondType.SINGLE: num_single += 1 if b.GetBondType() == rdkit.Chem.rdchem.BondType.DOUBLE: num_double += 1 if b.GetBondType() == rdkit.Chem.rdchem.BondType.TRIPLE: num_triple += 1 if b.GetBondType() == rdkit.Chem.rdchem.BondType.AROMATIC: num_aromatic += 1 if num_bonds == 0: return [0, 0, 0, 0] else: return [num_single/num_bonds, num_double/num_bonds, num_triple/num_bonds, num_aromatic/num_bonds] def count_conseq_double(mol): bonds = mol.GetBonds() previous_BType = None count_conseq_doub = 0 for b in bonds: curr_BType = b.GetBondType() if previous_BType == curr_BType and curr_BType == rdkit.Chem.rdchem.BondType.DOUBLE: count_conseq_doub += 1 previous_BType = curr_BType return count_conseq_doub
Apache License 2.0
wglass/lighthouse
lighthouse/service.py
Service.reset_status
python
def reset_status(self):
    self.is_up = collections.defaultdict(lambda: None)
Sets the up/down status of the service ports to the default state. Useful for when the configuration is updated and the checks involved in determining the status might have changed.
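A small sketch of the effect, using the Service class shown in the context below:

service = Service()
service.is_up[8080] = False     # a failed check marked the port as down

service.reset_status()
print(service.is_up[8080])      # None again, i.e. status unknown until re-checked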
https://github.com/wglass/lighthouse/blob/f4ce6550895acc31e433ede0c05d366718a3ffe5/lighthouse/service.py#L96-L103
import collections import logging import six from .configurable import Configurable from .check import Check logger = logging.getLogger(__name__) class Service(Configurable): config_subdirectory = "services" def __init__(self): self.host = None self.ports = set() self.configured_ports = None self.discovery = None self.checks = collections.defaultdict(dict) self.check_interval = None self.is_up = collections.defaultdict(lambda: None) self.metadata = {} @classmethod def validate_config(cls, config): if "discovery" not in config: raise ValueError("No discovery method defined.") if not any([item in config for item in ["port", "ports"]]): raise ValueError("No port(s) defined.") cls.validate_check_configs(config) @classmethod def validate_check_configs(cls, config): if "checks" not in config: raise ValueError("No checks defined.") if "interval" not in config["checks"]: raise ValueError("No check interval defined.") for check_name, check_config in six.iteritems(config["checks"]): if check_name == "interval": continue Check.from_config(check_name, check_config) def apply_config(self, config): self.host = config.get("host", "127.0.0.1") self.configured_ports = config.get("ports", [config.get("port")]) self.discovery = config["discovery"] self.metadata = config.get("metadata", {}) self.update_ports() self.check_interval = config["checks"]["interval"] self.update_checks(config["checks"])
Apache License 2.0
wesselb/gpar
gpar/model.py
GPAR.__or__
python
def __or__(self, x_y_w):
    x, y, w = x_y_w
    gpar, x_ind = self.copy(), self.x_ind

    for is_last, ((y, w, mask), model) in last(
        zip(per_output(y, w, keep=self.impute), self.layers)
    ):
        x = x[mask]

        f, noise = model()
        obs = self._obs(x, x_ind, y, w, f, noise)
        gpar.layers.append(construct_model(f | obs, noise))

        if not is_last:
            x, x_ind = self._update_inputs(x, x_ind, y, f, obs)

    return gpar
Condition on data.

Args:
    x (tensor): Inputs.
    y (tensor): Outputs.
    w (tensor): Weights.

Returns:
    :class:`.gpar.GPAR`: Updated GPAR model.
https://github.com/wesselb/gpar/blob/70f5cb7cd2dec075e33dd7d9cd133b5bc1798777/gpar/model.py#L148-L176
import logging from lab import B from plum import Dispatcher from stheno import Obs, PseudoObs __all__ = ["GPAR"] log = logging.getLogger(__name__) _dispatch = Dispatcher() def merge(x, updates, to_update): concat = B.concat(x[~to_update], updates, axis=0) i_original = 0 i_update = B.sum(~to_update) indices = [] for i in range(len(to_update)): if to_update[i]: indices.append(i_update) i_update = i_update + 1 else: indices.append(i_original) i_original = i_original + 1 return B.take(concat, indices) def construct_model(f, noise): return lambda: (f, noise) def last(xs, select=None): if select is not None: select = set(select) saved_x = None i = -1 def should_yield(i_): return i >= 0 and (select is None or i_ in select) for x in xs: if should_yield(i): yield False, saved_x saved_x = x i += 1 if saved_x is not None and should_yield(i): yield True, saved_x class GPAR: def __init__(self, replace=False, impute=False, x_ind=None): self.replace = replace self.impute = impute self.layers = [] self.sparse = x_ind is not None self.x_ind = None if x_ind is None else x_ind def copy(self): gpar = GPAR(replace=self.replace, impute=self.impute, x_ind=self.x_ind) return gpar def add_layer(self, model_constructor): gpar = self.copy() gpar.layers = list(self.layers) + [model_constructor] return gpar
MIT License
edx/repo-tools
edx_repo_tools/data.py
Person.associated_with
python
def associated_with(self, *institutions):
    if self.agreement != 'institution':
        return False
    if self.expires_on and self.expires_on < date.today():
        return False

    institutions = [inst.lower() for inst in institutions]
    if self.institution and self.institution.lower() in institutions:
        return True

    return False
Return True if this Person is associated with an institution in
``institutions``.

Arguments:
    *institutions: The institutions to check against.
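A brief usage sketch based on the Person constructor shown in the context below; the contributor details are invented.

person = Person(
    username="jdoe",
    name="Jane Doe",
    email="jdoe@example.com",
    agreement="institution",
    institution="Example University",
)

print(person.associated_with("Example University", "Other Org"))  # True (case-insensitive match)
print(person.associated_with("Other Org"))                        # False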
https://github.com/edx/repo-tools/blob/69c03ce99a4a6d87bc0c0fecaf1a3cbdb2a3b281/edx_repo_tools/data.py#L106-L125
from datetime import date import functools import logging import os.path import click from github3.exceptions import NotFoundError from lazy import lazy import yaml from edx_repo_tools.auth import pass_github logging.basicConfig() LOGGER = logging.getLogger(__name__) OPEN_EDX_YAML = 'openedx.yaml' def iter_nonforks(hub, orgs): for org in orgs: for repo in hub.organization(org).repositories(): if repo.fork: LOGGER.debug("Skipping %s because it is a fork", repo.full_name) else: yield repo def iter_openedx_yaml(hub, orgs, branches=None): for repo in iter_nonforks(hub, orgs): for branch in (branches or [repo.default_branch]): try: contents = repo.file_contents(OPEN_EDX_YAML, ref=branch) except NotFoundError: contents = None if contents is not None: LOGGER.debug("Found openedx.yaml at %s:%s", repo.full_name, branch) try: data = yaml.safe_load(contents.decoded) except Exception as exc: LOGGER.error("Couldn't parse openedx.yaml from %s:%s, skipping repo", repo.full_name, branch, exc_info=True) else: if data is not None: yield repo, data break class Person: def __init__(self, username, name, email, agreement, email_ok=True, other_emails=None, institution=None, committer=None, jira=None, comments=None, expires_on=None, before=None, beta=None, is_robot=None, ): self.username = username self.name = name self.email = email self.email_ok = email_ok self.other_emails = other_emails self.agreement = agreement self.institution = institution self.committer = committer self.jira = jira self.comments = comments self.expires_on = expires_on self.before = before self.beta = beta self.is_robot = is_robot @classmethod def from_yaml(cls, username, yaml_data): return cls(username=username, **yaml_data)
Apache License 2.0
mayank-git-hub/ete-speech-recognition
pytorch_backend/nets_utils.py
pad_list
python
def pad_list(xs, pad_value):
    n_batch = len(xs)
    max_len = max(x.size(0) for x in xs)
    pad = xs[0].new(n_batch, max_len, *xs[0].size()[1:]).fill_(pad_value)

    for i in range(n_batch):
        pad[i, :xs[i].size(0)] = xs[i]

    return pad
Function to pad values

:param list xs: list of torch.Tensor [(L_1, D), (L_2, D), ..., (L_B, D)]
:param float pad_value: value for padding
:return: padded tensor (B, Lmax, D)
:rtype: torch.Tensor
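A minimal usage sketch; the tensor shapes are illustrative.

import torch

xs = [torch.ones(3, 2), torch.ones(5, 2)]   # two sequences of shape (L_i, D)
padded = pad_list(xs, 0.0)
print(padded.shape)   # torch.Size([2, 5, 2]); positions past each L_i hold 0.0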
https://github.com/mayank-git-hub/ete-speech-recognition/blob/7c768c67c738a7c6c404f18e215c0d8e81bb21ce/pytorch_backend/nets_utils.py#L18-L33
import numpy as np
import torch


def to_device(m, x):
    assert isinstance(m, torch.nn.Module)
    device = next(m.parameters()).device
    return x.to(device)
Apache License 2.0
getsenic/gatt-python
gatt/gatt_linux.py
DeviceManager.stop_discovery
python
def stop_discovery(self):
    try:
        self._adapter.StopDiscovery()
    except dbus.exceptions.DBusException as e:
        if (e.get_dbus_name() == 'org.bluez.Error.Failed') and (e.get_dbus_message() == 'No discovery started'):
            pass
        else:
            raise _error_from_dbus_error(e)
Stops the discovery started with `start_discovery`
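A minimal usage sketch; it needs BlueZ and D-Bus at runtime, and the adapter name 'hci0' is only the usual Linux default.

import gatt

manager = gatt.DeviceManager(adapter_name='hci0')
manager.start_discovery()
# ... discover for a while (manager.run() drives the event loop) ...
manager.stop_discovery()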
https://github.com/getsenic/gatt-python/blob/e1b147d54ff199571b6c0b43bdd3a9e1ce03850c/gatt/gatt_linux.py#L151-L161
try: import dbus import dbus.mainloop.glib except ImportError: import sys print("Module 'dbus' not found") print("Please run: sudo apt-get install python3-dbus") print("See also: https://github.com/getsenic/gatt-python#installing-gatt-sdk-for-python") sys.exit(1) import re from gi.repository import GObject from . import errors dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) dbus.mainloop.glib.threads_init() class DeviceManager: def __init__(self, adapter_name): self.listener = None self.adapter_name = adapter_name self._bus = dbus.SystemBus() try: adapter_object = self._bus.get_object('org.bluez', '/org/bluez/' + adapter_name) except dbus.exceptions.DBusException as e: raise _error_from_dbus_error(e) object_manager_object = self._bus.get_object("org.bluez", "/") self._adapter = dbus.Interface(adapter_object, 'org.bluez.Adapter1') self._adapter_properties = dbus.Interface(self._adapter, 'org.freedesktop.DBus.Properties') self._object_manager = dbus.Interface(object_manager_object, "org.freedesktop.DBus.ObjectManager") self._device_path_regex = re.compile('^/org/bluez/' + adapter_name + '/dev((_[A-Z0-9]{2}){6})$') self._devices = {} self._discovered_devices = {} self._interface_added_signal = None self._properties_changed_signal = None self._main_loop = None self.update_devices() @property def is_adapter_powered(self): return self._adapter_properties.Get('org.bluez.Adapter1', 'Powered') == 1 @is_adapter_powered.setter def is_adapter_powered(self, powered): return self._adapter_properties.Set('org.bluez.Adapter1', 'Powered', dbus.Boolean(powered)) def run(self): if self._main_loop: return self._interface_added_signal = self._bus.add_signal_receiver( self._interfaces_added, dbus_interface='org.freedesktop.DBus.ObjectManager', signal_name='InterfacesAdded') self._properties_changed_signal = self._bus.add_signal_receiver( self._properties_changed, dbus_interface=dbus.PROPERTIES_IFACE, signal_name='PropertiesChanged', arg0='org.bluez.Device1', path_keyword='path') def disconnect_signals(): for device in self._devices.values(): device.invalidate() self._properties_changed_signal.remove() self._interface_added_signal.remove() self._main_loop = GObject.MainLoop() try: self._main_loop.run() disconnect_signals() except Exception: disconnect_signals() raise def stop(self): if self._main_loop: self._main_loop.quit() self._main_loop = None def _manage_device(self, device): existing_device = self._devices.get(device.mac_address) if existing_device is not None: existing_device.invalidate() self._devices[device.mac_address] = device def update_devices(self): managed_objects = self._object_manager.GetManagedObjects().items() possible_mac_addresses = [self._mac_address(path) for path, _ in managed_objects] mac_addresses = [m for m in possible_mac_addresses if m is not None] new_mac_addresses = [m for m in mac_addresses if m not in self._devices] for mac_address in new_mac_addresses: self.make_device(mac_address) def devices(self): self.update_devices() return self._devices.values() def start_discovery(self, service_uuids=[]): discovery_filter = {'Transport': 'le'} if service_uuids: discovery_filter['UUIDs'] = service_uuids try: self._adapter.SetDiscoveryFilter(discovery_filter) self._adapter.StartDiscovery() except dbus.exceptions.DBusException as e: if e.get_dbus_name() == 'org.bluez.Error.NotReady': raise errors.NotReady( "Bluetooth adapter not ready. 
" "Set `is_adapter_powered` to `True` or run 'echo \"power on\" | sudo bluetoothctl'.") if e.get_dbus_name() == 'org.bluez.Error.InProgress': pass else: raise _error_from_dbus_error(e)
MIT License
tilezen/tilequeue
tilequeue/query/postgres.py
make_db_data_fetcher
python
def make_db_data_fetcher(postgresql_conn_info, template_path, reload_templates,
                         query_cfg, io_pool):
    sources = parse_source_data(query_cfg)
    queries_generator = make_queries_generator(
        sources, template_path, reload_templates)
    return DataFetcher(
        postgresql_conn_info, queries_generator, io_pool)
Returns an object which is callable with the zoom and unpadded bounds and which returns a list of rows.
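An illustrative wiring of the factory; the template name, database credentials and bounds below are hypothetical and only show the expected shape of each argument.

from multiprocessing.pool import ThreadPool

conn_info = dict(dbnames=['gis'], host='localhost', user='tiles', password='secret')
query_cfg = {
    'sources': {
        'water': [dict(template='water.jinja2', start_zoom=0, end_zoom=21)],
    },
}

fetcher = make_db_data_fetcher(conn_info, 'queries/', reload_templates=False,
                               query_cfg=query_cfg, io_pool=ThreadPool(4))
# rows = fetcher(10, unpadded_bounds)   # renders the templates and runs them against the database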
https://github.com/tilezen/tilequeue/blob/911e618a3162877aea1c13fb0e2632c264c8e724/tilequeue/query/postgres.py#L279-L290
from collections import namedtuple from jinja2 import Environment from jinja2 import FileSystemLoader from psycopg2.extras import RealDictCursor from tilequeue.query import DBConnectionPool from tilequeue.transform import calculate_padded_bounds import sys TemplateSpec = namedtuple('TemplateSpec', 'template start_zoom end_zoom') DataSource = namedtuple('DataSource', 'name template_specs') class TemplateFinder(object): def __init__(self, jinja_environment, cache_templates=False): self.environment = jinja_environment self.cache_templates = cache_templates if cache_templates: self.template_cache = {} def __call__(self, source_name): template = None if self.cache_templates: template = self.template_cache.get(source_name) if not template: template = self.environment.get_template(source_name) if self.cache_templates: self.template_cache[source_name] = template return template class TemplateQueryGenerator(object): def __init__(self, template_finder): self.template_finder = template_finder def __call__(self, source, bounds, zoom): template = self.template_finder(source) padded_bounds = dict( polygon=bounds, line=bounds, point=bounds, ) query = template.render(bounds=padded_bounds, zoom=zoom) return query class SourcesQueriesGenerator(object): def __init__(self, sources, query_generator): self.sources = sources self.query_generator = query_generator def __call__(self, zoom, bounds): queries = [] for source in self.sources: template_queries = [] for template_spec in source.template_specs: if template_spec.start_zoom <= zoom < template_spec.end_zoom: template_query = self.query_generator( template_spec.template, bounds, zoom) template_queries.append(template_query) if template_queries: source_query = '\nUNION ALL\n'.join(template_queries) queries.append(source_query) return queries def jinja_filter_geometry(value): return 'ST_AsBinary(%s)' % value def jinja_filter_bbox_filter(bounds, geometry_col_name, srid=3857): min_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[0], bounds[1]) max_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[2], bounds[3]) bbox_no_srid = 'ST_MakeBox2D(%s, %s)' % (min_point, max_point) bbox = 'ST_SetSrid(%s, %d)' % (bbox_no_srid, srid) bbox_filter = '%s && %s' % (geometry_col_name, bbox) return bbox_filter def jinja_filter_bbox_intersection(bounds, geometry_col_name, srid=3857): min_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[0], bounds[1]) max_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[2], bounds[3]) bbox_no_srid = 'ST_MakeBox2D(%s, %s)' % (min_point, max_point) bbox = 'ST_SetSrid(%s, %d)' % (bbox_no_srid, srid) bbox_intersection = 'st_intersection(%s, %s)' % (geometry_col_name, bbox) return bbox_intersection def jinja_filter_bbox_padded_intersection( bounds, geometry_col_name, pad_factor=1.1, srid=3857): padded_bounds = calculate_padded_bounds(pad_factor, bounds) return jinja_filter_bbox_intersection( padded_bounds.bounds, geometry_col_name, srid) def jinja_filter_bbox(bounds, srid=3857): min_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[0], bounds[1]) max_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[2], bounds[3]) bbox_no_srid = 'ST_MakeBox2D(%s, %s)' % (min_point, max_point) bbox = 'ST_SetSrid(%s, %d)' % (bbox_no_srid, srid) return bbox def jinja_filter_bbox_overlaps(bounds, geometry_col_name, srid=3857): min_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[0], bounds[1]) max_point = 'ST_MakePoint(%.12f, %.12f)' % (bounds[2], bounds[3]) bbox_no_srid = 'ST_MakeBox2D(%s, %s)' % (min_point, max_point) bbox = 'ST_SetSrid(%s, %d)' % (bbox_no_srid, srid) bbox_filter = 
'((%(col)s && %(bbox)s) AND (' ' st_overlaps(%(col)s, %(bbox)s) OR' ' st_contains(%(bbox)s, %(col)s)' '))' % dict(col=geometry_col_name, bbox=bbox) return bbox_filter def execute_query(conn, query): try: cursor = conn.cursor(cursor_factory=RealDictCursor) cursor.execute(query) rows = list(cursor.fetchall()) return rows except Exception: try: conn.close() except Exception: pass raise class DataFetchException(Exception): def __init__(self, exceptions): self.exceptions = exceptions msgs = ', '.join([x.message for x in exceptions]) super(DataFetchException, self).__init__(msgs) class DataFetcher(object): def __init__(self, conn_info, queries_generator, io_pool): self.conn_info = dict(conn_info) self.queries_generator = queries_generator self.io_pool = io_pool self.dbnames = self.conn_info.pop('dbnames') self.dbnames_query_index = 0 self.sql_conn_pool = DBConnectionPool( self.dbnames, self.conn_info) def fetch_tiles(self, all_data): for data in all_data: yield self, data def __call__(self, zoom, unpadded_bounds): queries = self.queries_generator(zoom, unpadded_bounds) n_conns = len(queries) assert n_conns, 'no queries' with self.sql_conn_pool.get_conns(n_conns) as sql_conns: async_results = [] for query, conn in zip(queries, sql_conns): async_result = self.io_pool.apply_async( execute_query, (conn, query)) async_results.append(async_result) all_source_rows = [] async_exceptions = [] for async_result in async_results: try: source_rows = async_result.get() all_source_rows.extend(source_rows) except Exception: exc_type, exc_value, exc_traceback = sys.exc_info() async_exception = exc_value async_exceptions.append(async_exception) continue if async_exceptions: raise DataFetchException(async_exceptions) read_rows = [] for row in all_source_rows: read_row = {} for k, v in row.items(): if isinstance(v, buffer): v = bytes(v) if v is not None: read_row[k] = v read_rows.append(read_row) return read_rows def make_jinja_environment(template_path): environment = Environment(loader=FileSystemLoader(template_path)) environment.filters['geometry'] = jinja_filter_geometry environment.filters['bbox_filter'] = jinja_filter_bbox_filter environment.filters['bbox_intersection'] = jinja_filter_bbox_intersection environment.filters['bbox_padded_intersection'] = ( jinja_filter_bbox_padded_intersection) environment.filters['bbox'] = jinja_filter_bbox environment.filters['bbox_overlaps'] = jinja_filter_bbox_overlaps return environment def make_queries_generator(sources, template_path, reload_templates): jinja_environment = make_jinja_environment(template_path) cache_templates = not reload_templates template_finder = TemplateFinder(jinja_environment, cache_templates) query_generator = TemplateQueryGenerator(template_finder) queries_generator = SourcesQueriesGenerator(sources, query_generator) return queries_generator def parse_source_data(queries_cfg): sources_cfg = queries_cfg['sources'] sources = [] for source_name, templates in sources_cfg.items(): template_specs = [] for template_data in templates: template = template_data['template'] start_zoom = int(template_data.get('start_zoom', 0)) end_zoom = int(template_data.get('end_zoom', 21)) template_spec = TemplateSpec(template, start_zoom, end_zoom) template_specs.append(template_spec) source = DataSource(source_name, template_specs) sources.append(source) return sources
MIT License
sheynkman-lab/long-read-proteogenomics
modules/visualization_track/src/make_pacbio_cds_gtf.py
make_pacbio_cds_gtf
python
def make_pacbio_cds_gtf(sample_gtf, refined_orfs, called_orfs, pb_gene, name, include_transcript):
    refined_db = pd.read_table(refined_orfs)
    representative_accessions = refined_db['base_acc'].to_list()

    # restrict the sample gtf to exon rows of representative accessions
    gtf = gtfparse.read_gtf(sample_gtf)
    gtf = gtf[['seqname', 'feature', 'start', 'end', 'strand', 'transcript_id']]
    gtf = gtf[gtf['feature'] == 'exon']
    gtf.columns = ['chr', 'feat', 'start', 'end', 'strand', 'acc']
    gtf = gtf[gtf.acc.isin(representative_accessions)]

    # pb_acc -> [chr, strand, exon coords, block lengths, cumulative lengths, prior cumulative lengths]
    pbs = defaultdict(lambda: ['chr', 'strand', [], [], [], []])
    for i, row in gtf.iterrows():
        chr, feat, start, end, strand, acc = row
        pbs[acc][0] = chr
        pbs[acc][1] = strand
        pbs[acc][2].append([int(start), int(end)])

    # sort exon coords by strand and derive the block-length bookkeeping
    for acc, infos in pbs.items():
        strand = infos[1]
        if strand == '+':
            infos[2] = sorted(infos[2])
        elif strand == '-':
            infos[2] = sorted(infos[2], reverse=True)
        infos[3] = np.array([end - start + 1 for [start, end] in infos[2]])
        infos[4] = np.cumsum(infos[3])
        infos[5] = infos[4] - infos[3]

    # ORF ranges joined with CPM from the refined database
    ranges = pd.read_table(called_orfs)[['pb_acc', 'orf_start', 'orf_end']]
    ranges = pd.merge(
        ranges, refined_db[['base_acc', 'CPM']],
        left_on='pb_acc', right_on='base_acc', how='inner')
    ranges = ranges[['pb_acc', 'orf_start', 'orf_end', 'CPM']]

    # pb_acc -> gene name lookup
    pb_gene = pd.read_table(pb_gene)
    pb_gene = pd.Series(pb_gene.gene.values, index=pb_gene.pb_acc).to_dict()

    with open(f"{name}_with_cds.gtf", 'w') as ofile:
        for i, row in ranges.iterrows():
            acc, orf_start, orf_end, cpm = row
            cpm = round(cpm)
            orf_end = orf_end - 3
            if acc in pbs:
                if acc in pb_gene.keys():
                    gene = pb_gene[acc]
                else:
                    gene = '-'
                if gene == '-':
                    continue
                infos = pbs[acc]
                chr, strand, coords, blens, cblens, pblens = infos
                logger.info(f"\n{acc}\t{strand}\n{orf_start} - {orf_end}\n{blens}\n{cblens}")
                i1, delta1 = get_first_block_index(orf_start, cblens, pblens)
                i2, delta2 = get_first_block_index(orf_end, cblens, pblens)
                if strand == '+':
                    orf_coords = make_cds_coords_positive_strand(i1, delta1, i2, delta2, coords)
                elif strand == '-':
                    orf_coords = make_cds_coords_negative_strand(i1, delta1, i2, delta2, coords)
                acc_w_gene_w_cpm = gene + '|' + acc + '|' + str(cpm)
                out_acc = f'gene_id "{gene}"; transcript_id "{acc_w_gene_w_cpm}";'
                if include_transcript:
                    tstart, tend = get_min_and_max_coords_from_exon_chain(coords)
                    ofile.write('\t'.join([chr, 'hg38_canon', 'transcript', str(tstart), str(tend), '.', strand, '.', out_acc]) + '\n')
                for [start, end] in coords:
                    ofile.write('\t'.join([chr, 'hg38_canon', 'exon', str(start), str(end), '.', strand, '.', out_acc]) + '\n')
                for [start, end] in orf_coords:
                    ofile.write('\t'.join([chr, 'hg38_canon', 'CDS', str(start), str(end), '.', strand, '.', out_acc]) + '\n')
Makes PacBio CDS and saves file with CDS.

Args:
    sample_gtf (filename): sample gtf file
    refined_orfs (filename): aggregate ORF info from the Refined DB
    called_orfs (filename): ORF calls from ORF_Calling
    pb_gene (filename): PacBio gene name cross reference
    name (string): name of sample
    include_transcript (bool): whether to include transcript in saved gtf file
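A sketch of a call; the file names are hypothetical, and each table must already carry the columns the function reads (base_acc/CPM in the refined DB, pb_acc/orf_start/orf_end in the ORF calls, pb_acc/gene in the cross reference).

make_pacbio_cds_gtf(
    sample_gtf="jurkat_corrected.gtf",
    refined_orfs="jurkat_orf_refined.tsv",
    called_orfs="jurkat_best_orf.tsv",
    pb_gene="pb_gene_xref.tsv",
    name="jurkat",
    include_transcript=True,
)
# writes jurkat_with_cds.gtf in the working directory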
https://github.com/sheynkman-lab/long-read-proteogenomics/blob/0845d0c6b9ecfc07d5893e52ef1b5a83457df3e2/modules/visualization_track/src/make_pacbio_cds_gtf.py#L188-L285
import pandas as pd import numpy as np from collections import defaultdict import copy import argparse import gtfparse import logging logger = logging.getLogger('cds_logger') logger.setLevel(logging.DEBUG) fh = logging.FileHandler('make_pacbio_cds.log') fh.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.ERROR) formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S') ch.setFormatter(formatter) fh.setFormatter(formatter) logger.addHandler(ch) logger.addHandler(fh) def string_to_boolean(string): if isinstance(string, bool): return str if string.lower() in ('yes', 'true', 't', 'y', '1'): return True elif string.lower() in ('no', 'false', 'f', 'n', '0'): return False else: raise argparse.ArgumentTypeError('Boolean value expected.') def get_first_block_index(orf_coord, cblens, pblens): for i, cblen in enumerate(cblens): if orf_coord <= cblen: delta = cblen - orf_coord return i, delta logger.warning(f"ORF COORDINATE IS NOT FOUND WITHIN BLOCKS") return i, 0 def make_cds_coords_positive_strand(i1, delta1, i2, delta2, coords): orf_coords = copy.deepcopy(coords) orf_coords = orf_coords[i1: i2+1] orf_coords[0][0] = orf_coords[0][1] - delta1 orf_coords[-1][1] = orf_coords[-1][1] - delta2 return orf_coords def make_cds_coords_negative_strand(i1, delta1, i2, delta2, coords): orf_coords = copy.deepcopy(coords) orf_coords = orf_coords[i1: i2+1] orf_coords[0][1] = orf_coords[0][0] + delta1 orf_coords[-1][0] = orf_coords[-1][0] + delta2 return orf_coords def ceiling_cpm(cpm, ceiling = 1000): if cpm > ceiling: return ceiling else: return cpm def get_min_and_max_coords_from_exon_chain(coords): min_coord = min(map(min, coords)) max_coord = max(map(max, coords)) return min_coord, max_coord
MIT License
clusterhq/flocker
flocker/apiclient/_client.py
IFlockerAPIV1Client.create_container
python
def create_container(node_uuid, name, image, volumes=None):
:param UUID node_uuid: The ``UUID`` of the node where the container will
    be started.
:param unicode name: The name to assign to the container.
:param DockerImage image: The Docker image which the container will run.
:param Optional[Sequence[MountedDataset]] volumes: Volumes to mount on
    container.
:return: ``Deferred`` firing with the configured ``Container`` or
    ``ContainerAlreadyExists`` if the supplied container name already
    exists.
https://github.com/clusterhq/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/flocker/apiclient/_client.py#L355-L368
from uuid import UUID, uuid4 from json import dumps from datetime import datetime from os import environ from ipaddr import IPv4Address, IPv6Address, IPAddress from pytz import UTC from zope.interface import Interface, implementer from pyrsistent import PClass, field, pmap_field, pmap from eliot import ActionType, Field from eliot.twisted import DeferredContext from twisted.internet.defer import succeed, fail from twisted.python.filepath import FilePath from twisted.web.http import ( CREATED, OK, CONFLICT, NOT_FOUND, PRECONDITION_FAILED, ) from twisted.internet.utils import getProcessOutput from twisted.internet.task import deferLater from treq import json_content, content from ..ca import treq_with_authentication from ..control import Leases as LeasesModel, LeaseError, DockerImage from ..common import retry_failure from .. import __version__ _LOG_HTTP_REQUEST = ActionType( "flocker:apiclient:http_request", [Field.forTypes("url", [bytes, unicode], "Request URL."), Field.forTypes("method", [bytes, unicode], "Request method."), Field("request_body", lambda o: o, "Request JSON body.")], [Field.forTypes("response_code", [int], "Response code."), Field("response_body", lambda o: o, "JSON response body.")], "A HTTP request.") _LOG_CONDITIONAL_CREATE = ActionType( u"flocker:apiclient:conditional_create", [], [], u"Conditionally create a dataset.") NoneType = type(None) class ServerResponseMissingElementError(Exception): def __init__(self, key, response): message = u'{!r} not found in {!r}'.format(key, response) Exception.__init__(self, message) class Dataset(PClass): dataset_id = field(type=UUID, mandatory=True) primary = field(type=UUID, mandatory=True) maximum_size = field(type=(int, NoneType), mandatory=True) metadata = pmap_field(unicode, unicode) class DatasetState(PClass): dataset_id = field(type=UUID, mandatory=True) primary = field(type=(UUID, NoneType), mandatory=True) maximum_size = field(type=(int, NoneType), mandatory=True) path = field(type=(FilePath, NoneType), mandatory=True) class Lease(PClass): dataset_id = field(type=UUID, mandatory=True) node_uuid = field(type=UUID, mandatory=True) expires = field(type=(float, int, NoneType), mandatory=True) class MountedDataset(PClass): dataset_id = field(type=UUID, mandatory=True) mountpoint = field(type=unicode, mandatory=True) def _parse_volumes(data_list): if data_list: return [ MountedDataset( dataset_id=UUID(data[u'dataset_id']), mountpoint=data[u'mountpoint'], ) for data in data_list ] else: return None class Container(PClass): node_uuid = field(type=UUID, mandatory=True) name = field(type=unicode, mandatory=True) image = field(type=DockerImage, mandatory=True) volumes = field(initial=None) class ContainerState(PClass): node_uuid = field(type=UUID, mandatory=True) name = field(type=unicode, mandatory=True) image = field(type=DockerImage, mandatory=True) running = field(type=bool, mandatory=True) volumes = field(initial=None, mandatory=True) class Node(PClass): uuid = field(type=UUID, mandatory=True) public_address = field( type=(IPv4Address, IPv6Address), mandatory=True, ) class DatasetAlreadyExists(Exception): class LeaseAlreadyHeld(Exception): class ContainerAlreadyExists(Exception): class ConfigurationChanged(Exception): class DatasetsConfiguration(PClass): tag = field(mandatory=True) datasets = pmap_field(UUID, Dataset) def __iter__(self): return self.datasets.itervalues() class IFlockerAPIV1Client(Interface): def create_dataset(primary, maximum_size=None, dataset_id=None, metadata=pmap(), configuration_tag=None): def 
move_dataset(primary, dataset_id, configuration_tag=None): def delete_dataset(dataset_id, configuration_tag=None): def list_datasets_configuration(): def list_datasets_state(): def acquire_lease(dataset_id, node_uuid, expires): def release_lease(dataset_id): def list_leases(): def version(): def list_nodes():
Apache License 2.0
prompt-toolkit/ptterm
ptterm/process.py
Process.write_key
python
def write_key(self, key):
    data = prompt_toolkit_key_to_vt100_key(
        key, application_mode=self.screen.in_application_mode)
    self.write_input(data)
Write prompt_toolkit Key.
https://github.com/prompt-toolkit/ptterm/blob/e104399e67628809511493a9d2423a9b13ffaf46/ptterm/process.py#L128-L134
from __future__ import unicode_literals from prompt_toolkit.document import Document from prompt_toolkit.eventloop import get_event_loop from prompt_toolkit.utils import is_windows from six.moves import range from six import text_type from .key_mappings import prompt_toolkit_key_to_vt100_key from .screen import BetterScreen from .stream import BetterStream import os import time __all__ = ( 'Process', ) def create_terminal(command, before_exec_func): if is_windows(): from .backends.win32 import Win32Terminal return Win32Terminal() else: from .backends.posix import PosixTerminal return PosixTerminal.from_command(command, before_exec_func=before_exec_func) class Process(object): def __init__(self, invalidate, command=None, before_exec_func=None, bell_func=None, done_callback=None, has_priority=None): assert callable(invalidate) assert bell_func is None or callable(bell_func) assert done_callback is None or callable(done_callback) assert has_priority is None or callable(has_priority) self.loop = get_event_loop() self.invalidate = invalidate self.done_callback = done_callback self.has_priority = has_priority or (lambda: True) self.suspended = False self._reader_connected = False self.terminal = create_terminal(command, before_exec_func=before_exec_func) self.terminal.add_input_ready_callback(self._read) if done_callback is not None: self.terminal.ready_f.add_done_callback(lambda _: done_callback()) self.sx = 0 self.sy = 0 self.screen = BetterScreen(self.sx, self.sy, write_process_input=self.write_input, bell_func=bell_func) self.stream = BetterStream(self.screen) self.stream.attach(self.screen) def start(self): self.set_size(120, 24) self.terminal.start() self.terminal.connect_reader() def set_size(self, width, height): assert isinstance(width, int) assert isinstance(height, int) if (self.sx, self.sy) != (width, height): self.terminal.set_size(width, height) self.screen.resize(lines=height, columns=width) self.screen.lines = height self.screen.columns = width self.sx = width self.sy = height def write_input(self, data, paste=False): if paste and self.screen.bracketed_paste_enabled: data = '\x1b[200~' + data + '\x1b[201~' self.terminal.write_text(data)
BSD 3-Clause New or Revised License
flipperpa/peregrine
peregrine/templatetags/peregrine.py
settings_value
python
def settings_value(name):
    return getattr(settings, name, None)
Allows a value from Django's settings to be included in a template tag.
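A short usage sketch; the tag library loads under the module name peregrine, and TIME_ZONE is just an example setting.

# In a template:
#     {% load peregrine %}
#     <p>{% settings_value "TIME_ZONE" %}</p>
#
# The tag is a thin wrapper, so from Python it behaves like:
value = settings_value("TIME_ZONE")   # None if the setting is absent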
https://github.com/flipperpa/peregrine/blob/1ba17753e83f84b38d0dc3bb8bc9f862e48372a7/peregrine/templatetags/peregrine.py#L88-L92
from django import template from django.conf import settings from wagtail.core.models import Site from ..models import SitePost register = template.Library() @register.simple_tag(takes_context=True) def get_site_root(context): return Site.find_for_request(context["request"]).root_page def has_menu_children(page): return page.get_children().live().in_menu().exists() @register.inclusion_tag("peregrine/tags/top_menu.html", takes_context=True) def top_menu(context, parent, calling_page=None): menu_items = ( parent.get_children() .filter( live=True, show_in_menus=True, ) .exclude( content_type__model="sitepost", ) ) for menu_item in menu_items: menu_item.show_dropdown = has_menu_children(menu_item) return { "calling_page": calling_page, "menu_items": menu_items, "request": context["request"], } @register.inclusion_tag("peregrine/tags/top_menu_children.html", takes_context=True) def top_menu_children(context, parent): menu_items_children = parent.get_children() menu_items_children = menu_items_children.live().in_menu() return { "parent": parent, "menu_items_children": menu_items_children, "request": context["request"], } @register.inclusion_tag("peregrine/tags/top_menu_posts.html", takes_context=True) def top_menu_posts(context): posts = SitePost.objects.filter(live=True, show_in_menus=True,).order_by( "-post_date", )[:10] show_posts = posts.count() > 0 return { "posts": posts, "show_posts": show_posts, } @register.simple_tag
BSD 3-Clause New or Revised License
vs-uulm/nemesys
src/nemere/visualization/simplePrint.py
resolveIdx2Seg
python
def resolveIdx2Seg(dc: DistanceCalculator, segseq: Sequence[Sequence[int]]):
    print(tabulate(
        [[dc.segments[s].bytes.hex() if s != -1 else None for s in m] for m in segseq],
        disable_numparse=True, headers=range(len(segseq[0]))))
Prints tabulated hex representations of (aligned) sequences of indices.

:param dc: DistanceCalculator to use for resolving indices to MessageSegment objects.
:param segseq: list of segment indices (from raw segment list) per message.
https://github.com/vs-uulm/nemesys/blob/4087944180b55f2e33f474b4785c03755f42a858/src/nemere/visualization/simplePrint.py#L69-L77
from itertools import chain from time import strftime from typing import Tuple, Iterable, Sequence, Dict, List, Union from tabulate import tabulate from netzob.Common.Utils.MatrixList import MatrixList from netzob.Model.Vocabulary.Messages.AbstractMessage import AbstractMessage from nemere.inference.segments import MessageSegment from nemere.inference.templates import DistanceCalculator, Template from nemere.validation.dissectorMatcher import MessageComparator from nemere.visualization import bcolors as bcolors def printMatrix(lines: Iterable[Iterable], headers: Iterable=None): ml = MatrixList() if headers: ml.headers = headers strlines = [ [ "{:0.3f}".format(cell) if isinstance(cell, float) else str(cell) for cell in row] for row in lines ] ml.extend(strlines) print(ml) def alignDescreteValues(listA: list, listB: list) -> Tuple[list, list]: rest = listB.copy() newA = list() newB = list() for valA in listA: consume = 0 while len(rest) > consume and rest[consume] <= valA: consume += 1 if consume == 0: newA.append(valA) newB.append(None) if consume > 0: newA.extend([None]*(consume-1) + [valA]) newB.extend(rest[:consume]) rest = rest[consume:] if len(rest) > 0: newA.extend([None]*len(rest)) newB.extend(rest) return newA, newB def tabuSeqOfSeg(sequence: Sequence[Sequence[MessageSegment]]): print(tabulate(((sg.bytes.hex() if sg is not None else '' for sg in msg) for msg in sequence), headers=range(len(sequence[0])), showindex="always", disable_numparse=True))
MIT License
mikeshardmind/sinbadcogs
quotetools/quotetools.py
QuoteTools.quote
python
async def quote(
    self, ctx, channels: commands.Greedy[GlobalTextChannel] = None, *messageids: int
):
    if not messageids or not channels:
        return await ctx.send_help()

    chans = [c.matched_channel for c in channels]
    msgs = await find_messages(ctx, messageids, chans)
    if not msgs:
        return await ctx.maybe_send_embed("No matching message found.")

    for m in msgs:
        if await ctx.embed_requested():
            em = embed_from_msg(m)
            await ctx.send(embed=em)
        else:
            msg1 = "\n".join(
                [
                    f"Author: {m.author}({m.author.id})",
                    f"Channel: <#{m.channel.id}>",
                    f"Time(UTC): {m.created_at.isoformat()}",
                ]
            )
            if len(msg1) + len(m.clean_content) < 2000:
                await ctx.send(msg1 + m.clean_content)
            else:
                await ctx.send(msg1)
                await ctx.send(m.clean_content)
Gets (a) message(s) by ID(s).

User must be able to see the message(s).
You need to specify specific channels to search (by ID or mention only!)
https://github.com/mikeshardmind/sinbadcogs/blob/e9353fb63f18f5c2025e177f89b028aa7ac7a63d/quotetools/quotetools.py#L72-L108
from __future__ import annotations import re from typing import NamedTuple import discord from redbot.core import commands from .helpers import embed_from_msg, find_messages CHANNEL_RE = re.compile(r"^<#(\d{15,21})>$|^(\d{15,21})$") class GlobalTextChannel(NamedTuple): matched_channel: discord.TextChannel @classmethod async def convert(cls, ctx: commands.Context, argument: str): bot = ctx.bot match = CHANNEL_RE.match(argument) channel = None if match: idx = next(filter(None, match.groups()), None) if idx: channel_id = int(idx) channel = bot.get_channel(channel_id) if not channel or not isinstance(channel, discord.TextChannel): raise commands.BadArgument('Channel "{}" not found.'.format(argument)) return cls(channel) class QuoteTools(commands.Cog): __author__ = "mikeshardmind(Sinbad)" __version__ = "2021.03" async def red_delete_data_for_user(self, **kwargs): return def format_help_for_context(self, ctx): pre_processed = super().format_help_for_context(ctx) return f"{pre_processed}\nCog Version: {self.__version__}" def __init__(self, bot, *args, **kwargs): super().__init__(*args, **kwargs) self.bot = bot @commands.command()
Apache License 2.0
svn2github/gyp
pylib/gyp/generator/ninja.py
NinjaWriter.WriteActionsRulesCopies
python
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                            mac_bundle_depends):
    outputs = []
    if self.is_mac_bundle:
        mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
    else:
        mac_bundle_resources = []
    extra_mac_bundle_resources = []

    if 'actions' in spec:
        outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                     extra_mac_bundle_resources)
    if 'rules' in spec:
        outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                                   mac_bundle_resources,
                                   extra_mac_bundle_resources)
    if 'copies' in spec:
        outputs += self.WriteCopies(spec['copies'], prebuild,
                                    mac_bundle_depends)

    if 'sources' in spec and self.flavor == 'win':
        outputs += self.WriteWinIdlFiles(spec, prebuild)

    stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)

    if self.is_mac_bundle:
        xcassets = self.WriteMacBundleResources(
            extra_mac_bundle_resources + mac_bundle_resources,
            mac_bundle_depends)
        partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
        self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)

    return stamp
Write out the Actions, Rules, and Copies steps. Return a path representing the outputs of these steps.
https://github.com/svn2github/gyp/blob/e0ee72ddc7fb97eb33d530cf684efcbe4d27ecb3/pylib/gyp/generator/ninja.py#L540-L572
import collections import copy import hashlib import json import multiprocessing import os.path import re import signal import subprocess import sys import gyp import gyp.common from gyp.common import OrderedSet import gyp.msvs_emulation import gyp.MSVSUtil as MSVSUtil import gyp.xcode_emulation from cStringIO import StringIO from gyp.common import GetEnvironFallback import gyp.ninja_syntax as ninja_syntax generator_default_variables = { 'EXECUTABLE_PREFIX': '', 'EXECUTABLE_SUFFIX': '', 'STATIC_LIB_PREFIX': 'lib', 'STATIC_LIB_SUFFIX': '.a', 'SHARED_LIB_PREFIX': 'lib', 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', 'PRODUCT_DIR': '$!PRODUCT_DIR', 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', 'RULE_INPUT_ROOT': '${root}', 'RULE_INPUT_DIRNAME': '${dirname}', 'RULE_INPUT_PATH': '${source}', 'RULE_INPUT_EXT': '${ext}', 'RULE_INPUT_NAME': '${name}', } generator_additional_non_configuration_keys = [] generator_additional_path_sections = [] generator_extra_sources_for_rules = [] generator_filelist_paths = None generator_supports_multiple_toolsets = ( os.environ.get('GYP_CROSSCOMPILE') or os.environ.get('AR_host') or os.environ.get('CC_host') or os.environ.get('CXX_host') or os.environ.get('AR_target') or os.environ.get('CC_target') or os.environ.get('CXX_target')) def StripPrefix(arg, prefix): if arg.startswith(prefix): return arg[len(prefix):] return arg def QuoteShellArgument(arg, flavor): if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg): return arg if flavor == 'win': return gyp.msvs_emulation.QuoteForRspFile(arg) return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" def Define(d, flavor): if flavor == 'win': d = d.replace('#', '\\%03o' % ord('#')) return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor) def AddArch(output, arch): output, extension = os.path.splitext(output) return '%s.%s%s' % (output, arch, extension) class Target(object): def __init__(self, type): self.type = type self.preaction_stamp = None self.precompile_stamp = None self.actions_stamp = None self.binary = None self.bundle = None self.component_objs = None self.import_lib = None def Linkable(self): return self.type in ('static_library', 'shared_library') def UsesToc(self, flavor): if flavor == 'win' or self.bundle: return False return self.type in ('shared_library', 'loadable_module') def PreActionInput(self, flavor): if self.UsesToc(flavor): return self.FinalOutput() + '.TOC' return self.FinalOutput() or self.preaction_stamp def PreCompileInput(self): return self.actions_stamp or self.precompile_stamp def FinalOutput(self): return self.bundle or self.binary or self.actions_stamp class NinjaWriter(object): def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir, output_file, toplevel_build, output_file_name, flavor, toplevel_dir=None): self.hash_for_rules = hash_for_rules self.target_outputs = target_outputs self.base_dir = base_dir self.build_dir = build_dir self.ninja = ninja_syntax.Writer(output_file) self.toplevel_build = toplevel_build self.output_file_name = output_file_name self.flavor = flavor self.abs_build_dir = None if toplevel_dir is not None: self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) self.obj_ext = '.obj' if flavor == 'win' else '.o' if flavor == 'win': self.win_env = {} for arch in ('x86', 'x64'): self.win_env[arch] = 'environment.' 
+ arch build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) self.build_to_base = os.path.join(build_to_top, base_dir) base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) self.base_to_build = os.path.join(base_to_top, build_dir) def ExpandSpecial(self, path, product_dir=None): PRODUCT_DIR = '$!PRODUCT_DIR' if PRODUCT_DIR in path: if product_dir: path = path.replace(PRODUCT_DIR, product_dir) else: path = path.replace(PRODUCT_DIR + '/', '') path = path.replace(PRODUCT_DIR + '\\', '') path = path.replace(PRODUCT_DIR, '.') INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' if INTERMEDIATE_DIR in path: int_dir = self.GypPathToUniqueOutput('gen') path = path.replace(INTERMEDIATE_DIR, os.path.join(product_dir or '', int_dir)) CONFIGURATION_NAME = '$|CONFIGURATION_NAME' path = path.replace(CONFIGURATION_NAME, self.config_name) return path def ExpandRuleVariables(self, path, root, dirname, source, ext, name): if self.flavor == 'win': path = self.msvs_settings.ConvertVSMacros( path, config=self.config_name) path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], dirname) path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) return path def GypPathToNinja(self, path, env=None): if env: if self.flavor == 'mac': path = gyp.xcode_emulation.ExpandEnvVars(path, env) elif self.flavor == 'win': path = gyp.msvs_emulation.ExpandMacros(path, env) if path.startswith('$!'): expanded = self.ExpandSpecial(path) if self.flavor == 'win': expanded = os.path.normpath(expanded) return expanded if '$|' in path: path = self.ExpandSpecial(path) assert '$' not in path, path return os.path.normpath(os.path.join(self.build_to_base, path)) def GypPathToUniqueOutput(self, path, qualified=True): path = self.ExpandSpecial(path) assert not path.startswith('$'), path obj = 'obj' if self.toolset != 'target': obj += '.' + self.toolset path_dir, path_basename = os.path.split(path) if qualified: path_basename = self.name + '.' 
+ path_basename return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, path_basename)) def WriteCollapsedDependencies(self, name, targets, order_only=None): assert targets == filter(None, targets), targets if len(targets) == 0: assert not order_only return None if len(targets) > 1 or order_only: stamp = self.GypPathToUniqueOutput(name + '.stamp') targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) self.ninja.newline() return targets[0] def _SubninjaNameForArch(self, arch): output_file_base = os.path.splitext(self.output_file_name)[0] return '%s.%s.ninja' % (output_file_base, arch) def WriteSpec(self, spec, config_name, generator_flags): self.config_name = config_name self.name = spec['target_name'] self.toolset = spec['toolset'] config = spec['configurations'][config_name] self.target = Target(spec['type']) self.is_standalone_static_library = bool( spec.get('standalone_static_library', 0)) self.uses_cpp = False self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.xcode_settings = self.msvs_settings = None if self.flavor == 'mac': self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) if self.flavor == 'win': self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) arch = self.msvs_settings.GetArch(config_name) self.ninja.variable('arch', self.win_env[arch]) self.ninja.variable('cc', '$cl_' + arch) self.ninja.variable('cxx', '$cl_' + arch) self.ninja.variable('cc_host', '$cl_' + arch) self.ninja.variable('cxx_host', '$cl_' + arch) if self.flavor == 'mac': self.archs = self.xcode_settings.GetActiveArchs(config_name) if len(self.archs) > 1: self.arch_subninjas = dict( (arch, ninja_syntax.Writer( OpenOutput(os.path.join(self.toplevel_build, self._SubninjaNameForArch(arch)), 'w'))) for arch in self.archs) actions_depends = [] compile_depends = [] if 'dependencies' in spec: for dep in spec['dependencies']: if dep in self.target_outputs: target = self.target_outputs[dep] actions_depends.append(target.PreActionInput(self.flavor)) compile_depends.append(target.PreCompileInput()) actions_depends = filter(None, actions_depends) compile_depends = filter(None, compile_depends) actions_depends = self.WriteCollapsedDependencies('actions_depends', actions_depends) compile_depends = self.WriteCollapsedDependencies('compile_depends', compile_depends) self.target.preaction_stamp = actions_depends self.target.precompile_stamp = compile_depends extra_sources = [] mac_bundle_depends = [] self.target.actions_stamp = self.WriteActionsRulesCopies( spec, extra_sources, actions_depends, mac_bundle_depends) compile_depends_stamp = (self.target.actions_stamp or compile_depends) link_deps = [] sources = extra_sources + spec.get('sources', []) if sources: if self.flavor == 'mac' and len(self.archs) > 1: for arch in self.archs: self.ninja.subninja(self._SubninjaNameForArch(arch)) pch = None if self.flavor == 'win': gyp.msvs_emulation.VerifyMissingSources( sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) pch = gyp.msvs_emulation.PrecompiledHeader( self.msvs_settings, config_name, self.GypPathToNinja, self.GypPathToUniqueOutput, self.obj_ext) else: pch = gyp.xcode_emulation.MacPrefixHeader( self.xcode_settings, self.GypPathToNinja, lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) link_deps = self.WriteSources( self.ninja, config_name, config, sources, compile_depends_stamp, pch, spec) obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] if obj_outputs: if self.flavor != 'mac' or len(self.archs) 
== 1: link_deps += [self.GypPathToNinja(o) for o in obj_outputs] else: print "Warning: Actions/rules writing object files don't work with " "multiarch targets, dropping. (target %s)" % spec['target_name'] elif self.flavor == 'mac' and len(self.archs) > 1: link_deps = collections.defaultdict(list) if self.flavor == 'win' and self.target.type == 'static_library': self.target.component_objs = link_deps output = None is_empty_bundle = not link_deps and not mac_bundle_depends if link_deps or self.target.actions_stamp or actions_depends: output = self.WriteTarget(spec, config_name, config, link_deps, self.target.actions_stamp or actions_depends) if self.is_mac_bundle: mac_bundle_depends.append(output) if self.is_mac_bundle: output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) if not output: return None assert self.target.FinalOutput(), output return self.target def _WinIdlRule(self, source, prebuild, outputs): outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( source, self.config_name) outdir = self.GypPathToNinja(outdir) def fix_path(path, rel=None): path = os.path.join(outdir, path) dirname, basename = os.path.split(source) root, ext = os.path.splitext(basename) path = self.ExpandRuleVariables( path, root, dirname, source, ext, basename) if rel: path = os.path.relpath(path, rel) return path vars = [(name, fix_path(value, outdir)) for name, value in vars] output = [fix_path(p) for p in output] vars.append(('outdir', outdir)) vars.append(('idlflags', flags)) input = self.GypPathToNinja(source) self.ninja.build(output, 'idl', input, variables=vars, order_only=prebuild) outputs.extend(output) def WriteWinIdlFiles(self, spec, prebuild): assert self.flavor == 'win' if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): return [] outputs = [] for source in filter(lambda x: x.endswith('.idl'), spec['sources']): self._WinIdlRule(source, prebuild, outputs) return outputs
BSD 3-Clause New or Revised License
national-voter-file/national-voter-file
src/python/national_voter_file/transformers/base.py
BaseTransformer.extract_birthdate
python
def extract_birthdate(self, input_dict):
        raise NotImplementedError('Must implement extract_birthdate method')
Inputs:
    input_dict: names of columns and corresponding values
Outputs:
    Dictionary with following keys
        'BIRTHDATE'
        'BIRTHDATE_IS_ESTIMATE'
https://github.com/national-voter-file/national-voter-file/blob/f8bae42418c9307150d10c9e71174defaefa4e60/src/python/national_voter_file/transformers/base.py#L746-L755
import os import csv import datetime from collections import defaultdict from io import TextIOWrapper import zipfile from functools import wraps import usaddress DATA_DIR = os.path.join(os.path.abspath(os.getcwd()), 'data') class BasePreparer(object): sep = ',' default_file = 'input.csv' state_name = '' def __init__(self, input_path, state_path=None, state_module=None, transformer=None, history=False): if transformer: self.transformer = transformer if state_path: self.state_path = state_path else: self.state_path = self.state_name if state_module: self.default_file = state_module.default_file self.history = history filename = self.default_file if os.path.isdir(input_path): state_subdir_guess = os.path.join(input_path, self.state_path, filename) state_file_guess = os.path.join(input_path, filename) if os.path.isfile(state_subdir_guess): input_path = state_subdir_guess elif os.path.isfile(state_file_guess): input_path = state_file_guess else: raise Exception("please include the input file path {}".format( (""" or make sure your file is called "{filename}" in {datadir} or in {datadir}/{state}/{filename} """.format(filename=filename, datadir=input_path, state=self.state_path)) if filename else '' )) self.input_path = input_path def open(self, path_or_handle, mode='r'): if mode == 'r' and isinstance(path_or_handle, zipfile.ZipExtFile): return TextIOWrapper(path_or_handle, encoding='utf8', errors='ignore', line_buffering=True) elif hasattr(path_or_handle, 'mode'): return path_or_handle else: return open(path_or_handle, mode, errors='ignore') def process(self): return self.dict_iterator(self.open(self.input_path)) def dict_iterator(self, infile): reader = csv.DictReader(infile, delimiter=self.sep, fieldnames=self.transformer.input_fields) return reader class BaseTransformer(object): col_type_dict = { 'TITLE': set([str, type(None)]), 'FIRST_NAME': set([str, type(None)]), 'MIDDLE_NAME': set([str, type(None)]), 'LAST_NAME': set([str, type(None)]), 'NAME_SUFFIX': set([str, type(None)]), 'GENDER': set([str, type(None)]), 'RACE':set([str, type(None)]), 'BIRTHDATE': set([datetime.date, type(None)]), 'BIRTHDATE_IS_ESTIMATE':set([str]), 'BIRTH_STATE':set([str, type(None)]), 'LANGUAGE_CHOICE': set([str, type(None)]), 'EMAIL': set([str, type(None)]), 'PHONE': set([str, type(None)]), 'DO_NOT_CALL_STATUS': set([str, type(None)]), 'ADDRESS_NUMBER': set([str, type(None)]), 'ADDRESS_NUMBER_PREFIX': set([str, type(None)]), 'ADDRESS_NUMBER_SUFFIX': set([str, type(None)]), 'BUILDING_NAME': set([str, type(None)]), 'CORNER_OF': set([str, type(None)]), 'INTERSECTION_SEPARATOR': set([str, type(None)]), 'LANDMARK_NAME': set([str, type(None)]), 'NOT_ADDRESS': set([str, type(None)]), 'OCCUPANCY_TYPE': set([str, type(None)]), 'OCCUPANCY_IDENTIFIER': set([str, type(None)]), 'PLACE_NAME': set([str, type(None)]), 'STATE_NAME': set([str]), 'STREET_NAME': set([str, type(None)]), 'STREET_NAME_PRE_DIRECTIONAL': set([str, type(None)]), 'STREET_NAME_PRE_MODIFIER': set([str, type(None)]), 'STREET_NAME_PRE_TYPE': set([str, type(None)]), 'STREET_NAME_POST_DIRECTIONAL': set([str, type(None)]), 'STREET_NAME_POST_MODIFIER': set([str, type(None)]), 'STREET_NAME_POST_TYPE': set([str, type(None)]), 'SUBADDRESS_IDENTIFIER': set([str, type(None)]), 'SUBADDRESS_TYPE': set([str, type(None)]), 'USPS_BOX_GROUP_ID': set([str, type(None)]), 'USPS_BOX_GROUP_TYPE': set([str, type(None)]), 'USPS_BOX_ID': set([str, type(None)]), 'USPS_BOX_TYPE': set([str, type(None)]), 'ZIP_CODE': set([str, type(None)]), 'MAIL_ADDRESS_LINE1': set([str, type(None)]), 
'MAIL_ADDRESS_LINE2': set([str, type(None)]), 'MAIL_CITY': set([str, type(None)]), 'MAIL_STATE': set([str, type(None)]), 'MAIL_ZIP_CODE': set([str, type(None)]), 'MAIL_COUNTRY': set([str, type(None)]), 'COUNTYCODE': set([str]), 'STATE_VOTER_REF': set([str]), 'COUNTY_VOTER_REF': set([str, type(None)]), 'REGISTRATION_DATE': set([datetime.date, type(None)]), 'REGISTRATION_STATUS': set([str, type(None)]), 'ABSENTEE_TYPE': set([str, type(None)]), 'PARTY': set([str, type(None)]), 'CONGRESSIONAL_DIST': set([str, type(None)]), 'UPPER_HOUSE_DIST': set([str, type(None)]), 'LOWER_HOUSE_DIST': set([str, type(None)]), 'PRECINCT': set([str, type(None)]), 'COUNTY_BOARD_DIST': set([str, type(None)]), 'SCHOOL_BOARD_DIST': set([str, type(None)]), 'PRECINCT_SPLIT': set([str, type(None)]), 'RAW_ADDR1': set([str, type(None)]), 'RAW_ADDR2': set([str, type(None)]), 'RAW_CITY': set([str, type(None)]), 'RAW_ZIP': set([str, type(None)]), 'VALIDATION_STATUS':set([str, type(str)]) } limited_value_dict = { 'PARTY': set([ 'DEM', 'REP', "AI", "PF", "CP", "AMC", "GRN", "LIB", "ECO", "IDP", "PSL", "REF", "SAP", "CON", "WOR", "WEP", "SCC", "NLP", "SP", "SWP", "UTY", "AE", "AMP", "OTH", "UN", "DEL", "FED", "CIT", 'LIB', "NEW", "ALI", "TAX", 'STS', 'UNI', 'IND', 'IPU', 'GPU', 'PCP', 'ROL', 'BLU', 'UJP', 'FDM', 'RTH', 'TXP', 'APP', type(None) ]), 'GENDER': set(['M', 'F', 'U']), 'RACE':set([ 'I', 'A', 'B', 'H', 'W', 'O', 'M', "U" ]), 'VALIDATION_STATUS':set([ '1', '2', '3', '4', '5' ]) } usaddress_to_standard_colnames_dict = { 'AddressNumber': 'ADDRESS_NUMBER', 'AddressNumberPrefix': 'ADDRESS_NUMBER_PREFIX', 'AddressNumberSuffix': 'ADDRESS_NUMBER_SUFFIX', 'BuildingName': 'BUILDING_NAME', 'CornerOf': 'CORNER_OF', 'IntersectionSeparator': 'INTERSECTION_SEPARATOR', 'LandmarkName': 'LANDMARK_NAME', 'NotAddress': 'NOT_ADDRESS', 'OccupancyType': 'OCCUPANCY_TYPE', 'OccupancyIdentifier': 'OCCUPANCY_IDENTIFIER', 'PlaceName': 'PLACE_NAME', 'StateName': 'STATE_NAME', 'StreetName': 'STREET_NAME', 'StreetNamePreDirectional': 'STREET_NAME_PRE_DIRECTIONAL', 'StreetNamePreModifier': 'STREET_NAME_PRE_MODIFIER', 'StreetNamePreType': 'STREET_NAME_PRE_TYPE', 'StreetNamePostDirectional': 'STREET_NAME_POST_DIRECTIONAL', 'StreetNamePostModifier': 'STREET_NAME_POST_MODIFIER', 'StreetNamePostType': 'STREET_NAME_POST_TYPE', 'SubaddressIdentifier': 'SUBADDRESS_IDENTIFIER', 'SubaddressType': 'SUBADDRESS_TYPE', 'USPSBoxGroupID': 'USPS_BOX_GROUP_ID', 'USPSBoxGroupType': 'USPS_BOX_GROUP_TYPE', 'USPSBoxID': 'USPS_BOX_ID', 'USPSBoxType': 'USPS_BOX_TYPE', 'ZipCode': 'ZIP_CODE', } date_format = '' col_map = {} input_fields = [] history_type_dict = { 'STATE_VOTER_REF': set([str]), 'ELECTION_DATE': set([datetime.date]), 'ELECTION_TYPE': set([str, type(None)]), 'VOTE_METHOD': set([str, type(None)]) } def check_col_map(col_list): def extract_decorator(f): @wraps(f) def wrapped(self, input_dict): if all(col in self.col_map for col in col_list): return { (c, input_dict.get(self.col_map[c])) for c in col_list } else: raise NotImplementedError( 'Must implement {} method or include {} in col_map'.format( f.__name__, ', '.join(col_list) ) ) return wrapped return extract_decorator def process_row(self, input_dict, history=False): if history: method_str = 'hist_' else: method_str = 'extract' output_dict = {} extract_funcs = [ getattr(self, x) for x in dir(self) if x.startswith(method_str) ] for func in extract_funcs: output_dict.update(func(input_dict)) return output_dict def fix_missing_mailing_addr(self, orig_dict): if('MAIL_CITY' not in orig_dict): 
if(orig_dict['STREET_NAME']): copied_addr = { 'MAIL_ADDRESS_LINE1': self.construct_val( orig_dict, [ 'ADDRESS_NUMBER_PREFIX', 'ADDRESS_NUMBER', 'ADDRESS_NUMBER_SUFFIX', 'STREET_NAME_PRE_DIRECTIONAL', 'STREET_NAME_PRE_MODIFIER', 'STREET_NAME_PRE_TYPE', 'STREET_NAME', 'STREET_NAME_POST_DIRECTIONAL', 'STREET_NAME_POST_MODIFIER', 'STREET_NAME_POST_TYPE' ] ), 'MAIL_ADDRESS_LINE2': self.construct_val( orig_dict, ['OCCUPANCY_TYPE', 'OCCUPANCY_IDENTIFIER'] ), 'MAIL_CITY': orig_dict['PLACE_NAME'], 'MAIL_STATE': orig_dict['STATE_NAME'], 'MAIL_ZIP_CODE': orig_dict['ZIP_CODE'], 'MAIL_COUNTRY': "USA" } else: copied_addr = { 'MAIL_ADDRESS_LINE1': orig_dict['RAW_ADDR1'], 'MAIL_ADDRESS_LINE2': orig_dict['RAW_ADDR2'], 'MAIL_CITY': orig_dict['RAW_CITY'], 'MAIL_STATE': orig_dict['STATE_NAME'], 'MAIL_ZIP_CODE': orig_dict['RAW_ZIP'], 'MAIL_COUNTRY': 'USA' } orig_dict.update(copied_addr) return orig_dict def construct_val(self, aDir, fields): result = "" for aField in fields: if(aDir[aField]): result = result + (aDir[aField].strip()+" " if aDir[aField].strip() else '') return result def constructEmptyResidentialAddress(self): return { 'ADDRESS_NUMBER': None, 'ADDRESS_NUMBER_PREFIX': None, 'ADDRESS_NUMBER_SUFFIX': None, 'BUILDING_NAME': None, 'CORNER_OF': None, 'INTERSECTION_SEPARATOR': None, 'LANDMARK_NAME': None, 'NOT_ADDRESS': None, 'OCCUPANCY_TYPE': None, 'OCCUPANCY_IDENTIFIER': None, 'PLACE_NAME': None, 'STATE_NAME': None, 'STREET_NAME': None, 'STREET_NAME_PRE_DIRECTIONAL': None, 'STREET_NAME_PRE_MODIFIER': None, 'STREET_NAME_PRE_TYPE': None, 'STREET_NAME_POST_DIRECTIONAL': None, 'STREET_NAME_POST_MODIFIER': None, 'STREET_NAME_POST_TYPE': None, 'SUBADDRESS_IDENTIFIER': None, 'SUBADDRESS_TYPE': None, 'USPS_BOX_GROUP_ID': None, 'USPS_BOX_GROUP_TYPE': None, 'USPS_BOX_ID': None, 'USPS_BOX_TYPE': None, 'ZIP_CODE': None } @classmethod def validate_output_row(cls, output_dict, history=False): if history: type_dict = cls.history_type_dict else: type_dict = cls.col_type_dict correct_output_col_set = set(type_dict.keys()) output_dict_col_set = set(output_dict.keys()) missing_cols = correct_output_col_set - output_dict_col_set extra_cols = output_dict_col_set - correct_output_col_set if len(missing_cols) > 0 or len(extra_cols) > 0: error_message = ( 'Column(s) {} are required but missing.\n' 'Column(s) {} are present but not required.').format( list(missing_cols), list(extra_cols), ) raise ValueError(error_message) type_errors = [] for colname, value in output_dict.items(): if isinstance(value, str): output_dict[colname] = value.strip() value_type = str if len(value.strip()) > 0 else type(None) else: value_type = type(value) acceptable_types = type_dict[colname] if value_type not in acceptable_types: type_errors.append( 'Column {} requires type(s) {}, found {}.'.format( colname, list(acceptable_types), value_type, ) ) if len(type_errors) > 0: error_str = '\n'.join(sorted(type_errors)) raise TypeError(error_str) if not history: value_errors = [] for col, vals in cls.limited_value_dict.items(): output_value = output_dict[col] if output_value is None and type(None) in cls.col_type_dict[col]: continue if output_value not in vals: error_message = 'Column {} requires value(s) {}, found {}'.format( col, list(vals), output_value, ) value_errors.append(error_message) if len(value_errors) > 0: error_str = '\n'.join(sorted(value_errors)) raise ValueError(error_str) def convert_date(self, date_str, date_format=None): return datetime.datetime.strptime(date_str, date_format or self.date_format).date() def usaddress_tag(self, 
address_str): try: usaddress_dict, usaddress_type = usaddress.tag(address_str) if 'USPSBoxID' in usaddress_dict: usaddress_type = 'PO Box' else: usaddress_type = 'Street Address' return usaddress_dict, usaddress_type except usaddress.RepeatedLabelError as e: return None, None def convert_usaddress_dict(self, usaddress_dict): address_dict = {} for k, v in self.usaddress_to_standard_colnames_dict.items(): address_dict[v] = usaddress_dict.get(k, None) return address_dict def construct_mail_address_1(self, usaddress_dict, usaddress_type): if usaddress_type == 'Street Address': cols = [ 'AddressNumberPrefix', 'AddressNumber', 'AddressNumberSuffix', 'StreetNamePreDirectional', 'StreetNamePreModifier', 'StreetNamePreType', 'StreetName', 'StreetNamePostType', 'StreetNamePostModifier', 'StreetNamePostDirectional', ] elif usaddress_type == 'PO Box': cols = [ 'USPSBoxType', 'USPSBoxID', ] elif usaddress_type in ['Ambiguous', 'Intersection']: return " " output_vals = [ usaddress_dict[x] for x in cols if x in usaddress_dict ] if len(output_vals) > 0: return ' '.join(output_vals) else: return " " def construct_mail_address_2(self, usaddress_dict): cols = ['OccupancyType', 'OccupancyIdentifier'] output_vals = [ usaddress_dict[x] for x in cols if x in usaddress_dict ] if len(output_vals) > 0: return ' '.join(output_vals) else: return " " def flag_empty_field(self, field_val): return field_val if field_val else "XXX-MISSING-XXX" @check_col_map(['TITLE', 'FIRST_NAME', 'MIDDLE_NAME', 'LAST_NAME', 'NAME_SUFFIX']) def extract_name(self, input_dict): raise NotImplementedError('Must implement extract_name method') @check_col_map(['EMAIL']) def extract_email(self, input_dict): raise NotImplementedError('Must implement extract_email method') @check_col_map(['PHONE']) def extract_phone_number(self, input_dict): raise NotImplementedError('Must implement extract_phone_number method') @check_col_map(['DO_NOT_CALL_STATUS']) def extract_do_not_call_status(self, input_dict): raise NotImplementedError('Must implement extract_do_not_call_status method') @check_col_map(['GENDER']) def extract_gender(self, input_dict): raise NotImplementedError('Must implement extract_gender method') @check_col_map(['RACE']) def extract_race(self, input_dict): raise NotImplementedError('Must implement extract_race method') @check_col_map(['BIRTH_STATE']) def extract_birth_state(self, input_dict): raise NotImplementedError('Must implement extract_birth_state method')
MIT License
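A minimal sketch of how a concrete state module might override the extract_birthdate hook above; the ExampleTransformer class name, the 'DOB' input column, the date format, and the 'f' flag value are illustrative assumptions, not taken from the source.
from national_voter_file.transformers.base import BaseTransformer  # assumes the package is on the path
class ExampleTransformer(BaseTransformer):
    date_format = '%m/%d/%Y'  # hypothetical format; real state modules define their own
    def extract_birthdate(self, input_dict):
        # 'DOB' is a made-up input column name for this sketch
        dob = input_dict.get('DOB')
        return {
            'BIRTHDATE': self.convert_date(dob) if dob else None,
            'BIRTHDATE_IS_ESTIMATE': 'f',
        }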
philgyford/foursquare-feeds
generate_feeds.py
FeedGenerator._get_all_checkins
python
def _get_all_checkins(self):
        offset = 0
        checkins = []
        total_checkins = 9999999999
        while offset < total_checkins:
            results = self._get_checkins_from_api(offset)
            if offset == 0:
                total_checkins = results["checkins"]["count"]
                plural = "" if total_checkins == 1 else "s"
                logger.debug("{} checkin{} to fetch".format(total_checkins, plural))
            logger.debug("Fetched {}-{}".format(offset + 1, offset + 250))
            checkins += results["checkins"]["items"]
            offset += 250
        return checkins
Make multiple requests to the API to get ALL checkins.
https://github.com/philgyford/foursquare-feeds/blob/b4d2f5642846c54fddd467696a6a9fc7af7e51e5/generate_feeds.py#L79-L101
import argparse import configparser from datetime import datetime import logging import os import pytz from xml.sax.saxutils import escape as xml_escape import foursquare from ics import Calendar, Event import simplekml logging.basicConfig(level=logging.INFO, format="%(message)s") logger = logging.getLogger(__name__) current_dir = os.path.realpath(os.path.dirname(__file__)) CONFIG_FILE = os.path.join(current_dir, "config.ini") VALID_KINDS = ["ics", "kml"] class FeedGenerator: fetch = "recent" def __init__(self, fetch="recent"): self.fetch = fetch self._load_config(CONFIG_FILE) self.client = foursquare.Foursquare(access_token=self.api_access_token) def _load_config(self, config_file): config = configparser.ConfigParser() try: config.read_file(open(config_file)) except IOError: logger.critical("Can't read config file: " + config_file) exit() self.api_access_token = config.get("Foursquare", "AccessToken") self.ics_filepath = config.get("Local", "IcsFilepath") self.kml_filepath = config.get("Local", "KmlFilepath") def generate(self, kind="ics"): if kind not in VALID_KINDS: raise ValueError("kind should be one of {}.".format(", ".join(VALID_KINDS))) if self.fetch == "all": checkins = self._get_all_checkins() else: checkins = self._get_recent_checkins() plural = "" if len(checkins) == 1 else "s" logger.info("Fetched {} checkin{} from the API".format(len(checkins), plural)) if kind == "ics": filepath = self._generate_ics_file(checkins) elif kind == "kml": filepath = self._generate_kml_file(checkins) logger.info("Generated file {}".format(filepath)) exit(0) def _get_recent_checkins(self): results = self._get_checkins_from_api() return results["checkins"]["items"]
BSD 3-Clause New or Revised License
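A hedged usage sketch for the method above: with a config.ini alongside the script providing the Foursquare AccessToken and output paths that _load_config expects, fetching every checkin (not just the recent ones) goes through _get_all_checkins.
# FeedGenerator imported from generate_feeds.py; requires a valid config.ini
generator = FeedGenerator(fetch="all")  # "all" makes generate() call _get_all_checkins()
generator.generate(kind="ics")          # writes the ICS file, then exits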
arubdesu/microsoft-recipes
Lync/MSLyncURLandUpdateInfoProvider.py
MSLyncURLandUpdateInfoProvider.sanityCheckExpectedTriggers
python
def sanityCheckExpectedTriggers(self, item):
        if not item.get("Trigger Condition") == ["and", "Lync"]:
            raise ProcessorError(
                "Unexpected Trigger Condition in item %s: %s"
                % (item["Title"], item["Trigger Condition"]))
        if not "Lync" in item.get("Triggers", {}):
            raise ProcessorError(
                "Missing expected MCP Trigger in item %s" % item["Title"])
Raises an exeception if the Trigger Condition or Triggers for an update don't match what we expect. Protects us if these change in the future.
https://github.com/arubdesu/microsoft-recipes/blob/677aaa3a8b82f25fbb7c130177095d6c7841a03f/Lync/MSLyncURLandUpdateInfoProvider.py#L71-L81
import plistlib import urllib2 from distutils.version import LooseVersion from operator import itemgetter from autopkglib import Processor, ProcessorError __all__ = ["MSLyncURLandUpdateInfoProvider"] CULTURE_CODE = "0409" BASE_URL = "https://www.microsoft.com/mac/autoupdate/%sUCCP14.xml" MUNKI_UPDATE_NAME = "Lync_Installer" class MSLyncURLandUpdateInfoProvider(Processor): input_variables = { "culture_code": { "required": False, "description": ("See " "http://msdn.microsoft.com/en-us/library/ee825488(v=cs.20).aspx" " for a table of CultureCodes Defaults to 0409, which " "corresponds to en-US (English - United States)"), }, "base_url": { "required": False, "description": ("Default is %s. If this is given, culture_code " "is ignored." % (BASE_URL % CULTURE_CODE)), }, "version": { "required": False, "description": "Update version number. Defaults to latest.", }, } output_variables = { "url": { "description": "URL to the latest Lync installer.", }, "pkg_name": { "description": "Name of the package within the disk image.", }, "additional_pkginfo": { "description": "Some pkginfo fields extracted from the Microsoft metadata.", }, "display_name": { "description": "The name of the package that includes the version.", }, } description = __doc__
Apache License 2.0
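The sanity check above enforces exactly two conditions on an update item; this illustrative dict mirrors them (the Title and the nested trigger payload are hypothetical).
item = {
    "Title": "Lync Update",                  # hypothetical
    "Trigger Condition": ["and", "Lync"],    # must equal this list exactly
    "Triggers": {"Lync": {}},                # must contain a "Lync" key
}
assert item.get("Trigger Condition") == ["and", "Lync"]
assert "Lync" in item.get("Triggers", {})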
swisscom/ai-research-mamo-framework
paretomanager/pareto_manager_class.py
ParetoManager.__check_input_model
python
def __check_input_model(self, model):
        if model is None:
            raise TypeError(
                'Argument: model must be a model derived from pytorch.')
        if not isinstance(model, nn.Module):
            raise TypeError(
                'Argument: model must be a model derived from pytorch.')
A function that checks the input model
https://github.com/swisscom/ai-research-mamo-framework/blob/26d6a08c8c7e7d33ad60d7e6896b0ffeede41bc1/paretomanager/pareto_manager_class.py#L67-L75
import os
import torch
import warnings
from torch import nn
class ParetoManager(object):
    def __init__(self, PATH='paretoFile/'):
        self.path = PATH
        self._pareto_front = []
        self._all_solutions = []
        self.id_count = 1
    def __check_input_solution(self, solution):
        if solution is None:
            raise TypeError(
                'Argument: the input solution must be a list representing a point.')
        if not isinstance(solution, list):
            raise TypeError(
                'The input solution should be a list repressenting a point!')
        if len(solution) == 0:
            raise ValueError(
                'Empty list was given as input, list should not be empty!')
Apache License 2.0
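A small sketch of what the validator above accepts and rejects, assuming torch and the mamo package are importable; the name-mangled call is used only because the method is private.
import torch.nn as nn
from paretomanager.pareto_manager_class import ParetoManager  # assumed import path
pm = ParetoManager()
pm._ParetoManager__check_input_model(nn.Linear(4, 2))   # a torch module: passes silently
try:
    pm._ParetoManager__check_input_model("not a model")
except TypeError as err:
    print(err)   # Argument: model must be a model derived from pytorch.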
codeforfrankfurt/polbotcheck
webapi/analytics/flask_api_server.py
index
python
def index():
    groupings = []
    candidates_by_district = db.get_candidates_grouped_by_district()
    for group in candidates_by_district:
        print(group)
        groupings.append(group)
    return jsonify({'candidates_grouped_by_district': groupings})
Return organizational entities like Landesliste Hessen and the hessian election districts as well as the candidates in them
https://github.com/codeforfrankfurt/polbotcheck/blob/cef5136008301dcb16eb06dc5f18d00510c77f35/webapi/analytics/flask_api_server.py#L28-L38
import os
if not os.environ.get('FLASK_ENV') == 'production':
    from dotenv import load_dotenv, find_dotenv
    load_dotenv(find_dotenv(), override=True)
from flask import Flask, jsonify
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
print("Value of WEB_CONCURRENCY is %s" % os.environ.get('WEB_CONCURRENCY'))
if int(os.environ.get('WEB_CONCURRENCY')) > 1:
    from random import randint
    from time import sleep
    interval = randint(1,5)
    print("Sleep for %d seconds to avoid too many concurrent db hits" % interval)
    sleep(interval)
import db
@app.route("/pbc")
MIT License
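A hypothetical client call against a locally running instance of this API; the host and port are assumptions (Flask defaults), not taken from the source.
import requests
resp = requests.get("http://localhost:5000/pbc")
print(resp.json()["candidates_grouped_by_district"])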
luscoma/pyrazor
lex.py
RazorLexer.shouldEscape
python
def shouldEscape(self, token):
        return token[1] != '!'
Returns false if this token should not be html escaped
https://github.com/luscoma/pyrazor/blob/15047a027339895652e700bba8b0884247786f00/lex.py#L81-L83
import sexylexer import html from scopestack import ScopeStack import re class Token: ESCAPED = "ESCAPED" COMMENT = "COMMENT" LINECOMMENT = "LINECOMMENT" ONELINE = "ONELINE" MULTILINE = "MULTILINE" EXPLICITMULTILINEEND = "EXPLICITMULTILINEEND" PARENEXPRESSION = "PARENEXPRESSION" EXPRESSION = "EXPRESSION" TEXT = "TEXT" CODE = "CODE" NEWLINE = "NEWLINE" INDENT = "INDENT" EMPTYLINE = "EMPTYLINE" XMLSTART = "XMLSTART" XMLFULLSTART = "XMLFULLSTART" XMLEND = "XMLEND" XMLSELFCLOSE = "XMLSELFCLOSE" PRINTLINE = "PRINTLINE" def bind(handler): return lambda scanner, token: handler(scanner, token); class RazorLexer(object): @staticmethod def create(ignore_whitespace=False): lex = RazorLexer(ignore_whitespace) lex.rules = ( (Token.NEWLINE, (r"[\r]?[\n][ \t]*", bind(lex.newline))), (Token.ESCAPED, (r"@@", bind(lex.escaped))), (Token.LINECOMMENT, (r"@#[^\r\n]*?$", bind(lex.linecomment))), (Token.ONELINE, (r"@(?:import|from|model) .+$", bind(lex.oneline))), (Token.MULTILINE, (r"@\w*.*:$", bind(lex.multiline))), (Token.PARENEXPRESSION, (r"@!?\(", bind(lex.paren_expression))), (Token.EXPRESSION, (r"@!?(\w+(?:(?:\[.+\])|(?:\(.*\)))?(?:\.[a-zA-Z]+(?:(?:\[.+\])|(?:\(.*\)))?)*)", bind(lex.expression))), (Token.XMLFULLSTART, (r"[ \t]*<\w[^@\r\n]*?>", bind(lex.xmlStart))), (Token.XMLSTART, (r"[ \t]*<\w[^@\r\n>]*", bind(lex.xmlStart))), (Token.XMLEND, (r"[ \t]*</[^@\r\n]+[>]", bind(lex.xmlEnd))), (Token.XMLSELFCLOSE, (r"[^@]+/>[ \t]*", bind(lex.xmlSelfClose))), (Token.TEXT, (r"[^@\r\n<]+", bind(lex.text))), ) lex.multilineRules = ( (Token.EMPTYLINE, (r"[\r]?[\n][ \t]*$", bind(lex.empty_line))), (Token.EXPLICITMULTILINEEND, (r"[\r]?[\n][ \t]*\w*.*:@", bind(lex.multiline_end))), (Token.NEWLINE, (r"[\r]?[\n][ \t]*", bind(lex.newline))), (Token.XMLFULLSTART, (r"[ \t]*<\w[^@\r\n]*?>", bind(lex.xmlStart))), (Token.XMLSTART, (r"[ \t]*<\w[^@\r\n>]*", bind(lex.xmlStart))), (Token.XMLEND, (r"[ \t]*</[^@\r\n]+[>]", bind(lex.xmlEnd))), (Token.XMLSELFCLOSE, (r"[^@]+/>[ \t]*", bind(lex.xmlSelfClose))), (Token.MULTILINE, (r"\w*.*:$", bind(lex.multiline))), (Token.PRINTLINE, (r"[ \t]*print[ \t]*[(][ \t]*['\"].*[\"'][ \t]*[)]", bind(lex.printLine))), (Token.CODE, (r".+", bind(lex.code))), ) lex.lexer = sexylexer.Lexer(lex.rules,lex.multilineRules) return lex def __init__(self, ignore_whitespace): self.scope = ScopeStack(ignore_whitespace) self.ignore_whitespace = ignore_whitespace self.Mode = [] self.NewLine = False def scan(self, text): if self.ignore_whitespace: return self.lexer.scan(text.lstrip()) return self.lexer.scan(text)
MIT License
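A hedged illustration of the escape rule above, assuming the pyrazor sources (lex.py and sexylexer) are importable: a token beginning '@!' opts out of HTML escaping, while a plain '@' token keeps it.
from lex import RazorLexer  # assumed import path
lexer = RazorLexer.create()
print(lexer.shouldEscape("@model.Name"))    # True  -> value would be HTML-escaped
print(lexer.shouldEscape("@!model.Name"))   # False -> emitted unescaped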
bigpon/qpnet
src/utils/utils.py
shape_hdf5
python
def shape_hdf5(hdf5_name, hdf5_path):
    if check_hdf5(hdf5_name, hdf5_path):
        with h5py.File(hdf5_name, "r") as f:
            hdf5_shape = f[hdf5_path].shape
        return hdf5_shape
    else:
        print("There is no such a file or dataset. (%s, %s)" % (hdf5_name, hdf5_path))
        sys.exit(-1)
FUNCTION TO GET HDF5 DATASET SHAPE
Args:
    hdf5_name (str): filename of hdf5 file
    hdf5_path (str): dataset name in hdf5 file
Return:
    (tuple): shape of dataset
https://github.com/bigpon/qpnet/blob/657fcb01b23e9e3371b5a4b2ebeec5757ad33e2d/src/utils/utils.py#L71-L87
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import sys
import threading
import h5py
import numpy as np
from numpy.matlib import repmat
def check_hdf5(hdf5_name, hdf5_path):
    if not os.path.exists(hdf5_name):
        return False
    else:
        with h5py.File(hdf5_name, "r") as f:
            if hdf5_path in f:
                return True
            else:
                return False
def read_hdf5(hdf5_name, hdf5_path):
    if not os.path.exists(hdf5_name):
        print("ERROR: There is no such a hdf5 file. (%s)" % hdf5_name)
        print("Please check the hdf5 file path.")
        sys.exit(-1)
    hdf5_file = h5py.File(hdf5_name, "r")
    if hdf5_path not in hdf5_file:
        print("ERROR: There is no such a data in hdf5 file. (%s)" % hdf5_path)
        print("Please check the data path in hdf5 file('%s')." % hdf5_name)
        sys.exit(-1)
    hdf5_data = hdf5_file[hdf5_path][()]
    hdf5_file.close()
    return hdf5_data
Apache License 2.0
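A minimal runnable sketch for shape_hdf5, assuming the qpnet utils module is importable; the file name and dataset path are arbitrary.
import h5py
import numpy as np
from utils import shape_hdf5  # assumed import path for src/utils/utils.py
with h5py.File("example.h5", "w") as f:
    f.create_dataset("/world", data=np.zeros((100, 5)))
print(shape_hdf5("example.h5", "/world"))   # (100, 5)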
turksat/pns
pns/controllers/channel.py
unregister_user
python
def unregister_user(channel_id, pns_id):
    user_obj = User.query.filter_by(pns_id=pns_id).first()
    channel_obj = Channel.query.get(channel_id)
    if not user_obj or not channel_obj:
        return jsonify(success=False, message='not found'), 404
    if channel_obj.unsubscribe_user(user_obj):
        return jsonify(success=True,
                       message={'channel': channel_obj.to_dict(),
                                'user': user_obj.to_dict()})
    else:
        return jsonify(success=False), 500
@api {delete} /channels/:channel_id/members/:pns_id Unsubscribe from Channel
@apiVersion 1.0.0
@apiName UnregisterUser
@apiGroup Channel
@apiSuccess {Boolean} success Request status
@apiSuccess {Object} message Respond payload
@apiSuccess {Object} message.channel Channel object
@apiSuccess {Object} message.user User object
https://github.com/turksat/pns/blob/e43d4b010add99ffaea4f07a29dc07f5b81a0d9f/pns/controllers/channel.py#L246-L267
from flask import Blueprint, jsonify, request from pns.app import app from pns.forms import CreateChannelForm from pns.models import db, Channel, User, Alert from pns.json_schemas import registration_schema channel = Blueprint('channel', __name__) @channel.route('/channels', methods=['POST']) def create_channel(): form = CreateChannelForm() if not form.validate_on_submit(): return jsonify(success=False, message=form.errors), 400 name = request.values.get('name') description = request.values.get('description', None) channel_obj = Channel.query.filter_by(name=name).first() if channel_obj: return jsonify(success=True, message={'channel': channel_obj.to_dict()}) channel_obj = Channel() channel_obj.name = name if description: channel_obj.description = description db.session.add(channel_obj) try: db.session.commit() return jsonify(success=True, message={'channel': channel_obj.to_dict()}) except Exception as ex: db.session.rollback() app.logger.exception(ex) return jsonify(success=False), 500 @channel.route('/channels', methods=['GET']) def list_channels(): try: offset = int(request.values.get('offset', 1)) limit = int(request.values.get('limit', 20)) except ValueError: offset = 1 limit = 20 query = (Channel .query .order_by(Channel.created_at.desc()) .paginate(page=offset, per_page=limit, error_out=False)) channels = [channel_obj.to_dict() for channel_obj in query.items] return jsonify(success=True, message={'channels': channels, 'total_pages': query.pages, 'current_page': offset, 'has_next': query.has_next}) @channel.route('/channels/<int:channel_id>', methods=['GET']) def get_channel(channel_id): channel_obj = Channel.query.get(channel_id) if not channel_obj: return jsonify(success=False, message='not found'), 404 return jsonify(success=True, message={'channel': channel_obj.to_dict()}) @channel.route('/channels/<int:channel_id>', methods=['DELETE']) def delete_channel(channel_id): channel_obj = Channel.query.get(channel_id) if not channel_obj: return jsonify(success=False, message='not found'), 404 db.session.delete(channel_obj) try: db.session.commit() return jsonify(success=True, message={'channel': channel_obj.to_dict()}) except Exception as ex: db.session.rollback() app.logger.exception(ex) return jsonify(success=False), 500 @channel.route('/channels/<int:channel_id>', methods=['PUT']) def edit_channel(channel_id): form = CreateChannelForm() if not form.validate_on_submit(): return jsonify(success=False, message=form.errors), 400 name = request.values.get('name') description = request.values.get('description', None) channel_obj = Channel.query.get(channel_id) if not channel_obj: return jsonify(success=False, message='not found'), 404 channel_obj.name = name if description: channel_obj.description = description db.session.add(channel_obj) try: db.session.commit() return jsonify(success=True, message={'channel': channel_obj.to_dict()}) except Exception as ex: db.session.rollback() app.logger.exception(ex) return jsonify(success=False), 500 @channel.route('/channels/<int:channel_id>/members', methods=['POST']) def register_user(channel_id): json_req = request.get_json(force=True) try: registration_schema.validate(json_req) except Exception as ex: return jsonify(success=False, message={'error': str(ex)}), 400 pns_id_list = [pns_id.strip() for pns_id in json_req['pns_id']] channel_obj = Channel.query.get(channel_id) if not channel_obj: return jsonify(success=False, message='not found'), 404 users = User.query.filter(User.pns_id.in_(pns_id_list)).all() for user in users: 
channel_obj.subscribe_user(user) return jsonify(success=True, message={'channel': channel_obj.to_dict(), 'users': [user.to_dict() for user in users]}) @channel.route('/channels/<int:channel_id>/members', methods=['GET']) def list_channel_members(channel_id): try: offset = int(request.values.get('offset', 1)) limit = int(request.values.get('limit', 20)) except ValueError: offset = 1 limit = 20 channel_obj = Channel.query.get(channel_id) if not channel_obj: return jsonify(success=False, message='not found'), 404 query = (channel_obj .subscribers .paginate(page=offset, per_page=limit, error_out=False)) users = [user.to_dict() for user in query.items] return jsonify(success=True, message={'users': users, 'total_pages': query.pages, 'current_page': offset, 'has_next': query.has_next}) @channel.route('/channels/<int:channel_id>/members/<pns_id>', methods=['DELETE'])
Apache License 2.0
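A hypothetical client call for the DELETE endpoint above; the base URL, channel id, and pns_id are illustrative.
import requests
resp = requests.delete("http://localhost:5000/channels/42/members/example-pns-id")
print(resp.status_code, resp.json())   # 200 with channel/user payload, or 404 if either is unknown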
atomlinter/linter-pylama
bin/deps/pylint/checkers/logging.py
LoggingChecker._check_format_string
python
def _check_format_string(self, node, format_arg):
        num_args = _count_supplied_tokens(node.args[format_arg + 1:])
        if not num_args:
            return
        format_string = node.args[format_arg].value
        if not isinstance(format_string, six.string_types):
            required_num_args = 0
        else:
            try:
                keyword_args, required_num_args = utils.parse_format_string(format_string)
                if keyword_args:
                    return
            except utils.UnsupportedFormatCharacter as ex:
                char = format_string[ex.index]
                self.add_message('logging-unsupported-format', node=node,
                                 args=(char, ord(char), ex.index))
                return
            except utils.IncompleteFormatString:
                self.add_message('logging-format-truncated', node=node)
                return
        if num_args > required_num_args:
            self.add_message('logging-too-many-args', node=node)
        elif num_args < required_num_args:
            self.add_message('logging-too-few-args', node=node)
Checks that format string tokens match the supplied arguments.
Args:
    node (astroid.node_classes.NodeNG): AST node to be checked.
    format_arg (int): Index of the format string in the node arguments.
https://github.com/atomlinter/linter-pylama/blob/9157f7f84083007161814c93b537a712984f3c86/bin/deps/pylint/checkers/logging.py#L216-L252
import string import six import astroid from pylint import checkers from pylint import interfaces from pylint.checkers import utils from pylint.checkers.utils import check_messages MSGS = { 'W1201': ('Specify string format arguments as logging function parameters', 'logging-not-lazy', 'Used when a logging statement has a call form of ' '"logging.<logging method>(format_string % (format_args...))". ' 'Such calls should leave string interpolation to the logging ' 'method itself and be written ' '"logging.<logging method>(format_string, format_args...)" ' 'so that the program may avoid incurring the cost of the ' 'interpolation in those cases in which no message will be ' 'logged. For more, see ' 'http://www.python.org/dev/peps/pep-0282/.'), 'W1202': ('Use % formatting in logging functions and pass the % ' 'parameters as arguments', 'logging-format-interpolation', 'Used when a logging statement has a call form of ' '"logging.<logging method>(format_string.format(format_args...))"' '. Such calls should use % formatting instead, but leave ' 'interpolation to the logging function by passing the parameters ' 'as arguments.'), 'E1200': ('Unsupported logging format character %r (%#02x) at index %d', 'logging-unsupported-format', 'Used when an unsupported format character is used in a logging\ statement format string.'), 'E1201': ('Logging format string ends in middle of conversion specifier', 'logging-format-truncated', 'Used when a logging statement format string terminates before\ the end of a conversion specifier.'), 'E1205': ('Too many arguments for logging format string', 'logging-too-many-args', 'Used when a logging format string is given too many arguments.'), 'E1206': ('Not enough arguments for logging format string', 'logging-too-few-args', 'Used when a logging format string is given too few arguments.'), } CHECKED_CONVENIENCE_FUNCTIONS = { 'critical', 'debug', 'error', 'exception', 'fatal', 'info', 'warn', 'warning' } def is_method_call(func, types=(), methods=()): return (isinstance(func, astroid.BoundMethod) and isinstance(func.bound, astroid.Instance) and (func.bound.name in types if types else True) and (func.name in methods if methods else True)) class LoggingChecker(checkers.BaseChecker): __implements__ = interfaces.IAstroidChecker name = 'logging' msgs = MSGS options = (('logging-modules', {'default': ('logging',), 'type': 'csv', 'metavar': '<comma separated list>', 'help': 'Logging modules to check that the string format ' 'arguments are in logging function parameter format'} ), ) def visit_module(self, node): self._logging_names = set() logging_mods = self.config.logging_modules self._logging_modules = set(logging_mods) self._from_imports = {} for logging_mod in logging_mods: parts = logging_mod.rsplit('.', 1) if len(parts) > 1: self._from_imports[parts[0]] = parts[1] def visit_importfrom(self, node): try: logging_name = self._from_imports[node.modname] for module, as_name in node.names: if module == logging_name: self._logging_names.add(as_name or module) except KeyError: pass def visit_import(self, node): for module, as_name in node.names: if module in self._logging_modules: self._logging_names.add(as_name or module) @check_messages(*(MSGS.keys())) def visit_call(self, node): def is_logging_name(): return (isinstance(node.func, astroid.Attribute) and isinstance(node.func.expr, astroid.Name) and node.func.expr.name in self._logging_names) def is_logger_class(): try: for inferred in node.func.infer(): if isinstance(inferred, astroid.BoundMethod): parent = inferred._proxied.parent 
if (isinstance(parent, astroid.ClassDef) and (parent.qname() == 'logging.Logger' or any(ancestor.qname() == 'logging.Logger' for ancestor in parent.ancestors()))): return True, inferred._proxied.name except astroid.exceptions.InferenceError: pass return False, None if is_logging_name(): name = node.func.attrname else: result, name = is_logger_class() if not result: return self._check_log_method(node, name) def _check_log_method(self, node, name): if name == 'log': if node.starargs or node.kwargs or len(node.args) < 2: return format_pos = 1 elif name in CHECKED_CONVENIENCE_FUNCTIONS: if node.starargs or node.kwargs or not node.args: return format_pos = 0 else: return if isinstance(node.args[format_pos], astroid.BinOp): binop = node.args[format_pos] if (binop.op == '%' or binop.op == '+' and len([_operand for _operand in (binop.left, binop.right) if self._is_operand_literal_str(_operand)]) == 1): self.add_message('logging-not-lazy', node=node) elif isinstance(node.args[format_pos], astroid.Call): self._check_call_func(node.args[format_pos]) elif isinstance(node.args[format_pos], astroid.Const): self._check_format_string(node, format_pos) @staticmethod def _is_operand_literal_str(operand): return isinstance(operand, astroid.Const) and operand.name == 'str' def _check_call_func(self, node): func = utils.safe_infer(node.func) types = ('str', 'unicode') methods = ('format',) if is_method_call(func, types, methods) and not is_complex_format_str(func.bound): self.add_message('logging-format-interpolation', node=node)
MIT License
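A hedged illustration of the mismatch this check reports: one %s placeholder with two supplied arguments would be flagged as logging-too-many-args, and the reverse case as logging-too-few-args (at runtime the stdlib logs a formatting error instead of raising, which is why the static check is useful).
import logging
logging.warning("user %s logged in", "alice", "extra")   # would be flagged as logging-too-many-args (E1205)
logging.warning("user %s logged in from %s", "alice")    # would be flagged as logging-too-few-args (E1206)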
blurstudio/cross3d
cross3d/studiomax/studiomaxscene.py
StudiomaxScene._getNativeMap
python
def _getNativeMap(self):
        return mxs.materialBrowseDlg(mxs.pyhelper.namify("maps"))
\remarks implements the AbstractScene._getNativeMap method to invoke the application's ability to let a user select a Map from the scene
\return <Py3dsMax.mxs.TextureMap> nativeMap || None
https://github.com/blurstudio/cross3d/blob/277968d1227de740fc87ef61005c75034420eadf/cross3d/studiomax/studiomaxscene.py#L660-L665
import os import re import glob import getpass import win32con import win32api import traceback import math import cross3d from Py3dsMax import mxs from PyQt4.QtCore import QTimer from cross3d import UserProps, application, FrameRange, constants from cross3d.abstract.abstractscene import AbstractScene from cross3d.constants import UpVector, ExtrapolationType, RendererType from mxscustattribdef import MXSCustAttribDef class EnvironmentMapHolder(MXSCustAttribDef): @classmethod def define(cls): cls.setAttrName('OnionMapHolder') cls.defineParam('environmentMap', 'textureMap', paramId='eMap') EnvironmentMapHolder.register() class EnvironmentMapsHolder(MXSCustAttribDef): def init(self): MXSCustAttribDef.init(self) self.setValue('environmentMaps', []) @classmethod def define(cls): cls.setAttrName('OnionAltMapsHolder') cls.defineParam('environmentMaps', 'textureMapTab', paramId='aMps') cls.defineParam('currentIndex', 'integer', paramId='mi') EnvironmentMapsHolder.register() class CustomProperties(MXSCustAttribDef): def init(self): MXSCustAttribDef.init(self) self.setValue('keys', []) self.setValue('values', []) @classmethod def define(cls): cls.setAttrName('BlurCustomProperties') cls.defineParam('keys', 'stringTab') cls.defineParam('values', 'stringTab') CustomProperties.register() class SceneMetaData(MXSCustAttribDef): version = 1.63 def __init__(self, mxsInstance): MXSCustAttribDef.__init__(self, mxsInstance) self._mapsHolder = None self._mapHolder = None self._custProperties = None def customProperties(self): if (not self._custProperties): root = mxs.rootNode data = CustomProperties.find(root) if (not data): data = CustomProperties.createUnique(root) self._custProperties = data return self._custProperties def environmentMapCache(self): if (not self._mapHolder): root = mxs.rootNode data = EnvironmentMapHolder.find(root) if (not data): data = EnvironmentMapHolder.createUnique(root) self._mapHolder = data return self._mapHolder def environmentMapsCache(self): if (not self._mapsHolder): root = mxs.rootNode data = EnvironmentMapsHolder.find(root) if (not data): data = EnvironmentMapsHolder.createUnique(root) self._mapsHolder = data return self._mapsHolder def init(self): MXSCustAttribDef.init(self) self.setValue('version', SceneMetaData.version) self.setValue('layerGroupNames', ['Main']) self.setValue('layerGroupStates', [True]) @classmethod def define(cls): cls.setAttrName('OnionData') cls.defineParam('version', 'float', paramId='v') cls.defineParam('layerGroupNames', 'stringTab', paramId='gn') cls.defineParam('layerGroupStates', 'boolTab', paramId='go') cls.defineParam('materialLibraryList', 'materialTab', paramId='mtl') cls.defineParam('baseMaterialCache', 'materialTab', paramId='ms') SceneMetaData.register() class StudiomaxScene(AbstractScene): _fbxIOPresetModifiedTime = 0 _orignalFBXPresets = {} def __init__(self): AbstractScene.__init__(self) self._metaData = None self._mapCache = None self._connectDefined = False def _cacheNativeMaterial(self, cacheType, nativeMaterial): from cross3d.constants import MaterialCacheType if (cacheType == MaterialCacheType.BaseMaterial): data = self.metaData() cache = list(data.value('baseMaterialCache', [])) if (nativeMaterial and not nativeMaterial in cache): cache.append(nativeMaterial) data.setValue('baseMaterialCache', cache) return True return False def _cachedNativeMaterial(self, cacheType, materialId, default=None): unique_id = mxs.blurUtil.uniqueId cache = self._cachedNativeMaterials(cacheType) for mtl in cache: if (mtl == None): continue uid = 
str(unique_id(mtl)) unm = str(mtl.name) if (materialId == uid or materialId == unm): return mtl return None def _cachedNativeMaterials(self, cacheType): from cross3d.constants import MaterialCacheType if (cacheType == MaterialCacheType.MaterialOverrideList): return self.metaData().value('materialLibraryList') if (cacheType == MaterialCacheType.BaseMaterial): return self.metaData().value('baseMaterialCache') return [] def _cacheNativeMap(self, cacheType, nativeMap): from cross3d.constants import MapCacheType if (cacheType == MapCacheType.EnvironmentMap): data = self.metaData().environmentMapsCache() maps = list(data.value('environmentMaps')) maps.append(nativeMap) data.setValue('environmentMaps', maps) return False def _cachedNativeMap(self, cacheType, uniqueId, default=None): unique_id = mxs.blurUtil.uniqueId cache = self._cachedNativeMaps(cacheType) for nativeMap in cache: if (nativeMap == None): continue uid = str(unique_id(nativeMap)) unm = str(nativeMap.name) if (uniqueId == uid or uniqueId == unm): return nativeMap return None def _cachedNativeMaps(self, cacheType): from cross3d.constants import MapCacheType if (cacheType == MapCacheType.EnvironmentMap): data = self.metaData().environmentMapsCache() return data.value('environmentMaps') return [] def _clearNativeMaterialOverride(self, nativeObjects): from cross3d.constants import MaterialCacheType from cross3d.studiomax import StudiomaxAppData get_userprop = mxs.getUserProp set_userprop = mxs.setUserProp get_appdata = mxs.getAppData del_appdata = mxs.deleteAppData superclassof = mxs.superClassOf geoclass = mxs.GeometryClass for obj in nativeObjects: if (not superclassof(obj) == geoclass): continue mid = get_appdata(obj, int(StudiomaxAppData.AltMtlIndex)) if (mid == None): mid = get_userprop(obj, 'basematerial') if (mid and mid != 'undefined'): del_appdata(obj, int(StudiomaxAppData.AltMtlIndex)) set_userprop(obj, 'basematerial', 'undefined') if (mid == '0'): obj.material = None else: obj.material = self._cachedNativeMaterial(MaterialCacheType.BaseMaterial, mid) return True def _clearNativePropSetOverride(self, nativeObjects): from cross3d import SceneObjectPropSet from cross3d.studiomax import StudiomaxAppData get_appdata = mxs.getAppData del_appdata = mxs.deleteAppData get_userprop = mxs.getUserProp set_userprop = mxs.setUserProp altpropindex = int(StudiomaxAppData.AltPropIndex) for obj in nativeObjects: props = get_appdata(obj, altpropindex) if (not props): props = get_userprop(obj, 'baseprops') if (props and props != 'undefined'): nprop = SceneObjectPropSet(self, None) nprop._setValueString(props) for key in nprop.propertyNames(): if (nprop.isCustomProperty(key)): set_userprop(obj, key, str(self._toNativeValue(nprop.value(key)))) else: obj.setProperty(key, self._toNativeValue(nprop.value(key))) set_userprop(obj, 'baseprops', 'undefined') del_appdata(obj, altpropindex) return True def _createNativeModel(self, name='Model', nativeObjects=[], referenced=False): name = 'Model' if not name else name output = mxs.Point(cross=False, name=name) userProps = UserProps(output) userProps['model'] = True if nativeObjects: for nativeObject in nativeObjects: nativeObject.name = '.'.join([name, nativeObject.name]) nativeObjects.append(output) return output def _createNativeLayer(self, name, nativeObjects=[]): name = unicode(name) lay = self._findNativeLayer(name) if not lay: lay = mxs.layerManager.newLayerFromName(name) if lay: from cross3d import SceneLayer SceneLayer(self, lay)._addNativeObjects(nativeObjects) return lay def 
_createNativeLayerGroup(self, name, nativeLayers=[]): names = list(self.metaData().value('layerGroupNames')) states = list(self.metaData().value('layerGroupStates')) if (not name in names): names.append(str(name)) states.append(True) self.metaData().setValue('layerGroupNames', names) self.metaData().setValue('layerGroupStates', states) return name return '' def _createNativeCamera(self, name='Camera', type='Standard', target=None, rotationOrder=None): if type == 'Physical': if application.version() < 18: nativeCamera = mxs.VRayPhysicalCamera(target=target) if target else mxs.VRayPhysicalCamera() else: nativeCamera = mxs.Physical(target=target) if target else mxs.Physical() else: nativeCamera = mxs.FreeCamera() nativeCamera.name = name return nativeCamera def _createNativeRenderer(self, rendererType): from cross3d.constants import RendererType if (rendererType == RendererType.Scanline): return mxs.Default_Scanline_Renderer() elif (rendererType == RendererType.MentalRay): return mxs.mental_ray_renderer() elif (rendererType == RendererType.VRay): renderers = mxs.rendererClass.classes for renderer in renderers: clsname = str(renderer) if (not clsname.startswith('V_Ray_RT') and clsname.startswith('V_Ray_')): return renderer() else: renderers = mxs.rendererClass.classes for renderer in renderers: clsname = str(renderer) if (clsname == rendererType): return renderer() return None def _createNativeTarget(self, name='Camera.Target'): return mxs.targetobject(name=name) def _currentNativeCamera(self): return mxs.viewport.getCamera() def _currentNativeRenderer(self): return mxs.renderers.current def _exportNativeObjects(self, objects, filename=''): return mxs.saveNodes(objects, filename) def _findNativeObject(self, name='', uniqueId=0): name = str(name) output = None if (name): output = mxs.getNodeByName(str(name)) if (not output and uniqueId): output = mxs.refByUniqueId(uniqueId) return output def _findNativeLayer(self, name='', uniqueId=0): if (name == 'World Layer'): name = '0' output = None if (name): output = mxs.layerManager.getLayerFromName(str(name)) if (not output and uniqueId): output = mxs.layerManager.refByUniqueId(uniqueId) return output def _findNativeLayerGroup(self, name='', uniqueId=0): names = list(self.metaData().value('layerGroupNames')) name = str(name) if (name in names): return name return None def _findNativeMaterial(self, materialName='', materialId=0): materialName = str(materialName) if (not (materialName or materialId)): return None uniqueid = mxs.blurUtil.uniqueId for material in self._nativeMaterials(): if (material.name == materialName or uniqueid(material) == materialId): return material cross3d.logger.debug('could not find material (%s - %s)' % (materialName, materialId)) return None def _findNativeMap(self, name='', uniqueId=0): name = str(name) if (not (name or uniqueId)): return None uniqueid = mxs.blurUtil.uniqueId for nmap in self._collectNativeMaps(): if (nmap.name == name or uniqueid(nmap) == uniqueId): return nmap return None def _freezeNativeObjects(self, nativeObjects, state): if (state): mxs.freeze(nativeObjects) else: mxs.unfreeze(nativeObjects) application.refresh() return True def _hideNativeObjects(self, nativeObjects, state): if (state): mxs.hide(nativeObjects) else: mxs.unhide(nativeObjects) application.refresh() return True def _fromNativeValue(self, nativeValue): classof = mxs.classof cls = classof(nativeValue) if (cls == mxs.Color): from PyQt4.QtGui import QColor return QColor(nativeValue.r, nativeValue.g, nativeValue.b) if (cls == mxs.Time): return 
int(nativeValue) if (cls == mxs.Name): return str(nativeValue) return nativeValue def _getNativeObject(self): return mxs.selectByName(single=True) def _getNativeMaterial(self): return mxs.materialBrowseDlg(mxs.pyhelper.namify("mats"))
MIT License
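A hedged sketch that is only meaningful inside a 3ds Max session where Py3dsMax and the cross3d package are available; it shows the call pattern and the None-on-cancel behaviour, nothing more.
from cross3d.studiomax.studiomaxscene import StudiomaxScene  # assumed import path
scene = StudiomaxScene()
native_map = scene._getNativeMap()   # opens Max's map browser dialog
if native_map is not None:
    print(native_map.name)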
google/clusterfuzz
src/clusterfuzz/_internal/bot/fuzzers/dictionary_manager.py
DictionaryManager.parse_recommended_dictionary_from_data
python
def parse_recommended_dictionary_from_data(self, data):
        log_lines = data.splitlines()
        return self.parse_recommended_dictionary_from_log_lines(log_lines)
Extract recommended dictionary entriees from the given string.
Args:
    data: A string containing data from a fuzzer log.
Returns:
    A set containing recommended dictionary lines.
https://github.com/google/clusterfuzz/blob/e9e105d66f009356c4f3fe9ae7873ffff126b234/src/clusterfuzz/_internal/bot/fuzzers/dictionary_manager.py#L250-L260
import os import re from clusterfuzz._internal.base import errors from clusterfuzz._internal.base import utils from clusterfuzz._internal.bot.fuzzers import utils as fuzzer_utils from clusterfuzz._internal.google_cloud_utils import storage from clusterfuzz._internal.metrics import logs from clusterfuzz._internal.system import environment DICTIONARY_FILE_EXTENSION = '.dict' RECOMMENDED_DICTIONARY_FILENAME = ( 'recommended_dictionary%s' % DICTIONARY_FILE_EXTENSION) RECOMMENDED_DICTIONARY_HEADER = '# Recommended dictionary stored in GCS.' TOKEN_ANALYZE_DICT_METADATA = ' # Score: ' TOKEN_RECOMMENDED_DICT_END = 'End of recommended dictionary.' TOKEN_RECOMMENDED_DICT_START = 'Recommended dictionary.' TOKEN_USELESS_DICT_END = 'End of useless dictionary elements.' TOKEN_USELESS_DICT_START = 'Useless dictionary elements.' DICTIONARY_PART_PATTERN = re.compile(r'([^"]+\s*=\s*)?(.*)') def extract_dictionary_element(line): start_index = line.find('"') end_index = line.rfind('"') if start_index == -1 or end_index == -1 or start_index == end_index: return None element = line[start_index:end_index + 1] return element def get_default_dictionary_path(fuzz_target_path): return fuzzer_utils.get_supporting_file(fuzz_target_path, DICTIONARY_FILE_EXTENSION) def get_dictionary_size(dictionary_content): count = 0 for line in dictionary_content.splitlines(): if extract_dictionary_element(line): count += 1 return count def get_recommended_dictionary_gcs_path(fuzzer_name): bucket_name = environment.get_value('FUZZ_LOGS_BUCKET') bucket_subdirectory_name = 'dictionaries' recommended_dictionary_gcs_path = '/%s/%s/%s/%s' % ( bucket_name, bucket_subdirectory_name, fuzzer_name, RECOMMENDED_DICTIONARY_FILENAME) return recommended_dictionary_gcs_path def get_stats_for_dictionary_file(dictionary_path): if not dictionary_path or not os.path.exists(dictionary_path): return 0, 0 dictionary_content = utils.read_data_from_file( dictionary_path, eval_data=False).decode('utf-8') dictionaries = dictionary_content.split(RECOMMENDED_DICTIONARY_HEADER) manual_dictionary_size = get_dictionary_size(dictionaries[0]) if len(dictionaries) < 2: return manual_dictionary_size, 0 recommended_dictionary_size = get_dictionary_size(dictionaries[1]) return manual_dictionary_size, recommended_dictionary_size def merge_dictionary_files(original_dictionary_path, recommended_dictionary_path, merged_dictionary_path): if original_dictionary_path and os.path.exists(original_dictionary_path): merged_dictionary_data = utils.read_data_from_file( original_dictionary_path, eval_data=False).decode('utf-8') else: merged_dictionary_data = '' recommended_dictionary_lines = utils.read_data_from_file( recommended_dictionary_path, eval_data=False).decode('utf-8').splitlines() dictionary_lines_to_add = set() for line in recommended_dictionary_lines: if line not in merged_dictionary_data: dictionary_lines_to_add.add(line) merged_dictionary_data += '\n%s\n' % RECOMMENDED_DICTIONARY_HEADER merged_dictionary_data += '\n'.join(dictionary_lines_to_add) utils.write_data_to_file(merged_dictionary_data, merged_dictionary_path) def _fix_dictionary_line(line, dict_path): if not line or line.strip().startswith('#'): return line match = DICTIONARY_PART_PATTERN.match(line) if not match: raise errors.BadStateError( 'Failed to correct dictionary line "{line}" in {path}.'.format( line=line, path=dict_path)) name_part = match.group(1) or '' entry = match.group(2) if not entry and name_part: entry = name_part name_part = '' if entry == '"': entry = '"\\\""' if entry.startswith('"') and 
entry.endswith('"'): return name_part + entry new_entry = '' prev_character = '' for character in entry: if character == '"' and prev_character != '\\': new_entry += '\\' new_entry += character prev_character = character new_entry = '"{entry}"'.format(entry=new_entry) return name_part + new_entry def correct_if_needed(dict_path): if not dict_path or not os.path.exists(dict_path): return content = utils.read_data_from_file( dict_path, eval_data=False).decode('utf-8') new_content = '' for current_line in content.splitlines(): new_content += _fix_dictionary_line(current_line, dict_path) + '\n' if new_content.rstrip('\n') != content.rstrip('\n'): utils.write_data_to_file(new_content, dict_path) class DictionaryManager(object): def __init__(self, fuzzer_name): self._fuzzer_name = fuzzer_name self._gcs_path = get_recommended_dictionary_gcs_path(self.fuzzer_name) @property def fuzzer_name(self): return self._fuzzer_name @property def gcs_path(self): return self._gcs_path def _compare_and_swap_gcs_dictionary(self, old_content, new_content): current_content = storage.read_data(self.gcs_path).decode('utf-8') if current_content != old_content: return False, current_content storage.write_data(new_content.encode('utf-8'), self.gcs_path) return True, old_content def download_recommended_dictionary_from_gcs(self, local_dict_path): if environment.is_lib(): return 0 if not storage.exists(self.gcs_path): return False if storage.copy_file_from(self.gcs_path, local_dict_path): return True logs.log('Downloading %s failed.' % self.gcs_path) return False
Apache License 2.0
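A hedged sketch, assuming the clusterfuzz source tree is importable and that fuzzer_run.log is a fuzzer log containing the recommended-dictionary markers defined in this module; the fuzzer name and log path are hypothetical.
dm = DictionaryManager("libFuzzer_example_fuzzer")
with open("fuzzer_run.log") as log_file:
    entries = dm.parse_recommended_dictionary_from_data(log_file.read())
print("%d recommended dictionary entries" % len(entries))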
wdm0006/dummyrdd
dummy_spark/rdd.py
RDD.persist
python
def persist(self, storageLevel=None):
        return self
:param storageLevel:
:return:
https://github.com/wdm0006/dummyrdd/blob/d66c30495cbaa001a744128c89d41fb55741fba5/dummy_spark/rdd.py#L101-L107
import random
import uuid
from collections import OrderedDict
from functools import reduce
from dummy_spark.resultsiterable import ResultIterable
__author__ = 'willmcginnis'
class RDD(object):
    def __init__(self, jrdd, ctx, jrdd_deserializer=None):
        self._id = str(uuid.uuid4())
        if jrdd is None:
            self._jrdd = []
        else:
            if isinstance(jrdd, list):
                self._jrdd = jrdd
            elif isinstance(jrdd, set):
                self._jrdd = list(jrdd)
            else:
                raise AttributeError('Type %s for jrdd not supported' % (type(jrdd), ))
        self.ctx = ctx
        self.is_cached = True
        self._name = 'dummpy-rdd'
        self.is_checkpointed = False
        self._jrdd_deserializer = jrdd_deserializer
        self.partitioner = None
    def id(self):
        return self._id
    @property
    def context(self):
        return self.ctx
    def name(self):
        return self._name
    def setName(self, name):
        self._name = name
        return self
    def __repr__(self):
        return str(self._jrdd)
    def cache(self):
        return self
BSD 3-Clause New or Revised License
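A minimal sketch showing that persist() on the dummy RDD is a no-op that returns the same object; passing ctx=None is an illustrative shortcut, since persist() never touches the context.
from dummy_spark.rdd import RDD  # assumed import path
rdd = RDD([1, 2, 3], ctx=None)
assert rdd.persist() is rdd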
alex-sherman/unsync
test/test_unsync.py
set_attr
python
def set_attr(attr_value):
    @wraps(attr_value)
    def wrapper(f):
        f.attr = attr_value
        return f
    return wrapper
Sample decorator for testing nested unsync decorators.
https://github.com/alex-sherman/unsync/blob/88c4ecd9c74a86c23a50b0adfb8394807725e993/test/test_unsync.py#L114-L124
from functools import wraps from unittest import TestCase import asyncio import concurrent import time from unsync import unsync from unsync.unsync import Unfuture class DecoratorTests(TestCase): def test_exception(self): class TestException(Exception): pass @unsync async def error(): await asyncio.sleep(0.1) raise TestException with self.assertRaises(TestException): error().result() def test_parallelism(self): calls = [] @unsync async def sleep(): calls.append('a') await asyncio.sleep(0.1) calls.append('b') results = [] for _ in range(100): results.append(sleep()) for result in results: result.result() self.assertEqual(list(sorted(calls)), calls) def test_apply_to_function(self): async def sleep(): await asyncio.sleep(0.1) return "derp" unsync_sleep = unsync(sleep) self.assertEqual(unsync_sleep().result(), "derp") def test_apply_to_built_in(self): unsync_sleep = unsync(time.sleep) unsync_sleep(0.1).result() def test_future_integration(self): asyncio_future = asyncio.Future(loop=unsync.loop) @unsync async def wrapper(_future): return await _future result = wrapper(asyncio_future) with self.assertRaises(concurrent.futures.TimeoutError): result.result(timeout=0.1) self.assertFalse(result.done()) unsync.loop.call_soon_threadsafe(lambda: asyncio_future.set_result('faff')) self.assertEqual('faff', result.result(timeout=0.1)) def test_unfuture_integration(self): unfuture = Unfuture() @unsync async def wrapper(_future): result = await _future return result result = wrapper(unfuture) with self.assertRaises(concurrent.futures.TimeoutError): result.result(timeout=0.1) self.assertFalse(result.done()) unfuture.set_result('faff') self.assertEqual('faff', result.result(timeout=0.1)) def test_instance_methods(self): class Class: @unsync async def wait(self): await asyncio.sleep(0.1) return 'faff' self.assertEqual('faff', Class().wait().result()) def test_passing_arguments(self): @unsync(faff='faff') def cpu_bound(): return 'faff' self.assertEqual('faff', cpu_bound().result()) def test_implementation_without_decorator(self): def function_name(x: str) -> Unfuture[str]: async_method = unsync(__function_name_synced) return async_method(x) def __function_name_synced(x: str) -> str: return x + 'a' future_result = function_name('b') self.assertEqual('ba', future_result.result())
MIT License
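The set_attr decorator above only attaches an attribute to whatever it wraps; a tiny sketch of that behaviour, assuming set_attr from the test module is in scope (the attribute value below is made up):

    @set_attr('needs_login')
    def view():
        return 'ok'

    assert view.attr == 'needs_login'  # wrapper(f) stored the value on f
    assert view() == 'ok'              # the function itself is returned unchanged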
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1_token_request_spec.py
V1TokenRequestSpec.audiences
python
def audiences(self, audiences): if self.local_vars_configuration.client_side_validation and audiences is None: raise ValueError("Invalid value for `audiences`, must not be `None`") self._audiences = audiences
Sets the audiences of this V1TokenRequestSpec. Audiences are the intended audiences of the token. A recipient of a token must identify themselves with an identifier in the list of audiences of the token, and otherwise should reject the token. A token issued for multiple audiences may be used to authenticate against any of the audiences listed but implies a high degree of trust between the target audiences. # noqa: E501 :param audiences: The audiences of this V1TokenRequestSpec. # noqa: E501 :type: list[str]
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1_token_request_spec.py#L76-L87
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1TokenRequestSpec(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'audiences': 'list[str]', 'bound_object_ref': 'V1BoundObjectReference', 'expiration_seconds': 'int' } attribute_map = { 'audiences': 'audiences', 'bound_object_ref': 'boundObjectRef', 'expiration_seconds': 'expirationSeconds' } def __init__(self, audiences=None, bound_object_ref=None, expiration_seconds=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._audiences = None self._bound_object_ref = None self._expiration_seconds = None self.discriminator = None self.audiences = audiences if bound_object_ref is not None: self.bound_object_ref = bound_object_ref if expiration_seconds is not None: self.expiration_seconds = expiration_seconds @property def audiences(self): return self._audiences @audiences.setter
Apache License 2.0
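A brief sketch of the generated model in use; with the default client-side validation the setter above rejects None, so audiences must always be supplied.

    from kubernetes_asyncio.client.models.v1_token_request_spec import V1TokenRequestSpec

    spec = V1TokenRequestSpec(audiences=['https://kubernetes.default.svc'])
    print(spec.audiences)    # ['https://kubernetes.default.svc']

    try:
        spec.audiences = None  # hits the validation branch shown above
    except ValueError as err:
        print(err)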
transientskp/tkp
tkp/sourcefinder/utils.py
generate_subthresholds
python
def generate_subthresholds(min_value, max_value, num_thresholds): subthrrange = numpy.logspace( 0.0, numpy.log(max_value + 1 - min_value), num=num_thresholds+1, base=numpy.e, endpoint=False )[1:] subthrrange += (min_value - 1) return subthrrange
Generate a series of ``num_thresholds`` logarithmically spaced values in the range (min_value, max_value) (both exclusive).
https://github.com/transientskp/tkp/blob/c4d6c3c59d51c083509316ba0e25dd8e732ee23b/tkp/sourcefinder/utils.py#L11-L29
import numpy import math import scipy.integrate from tkp.sourcefinder.gaussian import gaussian from tkp.utility import coordinates
BSD 2-Clause Simplified License
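A quick numeric check of the helper above: with min_value=1 and max_value=10 the five thresholds are the fractional powers of ten strictly between the bounds.

    import numpy
    from tkp.sourcefinder.utils import generate_subthresholds

    thresholds = generate_subthresholds(1.0, 10.0, 5)
    print(thresholds)  # roughly [1.47, 2.15, 3.16, 4.64, 6.81]
    assert numpy.all(thresholds > 1.0) and numpy.all(thresholds < 10.0)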
zachjweiner/pystella
pystella/sectors.py
Sector.stress_tensor
python
def stress_tensor(self, mu, nu, drop_trace=True): raise NotImplementedError
:arg drop_trace: Whether to drop the term :math:`g_{\\mu\\nu} \\mathcal{L}`. Defaults to *True*. :returns: The component :math:`T_{\\mu\\nu}` of the stress-energy tensor of the particular :class:`Sector`. Used by :class:`TensorPerturbationSector`, with ``drop_trace=True``.
https://github.com/zachjweiner/pystella/blob/97aa42aa9aaa4765ab5dc858e90921c6959b7ce7/pystella/sectors.py#L79-L89
__copyright__ = "Copyright (C) 2019 Zachary J Weiner" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import numpy as np from pystella import DynamicField, Field from pystella.field import diff from pymbolic import var __doc__ = """ .. currentmodule:: pystella .. autoclass:: Sector .. autoclass:: ScalarSector .. autoclass:: TensorPerturbationSector .. currentmodule:: pystella.sectors .. autofunction:: get_rho_and_p """ eta = [-1, 1, 1, 1] class Sector: def __init__(self): raise NotImplementedError @property def rhs_dict(self): raise NotImplementedError @property def reducers(self): raise NotImplementedError
MIT License
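Sector is abstract, so a concrete subclass has to supply the hooks; below is a bare-bones sketch (not a physically meaningful sector) showing the shape such a subclass takes under the interface visible above.

    from pystella import Sector

    class TrivialSector(Sector):
        # Illustrative only: every hook is stubbed out.

        def __init__(self):
            pass  # the base __init__ raises NotImplementedError

        @property
        def rhs_dict(self):
            return {}  # no dynamical fields

        @property
        def reducers(self):
            return {}  # nothing to reduce

        def stress_tensor(self, mu, nu, drop_trace=True):
            return 0  # vanishing stress-energy

    sector = TrivialSector()
    print(sector.rhs_dict, sector.stress_tensor(0, 0))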
yandex-cloud/python-sdk
yandex/cloud/mdb/clickhouse/v1/cluster_service_pb2_grpc.py
ClusterServiceServicer.DeleteShardGroup
python
def DeleteShardGroup(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
Deletes the specified shard group.
https://github.com/yandex-cloud/python-sdk/blob/6ddaaaf0ad01d8fc36cb72957f70a6e7943a5ce7/yandex/cloud/mdb/clickhouse/v1/cluster_service_pb2_grpc.py#L380-L385
import grpc from yandex.cloud.mdb.clickhouse.v1 import cluster_pb2 as yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__pb2 from yandex.cloud.mdb.clickhouse.v1 import cluster_service_pb2 as yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2 from yandex.cloud.operation import operation_pb2 as yandex_dot_cloud_dot_operation_dot_operation__pb2 class ClusterServiceStub(object): def __init__(self, channel): self.Get = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Get', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.GetClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__pb2.Cluster.FromString, ) self.List = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/List', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClustersRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClustersResponse.FromString, ) self.Create = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Create', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.CreateClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Update = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Update', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.UpdateClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Delete = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Delete', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.DeleteClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Start = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Start', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.StartClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Stop = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Stop', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.StopClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Move = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Move', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.MoveClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.AddZookeeper = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/AddZookeeper', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.AddClusterZookeeperRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Backup = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Backup', 
request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.BackupClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.Restore = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/Restore', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.RestoreClusterRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.RescheduleMaintenance = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/RescheduleMaintenance', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.RescheduleMaintenanceRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.ListLogs = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListLogs', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterLogsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterLogsResponse.FromString, ) self.StreamLogs = channel.unary_stream( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/StreamLogs', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.StreamClusterLogsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.StreamLogRecord.FromString, ) self.ListOperations = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListOperations', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterOperationsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterOperationsResponse.FromString, ) self.ListBackups = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListBackups', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterBackupsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterBackupsResponse.FromString, ) self.ListHosts = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListHosts', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterHostsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterHostsResponse.FromString, ) self.AddHosts = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/AddHosts', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.AddClusterHostsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.DeleteHosts = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/DeleteHosts', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.DeleteClusterHostsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.GetShard = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/GetShard', 
request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.GetClusterShardRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__pb2.Shard.FromString, ) self.ListShards = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListShards', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterShardsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterShardsResponse.FromString, ) self.AddShard = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/AddShard', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.AddClusterShardRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.UpdateShard = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/UpdateShard', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.UpdateClusterShardRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.DeleteShard = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/DeleteShard', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.DeleteClusterShardRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.GetShardGroup = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/GetShardGroup', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.GetClusterShardGroupRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__pb2.ShardGroup.FromString, ) self.ListShardGroups = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/ListShardGroups', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterShardGroupsRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.ListClusterShardGroupsResponse.FromString, ) self.CreateShardGroup = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/CreateShardGroup', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.CreateClusterShardGroupRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.UpdateShardGroup = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/UpdateShardGroup', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.UpdateClusterShardGroupRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.DeleteShardGroup = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/DeleteShardGroup', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.DeleteClusterShardGroupRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.CreateExternalDictionary = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/CreateExternalDictionary', 
request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.CreateClusterExternalDictionaryRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) self.DeleteExternalDictionary = channel.unary_unary( '/yandex.cloud.mdb.clickhouse.v1.ClusterService/DeleteExternalDictionary', request_serializer=yandex_dot_cloud_dot_mdb_dot_clickhouse_dot_v1_dot_cluster__service__pb2.DeleteClusterExternalDictionaryRequest.SerializeToString, response_deserializer=yandex_dot_cloud_dot_operation_dot_operation__pb2.Operation.FromString, ) class ClusterServiceServicer(object): def Get(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def List(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Create(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Update(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Delete(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Start(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Stop(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Move(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddZookeeper(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Backup(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def Restore(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def RescheduleMaintenance(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListLogs(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def StreamLogs(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListOperations(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListBackups(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) 
context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListHosts(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddHosts(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteHosts(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetShard(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListShards(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def AddShard(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateShard(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteShard(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def GetShardGroup(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def ListShardGroups(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateShardGroup(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateShardGroup(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!')
MIT License
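The generated base class above deliberately leaves every RPC unimplemented; a server-side sketch simply subclasses it and overrides the calls it needs. The empty Operation returned below is a placeholder, purely for illustration.

    from yandex.cloud.mdb.clickhouse.v1 import cluster_service_pb2_grpc
    from yandex.cloud.operation import operation_pb2

    class MyClusterService(cluster_service_pb2_grpc.ClusterServiceServicer):

        def DeleteShardGroup(self, request, context):
            # Real code would start the deletion and describe it in the Operation.
            return operation_pb2.Operation()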
felixriese/susi
susi/SOMEstimator.py
SOMEstimator.predict
python
def predict( self, X: Sequence, y: Optional[Sequence] = None ) -> List[float]: check_is_fitted(self, ["X_", "y_"]) X = check_array(X, dtype=np.float64) y_pred_list = [] for dp in tqdm(X, desc="predict", **self.tqdm_params_): y_pred_list.append(self._calc_estimation_output(dp, proba=False)) y_pred = np.array(y_pred_list) return y_pred
Predict output of data X. Parameters ---------- X : array-like matrix of shape = [n_samples, n_features] The prediction input samples. y : None, optional Ignored. Returns ------- y_pred : list of float List of predicted values. Examples -------- Fit the SOM on your data `X, y`: >>> import susi >>> som = susi.SOMClassifier() >>> som.fit(X, y) >>> y_pred = som.predict(X)
https://github.com/felixriese/susi/blob/fdac3651cb6e9c0aad4281e56633ce8f7cb135d5/susi/SOMEstimator.py#L255-L291
from abc import ABC, abstractmethod from typing import List, Optional, Sequence, Tuple, Union import numpy as np from sklearn.base import BaseEstimator from sklearn.utils.validation import check_array, check_is_fitted from tqdm import tqdm from .SOMClustering import SOMClustering from .SOMUtils import check_estimation_input, modify_weight_matrix_online class SOMEstimator(SOMClustering, BaseEstimator, ABC): def __init__( self, n_rows: int = 10, n_columns: int = 10, *, init_mode_unsupervised: str = "random", init_mode_supervised: str = "random", n_iter_unsupervised: int = 1000, n_iter_supervised: int = 1000, train_mode_unsupervised: str = "online", train_mode_supervised: str = "online", neighborhood_mode_unsupervised: str = "linear", neighborhood_mode_supervised: str = "linear", learn_mode_unsupervised: str = "min", learn_mode_supervised: str = "min", distance_metric: str = "euclidean", learning_rate_start: float = 0.5, learning_rate_end: float = 0.05, nbh_dist_weight_mode: str = "pseudo-gaussian", missing_label_placeholder: Optional[Union[int, str]] = None, n_jobs: Optional[int] = None, random_state=None, verbose: Optional[int] = 0, ) -> None: super().__init__( n_rows=n_rows, n_columns=n_columns, init_mode_unsupervised=init_mode_unsupervised, n_iter_unsupervised=n_iter_unsupervised, train_mode_unsupervised=train_mode_unsupervised, neighborhood_mode_unsupervised=neighborhood_mode_unsupervised, learn_mode_unsupervised=learn_mode_unsupervised, distance_metric=distance_metric, learning_rate_start=learning_rate_start, learning_rate_end=learning_rate_end, nbh_dist_weight_mode=nbh_dist_weight_mode, n_jobs=n_jobs, random_state=random_state, verbose=verbose, ) self.init_mode_supervised = init_mode_supervised self.n_iter_supervised = n_iter_supervised self.train_mode_supervised = train_mode_supervised self.neighborhood_mode_supervised = neighborhood_mode_supervised self.learn_mode_supervised = learn_mode_supervised self.missing_label_placeholder = missing_label_placeholder @abstractmethod def _init_super_som(self) -> None: return None def fit(self, X: Sequence, y: Optional[Sequence] = None): X, y = check_estimation_input(X, y) self.X_: np.ndarray = X self.y_: np.ndarray = y self.n_features_in_ = self.X_.shape[1] return self._fit_estimator() def _fit_estimator(self): np.random.seed(seed=self.random_state) if self.missing_label_placeholder is None: self.labeled_indices_ = list(range(len(self.y_))) self.sample_weights_ = np.full( fill_value=1.0, shape=(len(self.X_), 1) ) else: self.labeled_indices_ = np.where( self.y_ != self.missing_label_placeholder )[0] unlabeled_weight = max( len(self.labeled_indices_) / len(self.y_), 0.1 ) self.sample_weights_ = np.full( fill_value=unlabeled_weight, shape=(len(self.X_), 1) ) self.sample_weights_[self.labeled_indices_] = 1.0 self._train_unsupervised_som() self._train_supervised_som() self.fitted_ = True return self
BSD 3-Clause New or Revised License
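Expanding the docstring's example into something self-contained; the data is random noise, so the predictions only demonstrate the API, and the constructor arguments mirror the SOMEstimator parameters shown above.

    import numpy as np
    import susi

    X = np.random.rand(120, 4)
    y = np.random.randint(0, 2, size=120)

    som = susi.SOMClassifier(n_rows=6, n_columns=6,
                             n_iter_unsupervised=200, n_iter_supervised=200)
    som.fit(X, y)
    y_pred = som.predict(X)
    print(y_pred[:10])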
yelp/mycroft
mycroft/sherlock/batch/ingest_multiple_dates.py
ParallelEtStepper._setup_pool_size
python
def _setup_pool_size(self, cl_args, cpus): if cl_args.pool_size < 1: self.pool_size = 1 elif cl_args.pool_size > cpus and not cl_args.exceed_max_processes: self.pool_size = cpus else: self.pool_size = cl_args.pool_size date_span = (self.end - self.start).days + 1 self.pool_size = min(self.pool_size, date_span)
_setup_pool_size uses the pool_size and exceed_max_processes command-line inputs, the number of cpus passed as an argument, and the date span computed in the constructor to determine a pool size. It is broken out so it can be unit tested cleanly. Args: cl_args -- an argparse.Namespace built from the command-line arguments cpus -- an integer number of cpus Sets: self.pool_size -- the number of processes used to parallelize the ET step
https://github.com/yelp/mycroft/blob/9c6185891aa367bb85a179bff35fbf4501ddf848/mycroft/sherlock/batch/ingest_multiple_dates.py#L333-L356
import argparse import copy from datetime import datetime from datetime import timedelta import itertools from multiprocessing import cpu_count from multiprocessing import Pool from multiprocessing import ProcessError import staticconf as sc import sys import time import traceback from sherlock.common.config_util import load_package_config from sherlock.batch.s3_to_psv import s3_to_psv_main from sherlock.batch.s3_to_redshift import s3_to_redshift_main from sherlock.common.pipeline import add_load_args from sherlock.common.pipeline import get_base_parser from sherlock.common.pipeline import load_io_yaml_from_args class KeyboardInterruptError(Exception): pass class DateAction(argparse.Action): def __init__(self, *args, **kwargs): argparse.Action.__init__(self, *args, **kwargs) def __call__(self, parser, namespace, values, option_string=None): try: setattr(namespace, self.dest, datetime.strptime(values, "%Y-%m-%d")) except ValueError as value_error: raise argparse.ArgumentError(self, value_error.args[0]) class ETLStep(object): step_type_to_command = { 'et': s3_to_psv_main, 'load': s3_to_redshift_main } def __init__(self, input_args, step_date, step_type): self.input_args = input_args self.step_date = step_date self.step_type = step_type self._add_date_to_input_args() def __repr__(self): return "{0}(type: {1} date: {2})".format( self.__class__.__name__, self.step_type, self.step_date ) def _add_date_to_input_args(self): parser = argparse.ArgumentParser() parser.add_argument('--date') namespace_copy = copy.deepcopy(self.input_args) self.input_args = parser.parse_args( args=['--date', self.step_date], namespace=namespace_copy ) def execute(self): print self if not self.input_args.dry_run: self.step_type_to_command[self.step_type](self.input_args) class ETLStepper(object): def __init__(self, input_args): self.start = input_args.start_date self.end = input_args.end_date self.poll_interval_seconds = input_args.load_polling_interval if self.poll_interval_seconds < 1: self._try_load_step = self._load_step else: self._try_load_step = self._polling_load_step self.input_args = input_args self._check_dates() self.et_generator = None self.load_generator = None def _check_dates(self): if self.start > self.end: temp = self.start self.start = self.end self.end = temp def _construct_step(self, days_from_start, step_type): step_date = (self.start + timedelta(days=days_from_start)).strftime("%Y-%m-%d") return ETLStep(self.input_args, step_date, step_type) def _get_step_type_imap(self, step_type): return itertools.imap( lambda days_from_start: self._construct_step( days_from_start, step_type ), xrange((self.end - self.start).days + 1) ) def _construct_etl_generator(self, generator_type=None): if generator_type == 'et': self.et_generator = self._get_step_type_imap('et') elif generator_type == 'load': self.load_generator = self._get_step_type_imap('load') else: self.et_generator = self._get_step_type_imap('et') self.load_generator = self._get_step_type_imap('load') def execute_et_steps(self): self._construct_etl_generator('et') results = [] for step in self.et_generator: step_result = _init_step_result(step) try: step.execute() step_result['status'] = 'success' except (KeyboardInterrupt, KeyboardInterruptError): raise except Exception: step_result['status'] = 'error' step_result['error_info'] = _capture_error_info() raise ProcessError( {'results': results, 'failures': [step_result]} ) finally: step_result['end_time'] = time.time() results.append(step_result) return results def execute_load_steps(self): 
self._construct_etl_generator('load') results = [] for step in self.load_generator: step_result = _init_step_result(step) try: self._try_load_step(step) step_result['status'] = 'success' except (KeyboardInterrupt, KeyboardInterruptError): raise except Exception: step_result['status'] = 'error' step_result['error_info'] = _capture_error_info() raise ProcessError( {'results': results, 'failures': [step_result]} ) finally: step_result['end_time'] = time.time() results.append(step_result) return results def _load_step(self, step): step.execute() def _polling_load_step(self, step): while True: try: step.execute() break except IOError as io_error: print repr(io_error) print "sleeping {0} minutes".format( self.poll_interval_seconds / 60 ) time.sleep(self.poll_interval_seconds) def _capture_error_info(): exc_type, exc_value, exc_tb = sys.exc_info() return { 'crash_tb': ''.join(traceback.format_tb(exc_tb)), 'crash_exc': traceback.format_exception_only( exc_type, exc_value )[0].strip(), } def _init_step_result(step_object): return { 'status': 'unknown', 'date': step_object.step_date, 'start_time': time.time(), 'end_time': None, 'type': step_object.step_type, 'error_info': {}, 'repr': repr(step_object) } def _executor(step_object): step_result = _init_step_result(step_object) try: step_object.execute() step_result['status'] = 'success' except KeyboardInterrupt: raise KeyboardInterruptError() except: step_result['status'] = 'error' step_result['error_info'] = _capture_error_info() pass finally: step_result['end_time'] = time.time() return step_result class ParallelEtStepper(ETLStepper): def __init__(self, input_args): super(ParallelEtStepper, self).__init__(input_args) cpus = cpu_count() self.pool_size = None self._setup_pool_size(input_args, cpus)
MIT License
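A worked example of the pool-size logic above, restated inline (the numbers are made up): with 4 CPUs, --pool-size 8, exceed_max_processes off, and a three-day window, the ET step ends up with 3 worker processes.

    from argparse import Namespace
    from datetime import datetime

    cl_args = Namespace(pool_size=8, exceed_max_processes=False)
    cpus = 4
    start, end = datetime(2024, 1, 1), datetime(2024, 1, 3)

    # Same branching as _setup_pool_size: clamp to [1, cpus], then to the date span.
    if cl_args.pool_size < 1:
        pool_size = 1
    elif cl_args.pool_size > cpus and not cl_args.exceed_max_processes:
        pool_size = cpus
    else:
        pool_size = cl_args.pool_size
    date_span = (end - start).days + 1
    print(min(pool_size, date_span))  # -> 3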
xanaduai/strawberryfields
strawberryfields/circuitdrawer.py
Circuit._d
python
def _d(self, wire): self._single_mode_gate(wire, D_COMP)
Adds a displacement operator to the circuit. Args: wire (int): the subsystem wire to apply the operator to.
https://github.com/xanaduai/strawberryfields/blob/c1eed81a93419cb9c28a6ca205925691063722ce/strawberryfields/circuitdrawer.py#L249-L255
import datetime import os DOCUMENT_CLASS = r"\documentclass{article}" EMPTY_PAGESTYLE = r"\pagestyle{empty}" QCIRCUIT_PACKAGE = r"\usepackage{qcircuit}" BEGIN_DOCUMENT = r"\begin{document}" DOCUMENT_END = r"\end{document}" CIRCUIT_START = r"\Qcircuit" COLUMN_SPACING = "@C={0}" ROW_SPACING = "@R={0}" UNIFORM_ROW_SPACING = "@!R" UNIFORM_COLUMN_SPACING = "@!C" UNIFORM_ELEMENT_SPACING = "@!" QUANTUM_WIRE = r"\qw" MULTI_QUANTUM_WIRE = r"\qw[{0}]" VERTICAL_QUANTUM_WIRE = r"\qwx[{0}]" WIRE_END = r"\qwa[{0}]" CLASSICAL_WIRE = r"\cw[{0}]" CLASSICAL_WIRE_END = r"\cwa[{0}]" VERTICAL_CLASSICAL_WIRE = r"\cwx[{0}]" LABELLED_GATE = r"\gate{{{0}}}" TARGET = r"\targ" SWAP = r"\qswap" MULTIGATE = r"\multigate{{{0}}}{{{1}}}" NON_ADJACENT_MULTIGATE = r"\sgate{{{0}}}{{{1}}}" GHOST = r"\ghost{{{0}}}" CLASSICAL_GHOST = r"\cghost{{{0}}}" NO_GHOST = r"\nghost{{{0}}}" CONTROL = r"\ctrl{{{0}}}" CONTROL_ON_ZERO = r"\ctrlo{{{0}}}" CLASSICAL_CONTROL = r"\cctrl{{{0}}}" CLASSICAL_CONTROL_ON_ZERO = r"\cctrlo{{{0}}}" ISOLATED_CONTROL = r"\control" ISOLATED_CONTROL_ON_ZERO = r"\controlo" METER = r"\meter" BASIS_METER = r"\meterB{{{0}}}" SPLIT_BASIS_METER = r"\smeterB{{{0}}}{{{1}}}" MEASURE = r"\measuretab{{{0}}}" MULTIMEASURE = r"\multimeasure{{{0}}}{{{1}}}" LEFT_WIRE_LABEL = r"\lstick{{{0}}}" RIGHT_WIRE_LABEL = r"\rstick{{{0}}}" BRA = r"\bra{{{0}}}" KET = r"\ket{{{0}}}" HADAMARD_COMP = LABELLED_GATE.format("H") PAULI_X_COMP = LABELLED_GATE.format("X") PAULI_Y_COMP = LABELLED_GATE.format("Y") PAULI_Z_COMP = LABELLED_GATE.format("Z") D_COMP = LABELLED_GATE.format("D") S_COMP = LABELLED_GATE.format("S") R_COMP = LABELLED_GATE.format("R") P_COMP = LABELLED_GATE.format("P") V_COMP = LABELLED_GATE.format("V") K_COMP = LABELLED_GATE.format("K") FOURIER_COMP = LABELLED_GATE.format("F") BS_MULTI_COMP = "BS" S_MULTI_COMP = "S" WIRE_OPERATION = "& {0}" WIRE_TERMINATOR = r"\\" + "\n" CIRCUIT_BODY_TERMINATOR = "}\n" CIRCUIT_BODY_START = " {" + "\n" INIT_DOCUMENT = ( DOCUMENT_CLASS + "\n" + EMPTY_PAGESTYLE + "\n" + QCIRCUIT_PACKAGE + "\n" + BEGIN_DOCUMENT + "\n" + CIRCUIT_START ) PIPE = "|" LINE_RETURN = "\n" class NotDrawableException(Exception): pass class ModeMismatchException(Exception): pass class UnsupportedGateException(Exception): pass class Circuit: _circuit_matrix = [] def __init__(self, wires): self._document = "" self._circuit_matrix = [[QUANTUM_WIRE.format(1)] for wire in range(wires)] self._column_spacing = None self._row_spacing = None self.single_mode_gates = { "Xgate": self._x, "Zgate": self._z, "Dgate": self._d, "Sgate": self._s, "Rgate": self._r, "Pgate": self._p, "Vgate": self._v, "Kgate": self._k, "Fourier": self._fourier, } self.two_mode_gates = { "CXgate": self._cx, "CZgate": self._cz, "CKgate": self._ck, "BSgate": self._bs, "S2gate": self._s2, } def _gate_from_operator(self, op): operator = str(op).split(PIPE)[0] method = None mode = None for two_mode_gate in self.two_mode_gates: if two_mode_gate in operator: method = self.two_mode_gates[two_mode_gate] mode = 2 if method is None: for single_mode_gate in self.single_mode_gates: if single_mode_gate in operator: method = self.single_mode_gates[single_mode_gate] mode = 1 return method, mode def parse_op(self, op): if not op.__class__.__name__ == "Command": return method, mode = self._gate_from_operator(op) wires = list(map(lambda register: register.ind, op.reg)) if method is None: raise UnsupportedGateException( "Unsupported operation {0} not printable by circuit builder!".format(str(op)) ) if mode == len(wires): method(*wires) else: raise ModeMismatchException( "{0} 
mode gate applied to {1} wires!".format(mode, len(wires)) ) def _x(self, wire): self._single_mode_gate(wire, PAULI_X_COMP) def _z(self, wire): self._single_mode_gate(wire, PAULI_Z_COMP) def _s(self, wire): self._single_mode_gate(wire, S_COMP)
Apache License 2.0
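A short sketch of the drawer in use; it leans on the rest of the Circuit class (notably _single_mode_gate, which the snippet above calls but does not show), so it assumes the full strawberryfields.circuitdrawer module is importable.

    from strawberryfields.circuitdrawer import Circuit

    circuit = Circuit(wires=2)
    circuit._d(0)  # queue a displacement symbol 'D' on mode 0
    circuit._x(1)  # and a Pauli-X symbol on mode 1
    print(circuit._circuit_matrix)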
haoyuhu/bert-multi-gpu
run_custom_classifier_mlabel.py
file_based_convert_examples_to_features
python
def file_based_convert_examples_to_features( examples, max_seq_length, tokenizer, output_file): writer = tf.python_io.TFRecordWriter(output_file) for (ex_index, example) in enumerate(examples): if ex_index % 10000 == 0: tf.logging.info("Writing example %d of %d" % (ex_index, len(examples))) feature = convert_single_example(ex_index, example, max_seq_length, tokenizer) def create_int_feature(values): f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) return f features = collections.OrderedDict() features["input_ids"] = create_int_feature(feature.input_ids) features["input_mask"] = create_int_feature(feature.input_mask) features["segment_ids"] = create_int_feature(feature.segment_ids) if isinstance(feature.label_ids, list): label_ids = feature.label_ids else: label_ids = feature.label_ids[0] features["label_ids"] = create_int_feature(label_ids) features["is_real_example"] = create_int_feature( [int(feature.is_real_example)]) tf_example = tf.train.Example(features=tf.train.Features(feature=features)) writer.write(tf_example.SerializeToString()) writer.close()
Convert a set of `InputExample`s to a TFRecord file.
https://github.com/haoyuhu/bert-multi-gpu/blob/66dc4ec8af70ec13e44749dae2ca8e5928f4b02e/run_custom_classifier_mlabel.py#L375-L408
from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import csv import json import os import numpy as np import tensorflow as tf from tensorflow.python.distribute.cross_device_ops import AllReduceCrossDeviceOps from tensorflow.python.estimator.estimator import Estimator from tensorflow.python.estimator.run_config import RunConfig import custom_optimization import modeling import optimization import tokenization flags = tf.flags FLAGS = flags.FLAGS flags.DEFINE_integer( "num_labels", 10, "Number of labels in dataset for multi-label classification") flags.DEFINE_string( "data_dir", None, "The input data dir. Should contain the .tsv files (or other data files) " "for the task.") flags.DEFINE_string( "bert_config_file", None, "The config json file corresponding to the pre-trained BERT model. " "This specifies the model architecture.") flags.DEFINE_string("task_name", None, "The name of the task to train.") flags.DEFINE_string("vocab_file", None, "The vocabulary file that the BERT model was trained on.") flags.DEFINE_string( "output_dir", None, "The output directory where the model checkpoints will be written.") flags.DEFINE_string( "init_checkpoint", None, "Initial checkpoint (usually from a pre-trained BERT model).") flags.DEFINE_bool( "do_lower_case", True, "Whether to lower case the input text. Should be True for uncased " "models and False for cased models.") flags.DEFINE_integer( "max_seq_length", 128, "The maximum total input sequence length after WordPiece tokenization. " "Sequences longer than this will be truncated, and sequences shorter " "than this will be padded.") flags.DEFINE_bool("do_train", False, "Whether to run training.") flags.DEFINE_bool("do_eval", False, "Whether to run eval on the dev set.") flags.DEFINE_bool( "do_predict", False, "Whether to run the model in inference mode on the test set.") flags.DEFINE_bool( "save_for_serving", False, "Whether to save the model for tensorflow serving.") flags.DEFINE_integer("train_batch_size", 32, "Total batch size for training.") flags.DEFINE_integer("eval_batch_size", 8, "Total batch size for eval.") flags.DEFINE_integer("predict_batch_size", 8, "Total batch size for predict.") flags.DEFINE_float("learning_rate", 5e-5, "The initial learning rate for Adam.") flags.DEFINE_float("num_train_epochs", 3.0, "Total number of training epochs to perform.") flags.DEFINE_float( "warmup_proportion", 0.1, "Proportion of training to perform linear learning rate warmup for. " "E.g., 0.1 = 10% of training.") flags.DEFINE_integer("save_checkpoints_steps", 10000, "How often to save the model checkpoint.") flags.DEFINE_integer("iterations_per_loop", 10000, "How many steps to make in each estimator call.") flags.DEFINE_bool("use_tpu", False, "Whether to use TPU or GPU/CPU.") tf.flags.DEFINE_string( "tpu_name", None, "The Cloud TPU to use for training. This should be either the name " "used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 " "url.") tf.flags.DEFINE_string( "tpu_zone", None, "[Optional] GCE zone where the Cloud TPU is located in. If not " "specified, we will attempt to automatically detect the GCE project from " "metadata.") tf.flags.DEFINE_string( "gcp_project", None, "[Optional] Project name for the Cloud TPU-enabled project. 
If not " "specified, we will attempt to automatically detect the GCE project from " "metadata.") tf.flags.DEFINE_string("master", None, "[Optional] TensorFlow master URL.") flags.DEFINE_integer( "num_tpu_cores", 8, "Only used if `use_tpu` is True. Total number of TPU cores to use.") flags.DEFINE_bool("use_gpu", False, "Whether to use GPU.") flags.DEFINE_integer( "num_gpu_cores", 0, "Only used if `use_gpu` is True. Total number of GPU cores to use." ) flags.DEFINE_bool("use_fp16", False, "Whether to use fp16.") class InputExample(object): def __init__(self, guid, text_a, text_b=None, labels=None): self.guid = guid self.text_a = text_a self.text_b = text_b self.labels = labels class PaddingInputExample(object): class InputFeatures(object): def __init__(self, input_ids, input_mask, segment_ids, label_ids, is_real_example=True): self.input_ids = input_ids self.input_mask = input_mask self.segment_ids = segment_ids self.label_ids = label_ids self.is_real_example = is_real_example class DataProcessor(object): def get_train_examples(self, data_dir): raise NotImplementedError() def get_dev_examples(self, data_dir): raise NotImplementedError() def get_test_examples(self, data_dir): raise NotImplementedError() def get_labels(self): raise NotImplementedError() @classmethod def _read_tsv(cls, input_file, quotechar=None): with tf.gfile.Open(input_file, "r") as f: reader = csv.reader(f, delimiter="\t", quotechar=quotechar) lines = [] for line in reader: lines.append(line) return lines class MlabelProcessor(DataProcessor): def get_train_examples(self, data_dir): return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") def get_dev_examples(self, data_dir): return self._create_examples( self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") def get_test_examples(self, data_dir): return self._create_examples( self._read_tsv(os.path.join(data_dir, "test.tsv")), "test") def _create_examples(self, lines, set_type): examples = [] for (i, line) in enumerate(lines): if i == 0: continue guid = "%s-%s" % (set_type, i) text_a = tokenization.convert_to_unicode(line[1]) if set_type == "test": labels = ["0"] * NUM_LABELS else: labels = line[2:] examples.append( InputExample(guid=guid, text_a=text_a, text_b=None, labels=labels)) return examples def convert_single_example(ex_index, example, max_seq_length, tokenizer): if isinstance(example, PaddingInputExample): return InputFeatures( input_ids=[0] * max_seq_length, input_mask=[0] * max_seq_length, segment_ids=[0] * max_seq_length, label_id=0, is_real_example=False) tokens_a = tokenizer.tokenize(example.text_a) tokens_b = None if example.text_b: tokens_b = tokenizer.tokenize(example.text_b) if tokens_b: _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) else: if len(tokens_a) > max_seq_length - 2: tokens_a = tokens_a[0:(max_seq_length - 2)] tokens = [] segment_ids = [] tokens.append("[CLS]") segment_ids.append(0) for token in tokens_a: tokens.append(token) segment_ids.append(0) tokens.append("[SEP]") segment_ids.append(0) if tokens_b: for token in tokens_b: tokens.append(token) segment_ids.append(1) tokens.append("[SEP]") segment_ids.append(1) input_ids = tokenizer.convert_tokens_to_ids(tokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_length: input_ids.append(0) input_mask.append(0) segment_ids.append(0) assert len(input_ids) == max_seq_length assert len(input_mask) == max_seq_length assert len(segment_ids) == max_seq_length label_ids = [] for label in example.labels: label_ids.append(int(label)) if 
ex_index < 5: tf.logging.info("*** Example ***") tf.logging.info("guid: %s" % (example.guid)) tf.logging.info("tokens: %s" % " ".join( [tokenization.printable_text(x) for x in tokens])) tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) tf.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) tf.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids])) tf.logging.info("label: %s (id = %s)" % (example.labels, label_ids)) feature = InputFeatures( input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, label_ids=label_ids, is_real_example=True) return feature
Apache License 2.0
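A hedged sketch of driving the converter above: the vocab and data paths are placeholders, FullTokenizer comes from the standard BERT tokenization module already imported by the script, and get_dev_examples is the MlabelProcessor method shown in the context.

    import tokenization

    tokenizer = tokenization.FullTokenizer(vocab_file='vocab.txt', do_lower_case=True)
    eval_examples = MlabelProcessor().get_dev_examples('data/')

    file_based_convert_examples_to_features(
        eval_examples, max_seq_length=128, tokenizer=tokenizer,
        output_file='eval.tf_record')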
numba/numba
numba/np/ufunc/ufuncbuilder.py
UFuncBuilder._finalize_signature
python
def _finalize_signature(self, cres, args, return_type): return _finalize_ufunc_signature(cres, args, return_type)
Slated for deprecation, use ufuncbuilder._finalize_ufunc_signature() instead.
https://github.com/numba/numba/blob/8d4559a83b7b12da9121c030b8e3780874204a34/numba/np/ufunc/ufuncbuilder.py#L265-L269
import inspect from contextlib import contextmanager from numba.core import config, targetconfig from numba.core.decorators import jit from numba.core.descriptors import TargetDescriptor from numba.core.options import TargetOptions, include_default_options from numba.core.registry import cpu_target from numba.core.target_extension import dispatcher_registry, target_registry from numba.core import utils, types, serialize, compiler, sigutils from numba.np.numpy_support import as_dtype from numba.np.ufunc import _internal from numba.np.ufunc.sigparse import parse_signature from numba.np.ufunc.wrappers import build_ufunc_wrapper, build_gufunc_wrapper from numba.core.caching import FunctionCache, NullCache from numba.core.compiler_lock import global_compiler_lock _options_mixin = include_default_options( "nopython", "forceobj", "boundscheck", "fastmath", "target_backend", ) class UFuncTargetOptions(_options_mixin, TargetOptions): def finalize(self, flags, options): if not flags.is_set("enable_pyobject"): flags.enable_pyobject = True if not flags.is_set("enable_looplift"): flags.enable_looplift = True flags.inherit_if_not_set("nrt", default=True) if not flags.is_set("debuginfo"): flags.debuginfo = config.DEBUGINFO_DEFAULT if not flags.is_set("boundscheck"): flags.boundscheck = flags.debuginfo flags.enable_pyobject_looplift = True flags.inherit_if_not_set("fastmath") class UFuncTarget(TargetDescriptor): options = UFuncTargetOptions def __init__(self): super().__init__('ufunc') @property def typing_context(self): return cpu_target.typing_context @property def target_context(self): return cpu_target.target_context ufunc_target = UFuncTarget() class UFuncDispatcher(serialize.ReduceMixin): targetdescr = ufunc_target def __init__(self, py_func, locals={}, targetoptions={}): self.py_func = py_func self.overloads = utils.UniqueDict() self.targetoptions = targetoptions self.locals = locals self.cache = NullCache() def _reduce_states(self): return dict( pyfunc=self.py_func, locals=self.locals, targetoptions=self.targetoptions, ) @classmethod def _rebuild(cls, pyfunc, locals, targetoptions): return cls(py_func=pyfunc, locals=locals, targetoptions=targetoptions) def enable_caching(self): self.cache = FunctionCache(self.py_func) def compile(self, sig, locals={}, **targetoptions): locs = self.locals.copy() locs.update(locals) topt = self.targetoptions.copy() topt.update(targetoptions) flags = compiler.Flags() self.targetdescr.options.parse_as_flags(flags, topt) flags.no_cpython_wrapper = True flags.error_model = "numpy" flags.enable_looplift = False return self._compile_core(sig, flags, locals) def _compile_core(self, sig, flags, locals): typingctx = self.targetdescr.typing_context targetctx = self.targetdescr.target_context @contextmanager def store_overloads_on_success(): try: yield except Exception: raise else: exists = self.overloads.get(cres.signature) if exists is None: self.overloads[cres.signature] = cres with global_compiler_lock: with targetconfig.ConfigStack().enter(flags.copy()): with store_overloads_on_success(): cres = self.cache.load_overload(sig, targetctx) if cres is not None: return cres args, return_type = sigutils.normalize_signature(sig) cres = compiler.compile_extra(typingctx, targetctx, self.py_func, args=args, return_type=return_type, flags=flags, locals=locals) self.cache.save_overload(sig, cres) return cres dispatcher_registry[target_registry['npyufunc']] = UFuncDispatcher def _compile_element_wise_function(nb_func, targetoptions, sig): cres = nb_func.compile(sig, **targetoptions) 
args, return_type = sigutils.normalize_signature(sig) return cres, args, return_type def _finalize_ufunc_signature(cres, args, return_type): if return_type is None: if cres.objectmode: raise TypeError("return type must be specified for object mode") else: return_type = cres.signature.return_type assert return_type != types.pyobject return return_type(*args) def _build_element_wise_ufunc_wrapper(cres, signature): ctx = cres.target_context library = cres.library fname = cres.fndesc.llvm_func_name with global_compiler_lock: info = build_ufunc_wrapper(library, ctx, fname, signature, cres.objectmode, cres) ptr = info.library.get_pointer_to_function(info.name) dtypenums = [as_dtype(a).num for a in signature.args] dtypenums.append(as_dtype(signature.return_type).num) return dtypenums, ptr, cres.environment _identities = { 0: _internal.PyUFunc_Zero, 1: _internal.PyUFunc_One, None: _internal.PyUFunc_None, "reorderable": _internal.PyUFunc_ReorderableNone, } def parse_identity(identity): try: identity = _identities[identity] except KeyError: raise ValueError("Invalid identity value %r" % (identity,)) return identity class _BaseUFuncBuilder(object): def add(self, sig=None): if hasattr(self, 'targetoptions'): targetoptions = self.targetoptions else: targetoptions = self.nb_func.targetoptions cres, args, return_type = _compile_element_wise_function( self.nb_func, targetoptions, sig) sig = self._finalize_signature(cres, args, return_type) self._sigs.append(sig) self._cres[sig] = cres return cres def disable_compile(self): class UFuncBuilder(_BaseUFuncBuilder): def __init__(self, py_func, identity=None, cache=False, targetoptions={}): self.py_func = py_func self.identity = parse_identity(identity) self.nb_func = jit(_target='npyufunc', cache=cache, **targetoptions)(py_func) self._sigs = [] self._cres = {}
BSD 2-Clause Simplified License
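UFuncBuilder is an internal helper (numba.vectorize drives it under the hood), but a direct sketch of the add() path looks like this; treat it as an illustration of the internal API rather than a supported entry point.

    from numba.np.ufunc.ufuncbuilder import UFuncBuilder

    def add_one(x):
        return x + 1.0

    builder = UFuncBuilder(add_one, targetoptions={'nopython': True})
    builder.add("float64(float64)")  # compiles and records the finalized signature
    print(builder._sigs)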
ninthdevilhaunster/arknightsautohelper
vendor/penguin_client/penguin_client/models/notice.py
Notice.existence
python
def existence(self, existence): self._existence = existence
Sets the existence of this Notice. :param existence: The existence of this Notice. # noqa: E501 :type: dict(str, Existence)
https://github.com/ninthdevilhaunster/arknightsautohelper/blob/d24b4e22a73b333c1acc152556566efad4e94c04/vendor/penguin_client/penguin_client/models/notice.py#L118-L126
import pprint import re import six class Notice(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'conditions': 'ExistConditions', 'content_i18n': 'dict(str, str)', 'existence': 'dict(str, Existence)', 'severity': 'int' } attribute_map = { 'conditions': 'conditions', 'content_i18n': 'content_i18n', 'existence': 'existence', 'severity': 'severity' } def __init__(self, conditions=None, content_i18n=None, existence=None, severity=None): self._conditions = None self._content_i18n = None self._existence = None self._severity = None self.discriminator = None if conditions is not None: self.conditions = conditions if content_i18n is not None: self.content_i18n = content_i18n if existence is not None: self.existence = existence if severity is not None: self.severity = severity @property def conditions(self): return self._conditions @conditions.setter def conditions(self, conditions): self._conditions = conditions @property def content_i18n(self): return self._content_i18n @content_i18n.setter def content_i18n(self, content_i18n): self._content_i18n = content_i18n @property def existence(self): return self._existence @existence.setter
MIT License
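A tiny sketch of the generated model; existence would normally map server codes to Existence objects, but an empty dict keeps the example self-contained, and the severity setter lives further down the generated file.

    from penguin_client.models.notice import Notice

    notice = Notice(severity=1, content_i18n={'en': 'Server maintenance tonight'})
    notice.existence = {}  # the setter above just stores whatever mapping it receives
    print(notice.severity, notice.content_i18n)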
jhl-hust/ibcln
data/resize_natural_3_dataset.py
ResizeNatural3Dataset.__len__
python
def __len__(self): if self.opt.dataset_size == 0 or self.opt.phase == 'test': length = max(self.A1_size, self.A2_size, self.B_size) else: length = self.opt.dataset_size return length
Return the total number of images.
https://github.com/jhl-hust/ibcln/blob/66056ffd83e873536cf26b76fa9532a40cbfa7fa/data/resize_natural_3_dataset.py#L164-L170
import os.path from data.base_dataset import BaseDataset, get_transform from data.image_folder import make_dataset from PIL import Image import random import torchvision.transforms as transforms import numpy as np class RandomCrop(object): def __init__(self, output_size): assert isinstance(output_size, (int, tuple)) if isinstance(output_size, int): self.output_size = (output_size, output_size) else: assert len(output_size) == 2 self.output_size = output_size def __call__(self, sample): image, landmarks = sample['I'], sample['T'] h, w = image.shape[:2] min_a = min(h, w) self.output_size = (min_a * 7 // 10, min_a * 7 // 10) new_h, new_w = self.output_size top = np.random.randint(0, h - new_h) left = np.random.randint(0, w - new_w) image = image[top: top + new_h, left: left + new_w] landmarks = landmarks[top: top + new_h, left: left + new_w] return {'I': image, 'T': landmarks} class ResizeNatural3Dataset(BaseDataset): def __init__(self, opt): BaseDataset.__init__(self, opt) if opt.phase == 'train': self.natural_dir_A1 = os.path.join(opt.dataroot, 'natural_' + 'T') self.natural_dir_B = os.path.join(opt.dataroot, 'natural_' + 'I') self.natural_A1_paths = sorted(make_dataset(self.natural_dir_A1, opt.max_dataset_size)) self.natural_B_paths = sorted(make_dataset(self.natural_dir_B, opt.max_dataset_size)) self.natural_size = len(self.natural_A1_paths) self.crop = RandomCrop(opt.load_size) self.dir_A1 = os.path.join(opt.dataroot, opt.phase + 'A1') self.dir_A2 = os.path.join(opt.dataroot, opt.phase + 'A2') self.dir_B = os.path.join(opt.dataroot, opt.phase + 'B') self.A1_paths = sorted(make_dataset(self.dir_A1, opt.max_dataset_size)) self.A2_paths = sorted(make_dataset(self.dir_A2, opt.max_dataset_size)) if not opt.phase == 'train': self.B_paths = sorted(make_dataset(self.dir_B, opt.max_dataset_size)) self.B_size = len(self.B_paths) self.A1_size = len(self.A1_paths) self.A2_size = len(self.A2_paths) input_nc =self.opt.input_nc output_nc = self.opt.output_nc self.transform_A = get_transform(self.opt, grayscale=(input_nc == 1)) self.transform_B = get_transform(self.opt, grayscale=(output_nc == 1)) print(self.transform_A) self.trans2 = transforms.Compose([transforms.Resize([128, 128]), transforms.ToTensor()]) self.trans4 = transforms.Compose([transforms.Resize([64, 64]), transforms.ToTensor()]) def __getitem__(self, index): is_natural = random.random() <= 0.3 if self.opt.phase == 'train': if is_natural: natural_index = index % self.natural_size A1_path = self.natural_A1_paths[natural_index] B_path = self.natural_B_paths[natural_index] A1_img = np.asarray(Image.open(A1_path).convert('RGB')) A2_img = Image.fromarray(np.zeros_like(A1_img)) B_img = np.asarray(Image.open(B_path).convert('RGB')) imgs = self.crop({'I': B_img, 'T': A1_img}) A1_img, B_img = Image.fromarray(imgs['T']), Image.fromarray(imgs['I']) is_natural_int = 1 else: A1_path = self.A1_paths[index % self.A1_size] index_A2 = random.randint(0, self.A2_size - 1) A2_path = self.A2_paths[index_A2] B_path = '' A1_img = Image.open(A1_path).convert('RGB') A2_img = Image.open(A2_path).convert('RGB') B_img = Image.fromarray(np.zeros_like(A1_img)) is_natural_int = 0 else: B_path = self.B_paths[index] B_img = Image.open(B_path).convert('RGB') if index < len(self.A1_paths): A1_path = self.A1_paths[index] A1_img = Image.open(A1_path).convert('RGB') else: A1_img = Image.fromarray(np.zeros_like(B_img)) A2_img = None is_natural_int = 1 w, h = A1_img.size neww = w // 4 * 4 newh = h // 4 * 4 resize = transforms.Resize([newh, neww]) A1_img = resize(A1_img) A2_img = 
resize(A2_img) if A2_img else None B_img = resize(B_img) A1 = self.transform_A(A1_img) A2 = self.transform_A(A2_img) if A2_img else None B = self.transform_B(B_img) T2 = self.trans2(A1_img) T4 = self.trans4(A1_img) if A2 is not None: return {'T': A1, 'T2': T2, 'T4': T4, 'R': A2, 'I': B, 'B_paths': B_path, 'isNatural': is_natural_int} else: return {'T': A1, 'T2': T2, 'T4': T4, 'I': B, 'B_paths': B_path, 'isNatural': is_natural_int}
BSD 2-Clause Simplified License
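The RandomCrop transform in the record above crops the image and its paired target with the same window so the two stay aligned. A minimal standalone sketch of that idea, using only NumPy; the array shapes and names here are made up for illustration:

import numpy as np

def paired_random_crop(image, target, out_h, out_w):
    # Draw one crop window and apply it to both arrays so they stay aligned.
    h, w = image.shape[:2]
    top = np.random.randint(0, h - out_h)
    left = np.random.randint(0, w - out_w)
    return (image[top:top + out_h, left:left + out_w],
            target[top:top + out_h, left:left + out_w])

# Toy usage with random data standing in for an image/target pair.
img = np.random.rand(256, 256, 3)
tgt = np.random.rand(256, 256, 3)
img_crop, tgt_crop = paired_random_crop(img, tgt, 128, 128)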
kmmbvnr/django-any
django_any/forms.py
url_field_data
python
def url_field_data(field, **kwargs):
    urls = kwargs.get('choices', ['http://news.yandex.ru/society.html',
                                  'http://video.google.com/?hl=en&tab=wv',
                                  'http://www.microsoft.com/en/us/default.aspx',
                                  'http://habrahabr.ru/company/opera/',
                                  'http://www.apple.com/support/hardware/',
                                  'http://localhost/',
                                  'http://72.14.221.99',
                                  'http://fr.wikipedia.org/wiki/France'])
    return random.choice(urls)
Return random value for URLField

>>> result = any_form_field(forms.URLField())
>>> from django.core.validators import URLValidator
>>> import re
>>> re.match(URLValidator.regex, result) is not None
True
https://github.com/kmmbvnr/django-any/blob/6f64ebd05476e2149e2e71deeefbb10f8edfc412/django_any/forms.py#L292-L312
import random from datetime import date, datetime, time from django import forms from django.utils import formats from django_any import xunit from django_any.functions import valid_choices, split_model_kwargs, ExtensionMethod any_form = ExtensionMethod() any_form_field = ExtensionMethod() @any_form.register_default def any_form_default(form_cls, **kwargs): form_data = {} form_files = {} form_fields, fields_args = split_model_kwargs(kwargs) for name, field in form_cls.base_fields.iteritems(): if name in form_fields: form_data[name] = kwargs[name] else: form_data[name] = any_form_field(field, **fields_args[name]) return form_data, form_files @any_form_field.decorator def field_required_attribute(function): def _wrapper(field, **kwargs): if not field.required and random.random < 0.1: return None return function(field, **kwargs) return _wrapper @any_form_field.decorator def field_choices_attibute(function): def _wrapper(field, **kwargs): if hasattr(field.widget, 'choices'): return random.choice(list(valid_choices(field.widget.choices))) return function(field, **kwargs) return _wrapper @any_form_field.register(forms.BooleanField) def boolean_field_data(field, **kwargs): return str(xunit.any_boolean()) @any_form_field.register(forms.CharField) def char_field_data(field, **kwargs): min_length = kwargs.get('min_length', 1) max_length = kwargs.get('max_length', field.max_length or 255) return xunit.any_string(min_length=field.min_length or min_length, max_length=field.max_length or max_length) @any_form_field.register(forms.DecimalField) def decimal_field_data(field, **kwargs): min_value = 0 max_value = 10 from django.core.validators import MinValueValidator, MaxValueValidator for elem in field.validators: if isinstance(elem, MinValueValidator): min_value = elem.limit_value if isinstance(elem, MaxValueValidator): max_value = elem.limit_value if (field.max_digits and field.decimal_places): from decimal import Decimal max_value = min(max_value, Decimal('%s.%s' % ('9'*(field.max_digits-field.decimal_places), '9'*field.decimal_places))) min_value = kwargs.get('min_value') or min_value max_value = kwargs.get('max_value') or max_value return str(xunit.any_decimal(min_value=min_value, max_value=max_value, decimal_places = field.decimal_places or 2)) @any_form_field.register(forms.EmailField) def email_field_data(field, **kwargs): max_length = 10 if field.max_length: max_length = (field.max_length -5) / 2 min_length = 10 if field.min_length: min_length = (field.min_length-4) / 2 return "%s@%s.%s" % ( xunit.any_string(min_length=min_length, max_length=max_length), xunit.any_string(min_length=min_length, max_length=max_length), xunit.any_string(min_length=2, max_length=3)) @any_form_field.register(forms.DateField) def date_field_data(field, **kwargs): from_date = kwargs.get('from_date', date(1990, 1, 1)) to_date = kwargs.get('to_date', date.today()) date_format = random.choice(field.input_formats or formats.get_format('DATE_INPUT_FORMATS')) return xunit.any_date(from_date=from_date, to_date=to_date).strftime(date_format) @any_form_field.register(forms.DateTimeField) def datetime_field_data(field, **kwargs): from_date = kwargs.get('from_date', datetime(1990, 1, 1)) to_date = kwargs.get('to_date', datetime.today()) date_format = random.choice(field.input_formats or formats.get_format('DATETIME_INPUT_FORMATS')) return xunit.any_datetime(from_date=from_date, to_date=to_date).strftime(date_format) @any_form_field.register(forms.FloatField) def float_field_data(field, **kwargs): min_value = 0 max_value = 100 from 
django.core.validators import MinValueValidator, MaxValueValidator for elem in field.validators: if isinstance(elem, MinValueValidator): min_value = elem.limit_value if isinstance(elem, MaxValueValidator): max_value = elem.limit_value min_value = kwargs.get('min_value', min_value) max_value = kwargs.get('max_value', max_value) precision = kwargs.get('precision', 3) return str(xunit.any_float(min_value=min_value, max_value=max_value, precision=precision)) @any_form_field.register(forms.IntegerField) def integer_field_data(field, **kwargs): min_value = 0 max_value = 100 from django.core.validators import MinValueValidator, MaxValueValidator for elem in field.validators: if isinstance(elem, MinValueValidator): min_value = elem.limit_value if isinstance(elem, MaxValueValidator): max_value = elem.limit_value min_value = kwargs.get('min_value', min_value) max_value = kwargs.get('max_value', max_value) return str(xunit.any_int(min_value=min_value, max_value=max_value)) @any_form_field.register(forms.IPAddressField) def ipaddress_field_data(field, **kwargs): choices = kwargs.get('choices') if choices: return random.choice(choices) else: nums = [str(xunit.any_int(min_value=0, max_value=255)) for _ in xrange(0, 4)] return ".".join(nums) @any_form_field.register(forms.NullBooleanField) def null_boolean_field_data(field, **kwargs): return random.choice(['None', 'True', 'False']) @any_form_field.register(forms.SlugField) def slug_field_data(field, **kwargs): min_length = kwargs.get('min_length', 1) max_length = kwargs.get('max_length', field.max_length or 20) from string import ascii_letters, digits letters = ascii_letters + digits + '_-' return xunit.any_string(letters = letters, min_length = min_length, max_length = max_length) @any_form_field.register(forms.URLField)
MIT License
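The url_field_data generator above follows a simple pattern: honour an explicit choices override, otherwise pick from a fixed pool. A small framework-free sketch of the same pattern (the URL pool below is only an example, not django-any's):

import random

def any_url(**kwargs):
    # Use the caller-supplied 'choices' if given, else fall back to a default pool.
    urls = kwargs.get('choices', ['http://localhost/', 'http://example.com/'])
    return random.choice(urls)

print(any_url())
print(any_url(choices=['http://127.0.0.1/']))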
alexa/alexa-apis-for-python
ask-smapi-model/ask_smapi_model/v0/event_schema/alexa_development_event/manifest_update.py
ManifestUpdate.__init__
python
def __init__(self, timestamp=None, request_id=None, payload=None):
    self.__discriminator_value = "AlexaDevelopmentEvent.ManifestUpdate"
    self.event_name = self.__discriminator_value
    super(ManifestUpdate, self).__init__(timestamp=timestamp, event_name=self.__discriminator_value)
    self.request_id = request_id
    self.payload = payload
'AlexaDevelopmentEvent.ManifestUpdate' event represents the status of the update request on the Manifest. This event is generated when a request to create a skill or update an existing skill is completed. The request may complete either with `SUCCEEDED` or `FAILED` status.

:param timestamp: ISO 8601 timestamp for the instant when event was created.
:type timestamp: (optional) datetime
:param request_id: A development notification includes a unique identifier that identifies the original request that resulted in the development notification. The requestId for the original request is returned by Amazon APIs in the response's 'X-Amzn-RequestId' header.
:type request_id: (optional) str
:param payload:
:type payload: (optional) ask_smapi_model.v0.event_schema.skill_event_attributes.SkillEventAttributes
https://github.com/alexa/alexa-apis-for-python/blob/bfe5e694daaca71bfb1a4199ca8d2514f1cac6c9/ask-smapi-model/ask_smapi_model/v0/event_schema/alexa_development_event/manifest_update.py#L58-L74
import pprint import re import six import typing from enum import Enum from ask_smapi_model.v0.event_schema.base_schema import BaseSchema if typing.TYPE_CHECKING: from typing import Dict, List, Optional, Union, Any from datetime import datetime from ask_smapi_model.v0.event_schema.skill_event_attributes import SkillEventAttributes as SkillEventAttributes_c0873626 class ManifestUpdate(BaseSchema): deserialized_types = { 'timestamp': 'datetime', 'event_name': 'str', 'request_id': 'str', 'payload': 'ask_smapi_model.v0.event_schema.skill_event_attributes.SkillEventAttributes' } attribute_map = { 'timestamp': 'timestamp', 'event_name': 'eventName', 'request_id': 'requestId', 'payload': 'payload' } supports_multiple_types = False
Apache License 2.0
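The ManifestUpdate constructor above fixes a discriminator value and forwards the shared fields to its base class. A plain-Python sketch of that pattern (not the SDK itself; the class names here are invented for the example):

class BaseEvent:
    def __init__(self, timestamp=None, event_name=None):
        self.timestamp = timestamp
        self.event_name = event_name

class ManifestUpdateSketch(BaseEvent):
    # The subclass pins the discriminator and passes it up as event_name.
    DISCRIMINATOR = "AlexaDevelopmentEvent.ManifestUpdate"

    def __init__(self, timestamp=None, request_id=None, payload=None):
        super().__init__(timestamp=timestamp, event_name=self.DISCRIMINATOR)
        self.request_id = request_id
        self.payload = payload

print(ManifestUpdateSketch(request_id="req-123").event_name)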
tufin/pytos
pytos/securetrack/helpers.py
Secure_Track_Helper.get_device_id_by_name
python
def get_device_id_by_name(self, device_name):
    match_device = None
    devices_list = self.get_devices_list()
    for device in devices_list:
        if device.name == device_name:
            if not match_device:
                match_device = device
            else:
                message = "More than one device with the name '{}' exists. (Device IDs are {},{})".format(
                    device_name, match_device.id, device.id)
                logger.error(message)
                raise IndexError(message)
    if not match_device:
        message = "A device with the name '{}' does not exist.".format(device_name)
        logger.error(message)
        raise ValueError(message)
    else:
        return match_device.id
Get the device ID for a device by name

:param device_name: The name for the device.
:type device_name: str
:return: The device ID for the device with the matching name.
:rtype: int
:raise ValueError: If a device with the specified name does not exist.
:raise IndexError: If more than one device with the specified name exists.
https://github.com/tufin/pytos/blob/18a2f4510b2d9c43b24c4f7c41a143c2512ba1ce/pytos/securetrack/helpers.py#L198-L224
import collections import csv import io import itertools import logging import base64 import multiprocessing.pool import xml.etree.ElementTree as ET from pytos.common.definitions.Url_Params_Builder import URLParamBuilderDict from requests import RequestException from pytos.common.helpers import Secure_API_Helper from pytos.common.definitions.xml_tags import Elements, SeverityLevels from pytos.common.exceptions import REST_Not_Found_Error, REST_Bad_Request_Error, REST_Request_URI_Too_Long, REST_Client_Error, ItemAlreadyExists, REST_Internal_Server_Error, REST_HTTP_Exception from pytos.common.logging.definitions import HELPERS_LOGGER_NAME from pytos.securetrack.xml_objects.rest.cleanups import Generic_Cleanup_List from pytos.securetrack.xml_objects.rest.device import Devices_List, Device, Device_Revisions_List, GenericDevicesList, RuleSearchDeviceList, Device_Revision, InternetReferralObject from pytos.securetrack.xml_objects.rest.domain import Domains, Domain from pytos.securetrack.xml_objects.rest.nat_rules import NatRules from pytos.securetrack.xml_objects.rest.routes import RoutesList from pytos.securetrack.xml_objects.rest.rules import Rules_List, Cleanup_Set, Policy_List, Bindings_List, Interfaces_List, Topology_Interfaces_List, Policy_Analysis_Query_Result, Network_Objects_List, Services_List, Rule_Documentation, SecurityPolicyDeviceViolations, Change_Authorization from pytos.securetrack.xml_objects.rest.security_policy import SecurityPolicyExceptionList, Security_Policy_Exception, Security_Policies_List from pytos.securetrack.xml_objects.rest.topology import PathCalculationResults, TopologyCloudList from pytos.securetrack.xml_objects.rest.zones import Zone_Entries_List, Zone_List, ZoneDescendantsList, Device_Zones_List logger = logging.getLogger(HELPERS_LOGGER_NAME) DEVICE_TYPES = {"Cisco": ["asa", "pix", "fwsm", "nexus", "switch", "xr_router", "L3_switch", "router"], "Checkpoint": ["cp_cma", "cp_smrt_cntr"], "Netscreen": ["netscreen", "netscreen_cluster", "junos", "junosStateless"], "Fortinet": ["fortigate"], "PaloAltoNetworks": ["PaloAltoFW"], "Mcafee": ["mcafeeFW"], "NewF5": ["new_bigip"], "f5": ["bigip"], "bluecoat": ["proxysg"], "linux": ["iptables"]} DEFAULT_DOMAIN_ID = 1 DETACH = -1 class Secure_Track_Helper(Secure_API_Helper): CONFIG_PARSER_SECTION_NAME = "securetrack" def __init__(self, hostname, login_data, **kwargs): super().__init__(hostname, login_data, **kwargs) def get_devices_list(self, custom_params=None): logger.info("Getting SecureTrack devices list.") if custom_params: params = '&'.join('{}={}'.format(k, v) for k, v in custom_params.items()) params = '?' + params else: params = '' try: response_string = self.get_uri("/securetrack/api/devices/{}".format(params), expected_status_codes=200).response.content except RequestException: message = "Failed to GET devices list." 
logger.critical(message) raise IOError(message) return Devices_List.from_xml_string(response_string) def get_device_by_id(self, device_id): logger.info("Getting SecureTrack device with ID %s.", device_id) try: response_string = self.get_uri("/securetrack/api/devices/{}".format(device_id), expected_status_codes=200).response.content except REST_Not_Found_Error: message = "Device with ID {} does not exist.".format(device_id) logger.critical(message) raise ValueError(message) except RequestException: message = "Failed to GET device with ID {}.".format(device_id) logger.critical(message) raise IOError(message) return Device.from_xml_string(response_string) def get_rule_by_device_and_rule_id(self, device_id, rule_id, get_documentation=False): logger.info("Getting SecureTrack device with ID %s.", device_id) if get_documentation: rule_documentation_uri_suffix = "?add=documentation" logger.info("Getting rules with rule documentation.") else: rule_documentation_uri_suffix = "" try: response_string = self.get_uri( "/securetrack/api/devices/{}/rules/{}{}".format(device_id, rule_id, rule_documentation_uri_suffix), expected_status_codes=200).response.content except RequestException: message = "Failed to GET device with ID {}.".format(device_id) logger.critical(message) raise IOError(message) return Rules_List.from_xml_string(response_string) def get_device_revisions_by_id(self, device_id): logger.info("Getting SecureTrack revisions for device with ID %s.", device_id) try: response_string = self.get_uri("/securetrack/api/devices/{}/revisions/".format(device_id), expected_status_codes=200).response.content except REST_Not_Found_Error: message = "Device with ID {} does not exist.".format(device_id) logger.critical(message) raise ValueError(message) except RequestException: message = "Failed to GET device with ID {}.".format(device_id) logger.critical(message) raise IOError(message) return Device_Revisions_List.from_xml_string(response_string) def get_device_config_by_id(self, device_id): hard_coded_checksum_string = b"Cryptochecksum:0123456789ABCDEF]]></" logger.info("Getting SecureTrack device with ID '%s'.", device_id) try: response_string = self.get_uri("/securetrack/api/devices/{}/config".format(device_id), expected_status_codes=200).response.content except REST_Not_Found_Error: message = "Device with ID {} does not exist.".format(device_id) logger.critical(message) raise ValueError(message) except RequestException: message = "Failed to GET configuration for device with ID {}.".format(device_id) logger.critical(message) raise IOError(message) device_config_xml_string = bytes(response_string) if device_config_xml_string: device_config_xml_string = device_config_xml_string.replace( b"<" + bytes(Elements.DEVICE_CONFIG, encoding="ascii") + b"><![CDATA[", b"") device_config_xml_string = device_config_xml_string.replace( hard_coded_checksum_string + bytes(Elements.DEVICE_CONFIG, encoding="ascii") + b">\n", b"") return device_config_xml_string else: return device_config_xml_string
Apache License 2.0
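get_device_id_by_name above enforces an exactly-one-match rule: ValueError when no device matches, IndexError when several do. A standalone sketch of that rule with plain (id, name) tuples standing in for SecureTrack devices:

def device_id_by_name(devices, name):
    # Collect every id whose name matches, then demand exactly one.
    matches = [dev_id for dev_id, dev_name in devices if dev_name == name]
    if not matches:
        raise ValueError("A device with the name '{}' does not exist.".format(name))
    if len(matches) > 1:
        raise IndexError("More than one device with the name '{}' exists.".format(name))
    return matches[0]

print(device_id_by_name([(1, 'fw-a'), (2, 'fw-b')], 'fw-b'))  # -> 2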
tmux-python/libtmux
libtmux/window.py
Window.attached_pane
python
def attached_pane(self):
    for pane in self._panes:
        if 'pane_active' in pane:
            if pane.get('pane_active') == '1':
                return Pane(window=self, **pane)
            else:
                continue
    return []
Return the attached :class:`Pane`.

Returns
-------
:class:`Pane`
https://github.com/tmux-python/libtmux/blob/1b1066ea3c67f790f9751d6365215af4824afb48/libtmux/window.py#L489-L505
import logging import os import shlex from . import exc, formats from .common import TmuxMappingObject, TmuxRelationalObject, handle_option_error from .pane import Pane logger = logging.getLogger(__name__) class Window(TmuxMappingObject, TmuxRelationalObject): child_id_attribute = 'pane_id' formatter_prefix = 'window_' def __init__(self, session=None, **kwargs): if not session: raise ValueError('Window requires a Session, session=Session') self.session = session self.server = self.session.server if 'window_id' not in kwargs: raise ValueError('Window requires a `window_id`') self._window_id = kwargs['window_id'] def __repr__(self): return "%s(%s %s:%s, %s)" % ( self.__class__.__name__, self.id, self.index, self.name, self.session, ) @property def _info(self, *args): attrs = {'window_id': self._window_id} def by(val, *args): for key, value in attrs.items(): try: if attrs[key] != val[key]: return False except KeyError: return False return True ret = list(filter(by, self.server._windows)) if len(ret) == 0 and self.server._windows[0]['window_id'] == '@0': ret = self.server._windows return ret[0] def cmd(self, cmd, *args, **kwargs): if not any(arg.startswith('-t') for arg in args): args = ('-t', self.id) + args return self.server.cmd(cmd, *args, **kwargs) def select_layout(self, layout=None): cmd = ['select-layout', '-t%s:%s' % (self.get('session_id'), self.index)] if layout: cmd.append(layout) proc = self.cmd(*cmd) if proc.stderr: raise exc.LibTmuxException(proc.stderr) def set_window_option(self, option, value): self.server._update_windows() if isinstance(value, bool) and value: value = 'on' elif isinstance(value, bool) and not value: value = 'off' cmd = self.cmd( 'set-window-option', '-t%s:%s' % (self.get('session_id'), self.index), option, value, ) if isinstance(cmd.stderr, list) and len(cmd.stderr): handle_option_error(cmd.stderr[0]) def show_window_options(self, option=None, g=False): tmux_args = tuple() if g: tmux_args += ('-g',) if option: return self.show_window_option(option, g=g) else: tmux_args += ('show-window-options',) cmd = self.cmd(*tmux_args).stdout cmd = [tuple(shlex.split(item)) for item in cmd] window_options = dict(cmd) for key, value in window_options.items(): if value.isdigit(): window_options[key] = int(value) return window_options def show_window_option(self, option, g=False): tmux_args = tuple() if g: tmux_args += ('-g',) tmux_args += (option,) cmd = self.cmd('show-window-options', *tmux_args) if len(cmd.stderr): handle_option_error(cmd.stderr[0]) if not len(cmd.stdout): return None option = [shlex.split(item) for item in cmd.stdout][0] if option[1].isdigit(): option = (option[0], int(option[1])) return option[1] def rename_window(self, new_name): import shlex lex = shlex.shlex(new_name) lex.escape = ' ' lex.whitespace_split = False try: self.cmd('rename-window', new_name) self['window_name'] = new_name except Exception as e: logger.error(e) self.server._update_windows() return self def kill_window(self): proc = self.cmd( 'kill-window', '-t%s:%s' % (self.get('session_id'), self.index), ) if proc.stderr: raise exc.LibTmuxException(proc.stderr) self.server._update_windows() def move_window(self, destination="", session=None): session = session or self.get('session_id') proc = self.cmd( 'move-window', '-s%s:%s' % (self.get('session_id'), self.index), '-t%s:%s' % (session, destination), ) if proc.stderr: raise exc.LibTmuxException(proc.stderr) self.server._update_windows() def select_window(self): return self.session.select_window(self.index) def select_pane(self, 
target_pane): if target_pane in ['-l', '-U', '-D', '-L', '-R']: proc = self.cmd('select-pane', '-t%s' % self.id, target_pane) else: proc = self.cmd('select-pane', '-t%s' % target_pane) if proc.stderr: raise exc.LibTmuxException(proc.stderr) return self.attached_pane def last_pane(self): return self.select_pane('-l') def split_window( self, target=None, start_directory=None, attach=True, vertical=True, shell=None, percent=None, ): pformats = [ 'session_name', 'session_id', 'window_index', 'window_id', ] + formats.PANE_FORMATS tmux_formats = ['#{%s}\t' % f for f in pformats] tmux_args = tuple() if target: tmux_args += ('-t%s' % target,) else: tmux_args += ('-t%s' % self.panes[0].get('pane_id'),) if vertical: tmux_args += ('-v',) else: tmux_args += ('-h',) if percent is not None: tmux_args += ('-p %d' % percent,) tmux_args += ('-P', '-F%s' % ''.join(tmux_formats)) if start_directory: start_directory = os.path.expanduser(start_directory) tmux_args += ('-c%s' % start_directory,) if not attach: tmux_args += ('-d',) if shell: tmux_args += (shell,) pane = self.cmd('split-window', *tmux_args) if pane.stderr: raise exc.LibTmuxException(pane.stderr) if 'pane too small' in pane.stderr: pass raise exc.LibTmuxException(pane.stderr, self._info, self.panes) else: pane = pane.stdout[0] pane = dict(zip(pformats, pane.split('\t'))) pane = dict((k, v) for k, v in pane.items() if v) return Pane(window=self, **pane) @property
MIT License
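attached_pane above scans the pane records tmux reports and returns the one whose pane_active flag is '1'. The selection logic in isolation, with plain dictionaries standing in for those records:

panes = [
    {'pane_id': '%0', 'pane_active': '0'},
    {'pane_id': '%1', 'pane_active': '1'},
]

def active_pane(pane_records):
    # Return the first record flagged as active, or None if there is none.
    for pane in pane_records:
        if pane.get('pane_active') == '1':
            return pane
    return None

print(active_pane(panes))  # {'pane_id': '%1', 'pane_active': '1'}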
elfi-dev/elfi
elfi/examples/daycare.py
ss_shannon
python
def ss_shannon(data):
    total_obs = np.sum(data, axis=2, keepdims=True)
    with np.errstate(divide='ignore', invalid='ignore'):
        proportions = np.nan_to_num(total_obs / np.sum(total_obs, axis=3, keepdims=True))
    proportions[proportions == 0] = 1
    shannon = (-np.sum(proportions * np.log(proportions), axis=3))[:, :, 0]
    return shannon
Calculate the Shannon index of diversity of the distribution of observed strains.

H = -\sum p \log(p)

https://en.wikipedia.org/wiki/Diversity_index#Shannon_index

Parameters
----------
data : np.array of shape (batch_size, n_dcc, n_obs, n_strains)

Returns
-------
np.array of shape (batch_size, n_dcc)
https://github.com/elfi-dev/elfi/blob/07ac0ed5e81d5d5fb42de63db3cf9ccc9135b88c/elfi/examples/daycare.py#L199-L221
import logging from functools import partial import numpy as np import elfi def daycare(t1, t2, t3, n_dcc=29, n_ind=53, n_strains=33, freq_strains_commun=None, n_obs=36, time_end=10., batch_size=1, random_state=None): random_state = random_state or np.random t1 = np.asanyarray(t1).reshape((-1, 1, 1, 1)) t2 = np.asanyarray(t2).reshape((-1, 1, 1, 1)) t3 = np.asanyarray(t3).reshape((-1, 1, 1, 1)) if freq_strains_commun is None: freq_strains_commun = np.full(n_strains, 0.1) prob_commun = t2 * freq_strains_commun state = np.zeros((batch_size, n_dcc, n_ind, n_strains), dtype=np.bool) time = np.zeros((batch_size, n_dcc)) n_factor = 1. / (n_ind - 1) gamma = 1. ind_b_dcc = [np.repeat(np.arange(batch_size), n_dcc), np.tile(np.arange(n_dcc), batch_size)] while np.any(time < time_end): with np.errstate(divide='ignore', invalid='ignore'): prob_strain_adjust = np.nan_to_num(state / np.sum(state, axis=3, keepdims=True)) prob_strain = np.sum(prob_strain_adjust, axis=2, keepdims=True) intrainfect_rate = t1 * (np.tile(prob_strain, (1, 1, n_ind, 1)) - prob_strain_adjust) * n_factor + 1e-9 hazards = intrainfect_rate + prob_commun any_infection = np.any(state, axis=3, keepdims=True) hazards = np.where(any_infection, t3 * hazards, hazards) hazards[state] = gamma inv_sum_hazards = 1. / np.sum(hazards, axis=(2, 3), keepdims=True) probs = hazards * inv_sum_hazards delta_t = random_state.exponential(inv_sum_hazards[:, :, 0, 0]) time = time + delta_t probs = probs.reshape((batch_size, n_dcc, -1)) cumprobs = np.cumsum(probs[:, :, :-1], axis=2) x = random_state.uniform(size=(batch_size, n_dcc, 1)) ind_transit = np.sum(x >= cumprobs, axis=2) ind_transit = ind_b_dcc + list(np.unravel_index(ind_transit.ravel(), (n_ind, n_strains))) state[ind_transit] = np.logical_not(state[ind_transit]) state_obs = state[:, :, :n_obs, :] return state_obs def get_model(true_params=None, seed_obs=None, **kwargs): logger = logging.getLogger() if true_params is None: true_params = [3.6, 0.6, 0.1] m = elfi.ElfiModel() y_obs = daycare(*true_params, random_state=np.random.RandomState(seed_obs), **kwargs) sim_fn = partial(daycare, **kwargs) priors = [] sumstats = [] priors.append(elfi.Prior('uniform', 0, 11, model=m, name='t1')) priors.append(elfi.Prior('uniform', 0, 2, model=m, name='t2')) priors.append(elfi.Prior('uniform', 0, 1, model=m, name='t3')) elfi.Simulator(sim_fn, *priors, observed=y_obs, name='DCC') sumstats.append(elfi.Summary(ss_shannon, m['DCC'], name='Shannon')) sumstats.append(elfi.Summary(ss_strains, m['DCC'], name='n_strains')) sumstats.append(elfi.Summary(ss_prevalence, m['DCC'], name='prevalence')) sumstats.append(elfi.Summary(ss_prevalence_multi, m['DCC'], name='multi')) elfi.Discrepancy(distance, *sumstats, name='d') elfi.Operation(np.log, m['d'], name='logd') logger.info("Generated observations with true parameters " "t1: %.1f, t2: %.3f, t3: %.1f, ", *true_params) return m
BSD 3-Clause New or Revised License
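ss_shannon above reduces to the textbook formula H = -sum(p * log(p)) per daycare centre. A quick numeric check on a single vector of strain counts (the counts are invented):

import numpy as np

counts = np.array([10., 5., 5., 0.])
p = counts / counts.sum()
p = p[p > 0]                       # drop empty strains, as the summary does via its p == 0 -> 1 trick
H = -np.sum(p * np.log(p))
print(round(H, 3))                 # about 1.04 for this distribution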
pythad/nider
nider/utils.py
is_path_creatable
python
def is_path_creatable(pathname):
    dirname = os.path.dirname(pathname) or os.getcwd()
    return os.access(dirname, os.W_OK)
Function to check if the current user has sufficient permissions to create the passed pathname

Args:
    pathname (str): path to check.

Returns:
    bool: ``True`` if the current user has sufficient permissions to create the passed ``pathname``. ``False`` otherwise.
https://github.com/pythad/nider/blob/be71e09968559c5504c59158aa339136fb891fb4/nider/utils.py#L40-L53
import os import random import warnings from contextlib import contextmanager from PIL import Image, ImageFont from nider.colors import FLAT_UI_COLORS from nider.exceptions import DefaultFontWarning, FontNotFoundWarning def get_font(fontfullpath, fontsize): if fontfullpath is None: warnings.warn(DefaultFontWarning()) font = ImageFont.load_default() font.is_default = True elif not os.path.exists(fontfullpath): warnings.warn(FontNotFoundWarning(fontfullpath)) font = ImageFont.load_default() font.is_default = True else: font = ImageFont.truetype(fontfullpath, fontsize) font.is_default = False return font
MIT License
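is_path_creatable above boils down to an os.access(..., os.W_OK) check on the target directory. The same check applied to a couple of sample paths (results depend on where and as whom you run it):

import os

def can_create(pathname):
    # Writability of the parent directory decides whether the file could be created.
    dirname = os.path.dirname(pathname) or os.getcwd()
    return os.access(dirname, os.W_OK)

print(can_create('image.png'))            # writable current directory -> True
print(can_create('/root/forbidden.png'))  # usually False for a non-root user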
xilinx/pyxir
python/pyxir/graph/ops/l1_basic_nn.py
log
python
def log(attrs, in_xlayers):
    assert len(in_xlayers) == 1
    shape = in_xlayers[0].shapes[:]
    return {'shape': shape}
Return Log registration information (shape)
https://github.com/xilinx/pyxir/blob/bef661d6d77adcdbd2cf4163f2cf3a1d31d40406/python/pyxir/graph/ops/l1_basic_nn.py#L430-L438
import math import logging import warnings import numpy as np from typing import Dict, List, Any from pyxir.shapes import TensorShape, TupleShape, get_numpy_broadcasted_shape from ..layer.xlayer import defaultXLayer, XLayer, ConvData, ScaleData from ..layer.xlayer_factory import xop_register_factory, xop_register from ..xop_registry import xop_register_op_layout_transform, xop_register_op_transpose_transform logger = logging.getLogger("pyxir") @xop_register('Add') def add(attrs: Dict[str, Any], in_xlayers: List[XLayer]): assert len(in_xlayers) == 2 lX, rX = in_xlayers if len(lX.shapes) >= len(rX.shapes): lshape = lX.shapes[:] rshape = [None] * (len(lX.shapes) - len(rX.shapes)) + rX.shapes[:] else: rshape = rX.shapes[:] lshape = [None] * (len(rX.shapes) - len(lX.shapes)) + lX.shapes[:] assert len(lshape) == len(rshape) reversed_shape = [] for ls, rs in zip(reversed(lshape), reversed(rshape)): if ls == rs or ls in [1, None] or rs in [1, None]: if ls is None: reversed_shape.append(rs) elif rs is None: reversed_shape.append(ls) else: reversed_shape.append(max(ls, rs)) else: raise ValueError("Invalid shapes for broadcasted additions:" " {} and {}".format(lX.shapes, rX.shapes)) shape = TensorShape(list(reversed(reversed_shape))) return {'shape': shape} @xop_register_factory('BiasAdd') def bias_add(op_name: str, input_layer: XLayer, bias_layer: XLayer, axis: int, **kwargs): bottoms = [input_layer.name] attrs = kwargs attrs.update({ 'axis': axis }) logger.debug("--bias_add shape: {}".format(input_layer.shapes[:])) X = XLayer() X = X._replace( shapes=input_layer.shapes[:], sizes=input_layer.sizes[:], name=op_name, type=['BiasAdd'], data=[bias_layer.data[0]], layer=[op_name], tops=[], bottoms=bottoms, attrs=attrs, targets=[]) return X @xop_register_op_transpose_transform('BiasAdd') def bias_add_transpose_transform(X: XLayer, axes: List[int]): new_shape = TensorShape([X.shapes[i] for i in axes]) X.shapes[:] = new_shape X.attrs['axis'] = axes.index(X.attrs['axis']) @xop_register_factory('Concat') def concat(op_name: str, input_layers: List[XLayer], axis: int, **kwargs): bottoms = [input_layer.name for input_layer in input_layers] if axis < 0: axis = axis + len(input_layers[0].shapes[:]) assert len(set([len(il.shapes) for il in input_layers])) == 1 for i in range(len(list(input_layers[0].shapes))): check = set([il.shapes[i] for il in input_layers]) if len(check) > 1 and -1 in check: check.remove(-1) assert i == axis or len(check) == 1, "i: {0}, axis: {1}, check: {2}".format(i, axis, check) shape = input_layers[0].shapes[:] shape[axis] = sum([il.shapes[axis] for il in input_layers]) attrs = kwargs attrs.update({ 'axis': axis }) X = XLayer() X = X._replace( name=op_name, type=['Concat'], shapes=shape, sizes=shape.get_size(), layer=[op_name], tops=[], bottoms=bottoms, attrs=attrs, targets=[] ) return X @xop_register_op_transpose_transform('Concat') def concat_transpose_transform(X: XLayer, axes: List[int]): new_shape = TensorShape([X.shapes[i] for i in axes]) X.shapes = new_shape X.attrs['axis'] = axes.index(X.attrs['axis']) @xop_register_factory('Eltwise') def eltwise(op_name: str, lhs_layer: XLayer, rhs_layer: XLayer, **kwargs): bottoms = [lhs_layer.name, rhs_layer.name] attrs = kwargs attrs.update({ 'op': 'Add' }) X = XLayer() X = X._replace( name=op_name, type=['Eltwise'], shapes=lhs_layer.shapes[:], sizes=lhs_layer.sizes[:], layer=[op_name], tops=[], bottoms=bottoms, attrs=attrs, targets=[] ) return X @xop_register_op_transpose_transform('Eltwise') def eltwise_transpose_transform(X: XLayer, axes: 
List[int]): new_shape = TensorShape([X.shapes[i] for i in axes]) X.shapes[:] = new_shape @xop_register_factory('Dense') def dense(op_name: str, input_layer: XLayer, weights_layer: XLayer, units: int, data_layout: str = 'NC', kernel_layout: str = 'OI', **kwargs): if 'Constant' not in weights_layer.type: raise ValueError("Dense layer is expecting a 'Constant' weights layer" " as weights input but got layer of type: {}" .format(weights_layer.type[0])) bottoms = [input_layer.name] bias = np.zeros([units]) data = ConvData(weights_layer.data[0], bias) attrs = kwargs attrs.update({ 'W_shape': weights_layer.shapes.tolist(), 'units': units, 'data_layout': data_layout, 'kernel_layout': kernel_layout }) X = XLayer() X = X._replace( name=op_name, type=['Dense'], shapes=TensorShape([input_layer.shapes[0], units]), sizes=[units], data=data, layer=[op_name], tops=[], bottoms=bottoms, attrs=attrs, targets=[]) return X @xop_register('Divide') def divide(attrs: Dict, in_xlayers: List[XLayer]): assert len(in_xlayers) == 2 lX, rX = in_xlayers l_shape = lX.shapes[:] r_shape = rX.shapes[:] broadcast_shape = get_numpy_broadcasted_shape(l_shape, r_shape) shape = TensorShape(broadcast_shape) return {'shape': shape} @xop_register_factory('Dropout') def dropout(op_name: str, input_layer: XLayer, rate: float, **kwargs): attrs = kwargs attrs.update({ 'rate': rate }) X = XLayer() X = X._replace( name=op_name, type=['Dropout'], shapes=input_layer.shapes[:], sizes=input_layer.shapes.get_size(), layer=[op_name], tops=[], bottoms=[input_layer.name], attrs=attrs, targets=[]) return X @xop_register('Exp') def exp(attrs: Dict[str, Any], in_xlayers: List[XLayer]) -> Dict: assert len(in_xlayers) == 1 shape = in_xlayers[0].shapes[:] return {'shape': shape} @xop_register('ExpandDims') def expand_dims(attrs: Dict[str, Any], in_xlayers: List[XLayer]) -> Dict[str, List[int]]: assert len(in_xlayers) == 1 assert 'axis' in attrs assert 'num_newaxis' in attrs shape = in_xlayers[0].shapes[:] axis = attrs['axis'] num_newaxis = attrs['num_newaxis'] assert axis < 0 or axis <= len(shape) assert axis > 0 or axis >= -len(shape) - 1 if axis < 0: axis = len(shape) + axis + 1 new_shape = TensorShape(shape[:axis] + [1] * num_newaxis + shape[axis:]) return {'shape': new_shape} @xop_register('Log')
Apache License 2.0
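The 'Log' registration above is a pure shape rule: one input, output shape copied from it. A dependency-free sketch of such an elementwise shape rule (plain lists stand in for pyxir's shape objects):

def elementwise_shape(in_shapes):
    # Elementwise ops keep the shape of their single input.
    assert len(in_shapes) == 1
    return {'shape': list(in_shapes[0])}

print(elementwise_shape([[1, 3, 224, 224]]))  # {'shape': [1, 3, 224, 224]}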
evonove/django-stored-messages
stored_messages/templatetags/stored_messages_tags.py
stored_messages_count
python
def stored_messages_count(context):
    if "user" in context:
        user = context["user"]
        if user.is_authenticated():
            return Inbox.objects.select_related("message").filter(user=user).count()
Renders a list of unread stored messages for the current user
https://github.com/evonove/django-stored-messages/blob/23b71f952d5d3fd03285f5e700879d05796ef7ba/stored_messages/templatetags/stored_messages_tags.py#L27-L34
from __future__ import unicode_literals from django import template from ..models import Inbox, MessageArchive register = template.Library() @register.inclusion_tag("stored_messages/stored_messages_list.html", takes_context=True) def stored_messages_list(context, num_elements=10): if "user" in context: user = context["user"] if user.is_authenticated(): qs = Inbox.objects.select_related("message").filter(user=user) return { "messages": qs[:num_elements], "count": qs.count(), } @register.assignment_tag(takes_context=True)
BSD 3-Clause New or Revised License
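stored_messages_count above is a per-user count over an inbox queryset. A framework-free sketch of the same idea with an in-memory list standing in for the Inbox model:

inbox = [
    {'user': 'alice', 'message': 'hi'},
    {'user': 'bob', 'message': 'hello'},
    {'user': 'alice', 'message': 'reminder'},
]

def unread_count(user):
    # Count the inbox entries that belong to the given user.
    return sum(1 for entry in inbox if entry['user'] == user)

print(unread_count('alice'))  # 2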
jenspetersen/probabilistic-unet
probunet/model.py
ProbabilisticSegmentationNet.elbo
python
def elbo(self, seg, input_=None, nll_reduction="sum", beta=1.0,
         make_onehot=True, make_onehot_classes=None, newaxis=False):
    if self.last_activations is None:
        raise ValueError("'last_activations' is currently None. Please pass an input first.")
    if input_ is not None:
        with torch.no_grad():
            self.encode_posterior(input_, seg, make_onehot=make_onehot,
                                  make_onehot_classes=make_onehot_classes, newaxis=newaxis)
    if make_onehot and newaxis:
        pass
    elif make_onehot and not newaxis:
        seg = seg[:, 0]
    else:
        seg = torch.argmax(seg, 1, keepdim=False)
    kl = self.kl_divergence()
    nll = nn.NLLLoss(reduction=nll_reduction)(self.reconstruct(sample=None, use_posterior_mean=True, out_device=None), seg.long())
    return - (beta * nll + kl)
Compute the ELBO with seg as ground truth.

* Prior is expected and will not be encoded.
* If input_ is given, posterior will automatically be encoded.
* Either input_ or stored activations must be available.
https://github.com/jenspetersen/probabilistic-unet/blob/ce4708045a3fa3c9d23d44300920e2177fea7140/probunet/model.py#L660-L682
import torch import torch.nn as nn from probunet.util import make_onehot as make_onehot_segmentation, make_slices, match_to def is_conv(op): conv_types = (nn.Conv1d, nn.Conv2d, nn.Conv3d, nn.ConvTranspose1d, nn.ConvTranspose2d, nn.ConvTranspose3d) if type(op) == type and issubclass(op, conv_types): return True elif type(op) in conv_types: return True else: return False class ConvModule(nn.Module): def __init__(self, *args, **kwargs): super(ConvModule, self).__init__() def init_weights(self, init_fn, *args, **kwargs): class init_(object): def __init__(self): self.fn = init_fn self.args = args self.kwargs = kwargs def __call__(self, module): if is_conv(type(module)): module.weight = self.fn(module.weight, *self.args, **self.kwargs) _init_ = init_() self.apply(_init_) def init_bias(self, init_fn, *args, **kwargs): class init_(object): def __init__(self): self.fn = init_fn self.args = args self.kwargs = kwargs def __call__(self, module): if is_conv(type(module)) and module.bias is not None: module.bias = self.fn(module.bias, *self.args, **self.kwargs) _init_ = init_() self.apply(_init_) class ConcatCoords(nn.Module): def forward(self, input_): dim = input_.dim() - 2 coord_channels = [] for i in range(dim): view = [1, ] * dim view[i] = -1 repeat = list(input_.shape[2:]) repeat[i] = 1 coord_channels.append( torch.linspace(-0.5, 0.5, input_.shape[i+2]) .view(*view) .repeat(*repeat) .to(device=input_.device, dtype=input_.dtype)) coord_channels = torch.stack(coord_channels).unsqueeze(0) repeat = [1, ] * input_.dim() repeat[0] = input_.shape[0] coord_channels = coord_channels.repeat(*repeat).contiguous() return torch.cat([input_, coord_channels], 1) class InjectionConvEncoder(ConvModule): _default_activation_kwargs = dict(inplace=True) _default_norm_kwargs = dict() _default_conv_kwargs = dict(kernel_size=3, padding=1) _default_pool_kwargs = dict(kernel_size=2) _default_dropout_kwargs = dict() _default_global_pool_kwargs = dict() def __init__(self, in_channels=1, out_channels=6, depth=4, injection_depth="last", injection_channels=0, block_depth=2, num_feature_maps=24, feature_map_multiplier=2, activation_op=nn.LeakyReLU, activation_kwargs=None, norm_op=nn.InstanceNorm2d, norm_kwargs=None, norm_depth=0, conv_op=nn.Conv2d, conv_kwargs=None, pool_op=nn.AvgPool2d, pool_kwargs=None, dropout_op=None, dropout_kwargs=None, global_pool_op=nn.AdaptiveAvgPool2d, global_pool_kwargs=None, **kwargs): super(InjectionConvEncoder, self).__init__(**kwargs) self.in_channels = in_channels self.out_channels = out_channels self.depth = depth self.injection_depth = depth - 1 if injection_depth == "last" else injection_depth self.injection_channels = injection_channels self.block_depth = block_depth self.num_feature_maps = num_feature_maps self.feature_map_multiplier = feature_map_multiplier self.activation_op = activation_op self.activation_kwargs = self._default_activation_kwargs if activation_kwargs is not None: self.activation_kwargs.update(activation_kwargs) self.norm_op = norm_op self.norm_kwargs = self._default_norm_kwargs if norm_kwargs is not None: self.norm_kwargs.update(norm_kwargs) self.norm_depth = depth if norm_depth == "full" else norm_depth self.conv_op = conv_op self.conv_kwargs = self._default_conv_kwargs if conv_kwargs is not None: self.conv_kwargs.update(conv_kwargs) self.pool_op = pool_op self.pool_kwargs = self._default_pool_kwargs if pool_kwargs is not None: self.pool_kwargs.update(pool_kwargs) self.dropout_op = dropout_op self.dropout_kwargs = self._default_dropout_kwargs if dropout_kwargs is not 
None: self.dropout_kwargs.update(dropout_kwargs) self.global_pool_op = global_pool_op self.global_pool_kwargs = self._default_global_pool_kwargs if global_pool_kwargs is not None: self.global_pool_kwargs.update(global_pool_kwargs) for d in range(self.depth): in_ = self.in_channels if d == 0 else self.num_feature_maps * (self.feature_map_multiplier**(d-1)) out_ = self.num_feature_maps * (self.feature_map_multiplier**d) if d == self.injection_depth + 1: in_ += self.injection_channels layers = [] if d > 0: layers.append(self.pool_op(**self.pool_kwargs)) for b in range(self.block_depth): current_in = in_ if b == 0 else out_ layers.append(self.conv_op(current_in, out_, **self.conv_kwargs)) if self.norm_op is not None and d < self.norm_depth: layers.append(self.norm_op(out_, **self.norm_kwargs)) if self.activation_op is not None: layers.append(self.activation_op(**self.activation_kwargs)) if self.dropout_op is not None: layers.append(self.dropout_op(**self.dropout_kwargs)) if d == self.depth - 1: current_conv_kwargs = self.conv_kwargs.copy() current_conv_kwargs["kernel_size"] = 1 current_conv_kwargs["padding"] = 0 current_conv_kwargs["bias"] = False layers.append(self.conv_op(out_, out_channels, **current_conv_kwargs)) self.add_module("encode_{}".format(d), nn.Sequential(*layers)) if self.global_pool_op is not None: self.add_module("global_pool", self.global_pool_op(1, **self.global_pool_kwargs)) def forward(self, x, injection=None): for d in range(self.depth): x = self._modules["encode_{}".format(d)](x) if d == self.injection_depth and self.injection_channels > 0: injection = match_to(injection, x, self.injection_channels) x = torch.cat([x, injection], 1) if hasattr(self, "global_pool"): x = self.global_pool(x) return x class InjectionConvEncoder3D(InjectionConvEncoder): def __init__(self, *args, **kwargs): update_kwargs = dict( norm_op=nn.InstanceNorm3d, conv_op=nn.Conv3d, pool_op=nn.AvgPool3d, global_pool_op=nn.AdaptiveAvgPool3d ) for (arg, val) in update_kwargs.items(): if arg not in kwargs: kwargs[arg] = val super(InjectionConvEncoder3D, self).__init__(*args, **kwargs) class InjectionUNet(ConvModule): def __init__( self, depth=5, in_channels=4, out_channels=4, kernel_size=3, dilation=1, num_feature_maps=24, block_depth=2, num_1x1_at_end=3, injection_channels=3, injection_at="end", activation_op=nn.LeakyReLU, activation_kwargs=None, pool_op=nn.AvgPool2d, pool_kwargs=dict(kernel_size=2), dropout_op=None, dropout_kwargs=None, norm_op=nn.InstanceNorm2d, norm_kwargs=None, conv_op=nn.Conv2d, conv_kwargs=None, upconv_op=nn.ConvTranspose2d, upconv_kwargs=None, output_activation_op=None, output_activation_kwargs=None, return_bottom=False, coords=False, coords_dim=2, **kwargs ): super(InjectionUNet, self).__init__(**kwargs) self.depth = depth self.in_channels = in_channels self.out_channels = out_channels self.kernel_size = kernel_size self.dilation = dilation self.padding = (self.kernel_size + (self.kernel_size-1) * (self.dilation-1)) // 2 self.num_feature_maps = num_feature_maps self.block_depth = block_depth self.num_1x1_at_end = num_1x1_at_end self.injection_channels = injection_channels self.injection_at = injection_at self.activation_op = activation_op self.activation_kwargs = {} if activation_kwargs is None else activation_kwargs self.pool_op = pool_op self.pool_kwargs = {} if pool_kwargs is None else pool_kwargs self.dropout_op = dropout_op self.dropout_kwargs = {} if dropout_kwargs is None else dropout_kwargs self.norm_op = norm_op self.norm_kwargs = {} if norm_kwargs is None else 
norm_kwargs self.conv_op = conv_op self.conv_kwargs = {} if conv_kwargs is None else conv_kwargs self.upconv_op = upconv_op self.upconv_kwargs = {} if upconv_kwargs is None else upconv_kwargs self.output_activation_op = output_activation_op self.output_activation_kwargs = {} if output_activation_kwargs is None else output_activation_kwargs self.return_bottom = return_bottom if not coords: self.coords = [[], []] elif coords is True: self.coords = [list(range(depth)), []] else: self.coords = coords self.coords_dim = coords_dim self.last_activations = None for d in range(self.depth): block = [] if d > 0: block.append(self.pool_op(**self.pool_kwargs)) for i in range(self.block_depth): if d == self.depth - 1 and i > 0: continue out_size = self.num_feature_maps * 2**d if d == 0 and i == 0: in_size = self.in_channels elif i == 0: in_size = self.num_feature_maps * 2**(d - 1) else: in_size = out_size if d in self.coords[0] and i == 0: block.append(ConcatCoords()) in_size += self.coords_dim block.append(self.conv_op(in_size, out_size, self.kernel_size, padding=self.padding, dilation=self.dilation, **self.conv_kwargs)) if self.dropout_op is not None: block.append(self.dropout_op(**self.dropout_kwargs)) if self.norm_op is not None: block.append(self.norm_op(out_size, **self.norm_kwargs)) block.append(self.activation_op(**self.activation_kwargs)) self.add_module("encode-{}".format(d), nn.Sequential(*block)) for d in reversed(range(self.depth)): block = [] for i in range(self.block_depth): if d == self.depth - 1 and i > 0: continue out_size = self.num_feature_maps * 2**(d) if i == 0 and d < self.depth - 1: in_size = self.num_feature_maps * 2**(d+1) elif i == 0 and self.injection_at == "bottom": in_size = out_size + self.injection_channels else: in_size = out_size if d in self.coords[0] and i == 0 and d < self.depth - 1: block.append(ConcatCoords()) in_size += self.coords_dim block.append(self.conv_op(in_size, out_size, self.kernel_size, padding=self.padding, dilation=self.dilation, **self.conv_kwargs)) if self.dropout_op is not None: block.append(self.dropout_op(**self.dropout_kwargs)) if self.norm_op is not None: block.append(self.norm_op(out_size, **self.norm_kwargs)) block.append(self.activation_op(**self.activation_kwargs)) if d > 0: block.append(self.upconv_op(out_size, out_size // 2, self.kernel_size, 2, padding=self.padding, dilation=self.dilation, output_padding=1, **self.upconv_kwargs)) self.add_module("decode-{}".format(d), nn.Sequential(*block)) if self.injection_at == "end": out_size += self.injection_channels in_size = out_size for i in range(self.num_1x1_at_end): if i == self.num_1x1_at_end - 1: out_size = self.out_channels current_conv_kwargs = self.conv_kwargs.copy() current_conv_kwargs["bias"] = True self.add_module("reduce-{}".format(i), self.conv_op(in_size, out_size, 1, **current_conv_kwargs)) if i != self.num_1x1_at_end - 1: self.add_module("reduce-{}-nonlin".format(i), self.activation_op(**self.activation_kwargs)) if self.output_activation_op is not None: self.add_module("output-activation", self.output_activation_op(**self.output_activation_kwargs)) def reset(self): self.last_activations = None def forward(self, x, injection=None, reuse_last_activations=False, store_activations=False): if self.injection_at == "bottom": reuse_last_activations = False store_activations = False if self.last_activations is None or reuse_last_activations is False: enc = [x] for i in range(self.depth - 1): enc.append(self._modules["encode-{}".format(i)](enc[-1])) bottom_rep = 
self._modules["encode-{}".format(self.depth - 1)](enc[-1]) if self.injection_at == "bottom" and self.injection_channels > 0: injection = match_to(injection, bottom_rep, (0, 1)) bottom_rep = torch.cat((bottom_rep, injection), 1) x = self._modules["decode-{}".format(self.depth - 1)](bottom_rep) for i in reversed(range(self.depth - 1)): x = self._modules["decode-{}".format(i)](torch.cat((enc[-(self.depth - 1 - i)], x), 1)) if store_activations: self.last_activations = x.detach() else: x = self.last_activations if self.injection_at == "end" and self.injection_channels > 0: injection = match_to(injection, x, (0, 1)) x = torch.cat((x, injection), 1) for i in range(self.num_1x1_at_end): x = self._modules["reduce-{}".format(i)](x) if self.output_activation_op is not None: x = self._modules["output-activation"](x) if self.return_bottom and not reuse_last_activations: return x, bottom_rep else: return x class InjectionUNet3D(InjectionUNet): def __init__(self, *args, **kwargs): update_kwargs = dict( pool_op=nn.AvgPool3d, norm_op=nn.InstanceNorm3d, conv_op=nn.Conv3d, upconv_op=nn.ConvTranspose3d, coords_dim=3 ) for (arg, val) in update_kwargs.items(): if arg not in kwargs: kwargs[arg] = val super(InjectionUNet3D, self).__init__(*args, **kwargs) class ProbabilisticSegmentationNet(ConvModule): def __init__(self, in_channels=4, out_channels=4, num_feature_maps=24, latent_size=3, depth=5, latent_distribution=torch.distributions.Normal, task_op=InjectionUNet3D, task_kwargs=None, prior_op=InjectionConvEncoder3D, prior_kwargs=None, posterior_op=InjectionConvEncoder3D, posterior_kwargs=None, **kwargs): super(ProbabilisticSegmentationNet, self).__init__(**kwargs) self.task_op = task_op self.task_kwargs = {} if task_kwargs is None else task_kwargs self.prior_op = prior_op self.prior_kwargs = {} if prior_kwargs is None else prior_kwargs self.posterior_op = posterior_op self.posterior_kwargs = {} if posterior_kwargs is None else posterior_kwargs default_task_kwargs = dict( in_channels=in_channels, out_channels=out_channels, num_feature_maps=num_feature_maps, injection_size=latent_size, depth=depth ) default_prior_kwargs = dict( in_channels=in_channels, num_feature_maps=num_feature_maps, z_dim=latent_size, depth=depth ) default_posterior_kwargs = dict( in_channels=in_channels+out_channels, num_feature_maps=num_feature_maps, z_dim=latent_size, depth=depth ) default_task_kwargs.update(self.task_kwargs) self.task_kwargs = default_task_kwargs default_prior_kwargs.update(self.prior_kwargs) self.prior_kwargs = default_prior_kwargs default_posterior_kwargs.update(self.posterior_kwargs) self.posterior_kwargs = default_posterior_kwargs self.latent_distribution = latent_distribution self._prior = None self._posterior = None self.make_modules() def make_modules(self): if type(self.task_op) == type: self.add_module("task_net", self.task_op(**self.task_kwargs)) else: self.add_module("task_net", self.task_op) if type(self.prior_op) == type: self.add_module("prior_net", self.prior_op(**self.prior_kwargs)) else: self.add_module("prior_net", self.prior_op) if type(self.posterior_op) == type: self.add_module("posterior_net", self.posterior_op(**self.posterior_kwargs)) else: self.add_module("posterior_net", self.posterior_op) @property def prior(self): return self._prior @property def posterior(self): return self._posterior @property def last_activations(self): return self.task_net.last_activations def train(self, mode=True): super(ProbabilisticSegmentationNet, self).train(mode) self.reset() def reset(self): self.task_net.reset() 
self._prior = None self._posterior = None def forward(self, input_, seg=None, make_onehot=True, make_onehot_classes=None, newaxis=False): self.encode_prior(input_) if self.training: self.encode_posterior(input_, seg, make_onehot, make_onehot_classes, newaxis) sample = self.posterior.rsample() else: sample = self.prior.loc return self.task_net(input_, sample, store_activations=not self.training) def encode_prior(self, input_): rep = self.prior_net(input_) if isinstance(rep, tuple): mean, logvar = rep elif torch.is_tensor(rep): mean, logvar = torch.split(rep, rep.shape[1] // 2, dim=1) self._prior = self.latent_distribution(mean, logvar.mul(0.5).exp()) return self._prior def encode_posterior(self, input_, seg, make_onehot=True, make_onehot_classes=None, newaxis=False): if make_onehot: if make_onehot_classes is None: make_onehot_classes = tuple(range(self.posterior_net.in_channels - input_.shape[1])) seg = make_onehot_segmentation(seg, make_onehot_classes, newaxis=newaxis) rep = self.posterior_net(torch.cat((input_, seg.float()), 1)) if isinstance(rep, tuple): mean, logvar = rep elif torch.is_tensor(rep): mean, logvar = torch.split(rep, rep.shape[1] // 2, dim=1) self._posterior = self.latent_distribution(mean, logvar.mul(0.5).exp()) return self._posterior def sample_prior(self, N=1, out_device=None, input_=None): if out_device is None: if self.last_activations is not None: out_device = self.last_activations.device elif input_ is not None: out_device = input_.device else: out_device = next(self.task_net.parameters()).device with torch.no_grad(): if self.prior is None or input_ is not None: self.encode_prior(input_) result = [] if input_ is not None: result.append(self.task_net(input_, self.prior.sample(), reuse_last_activations=False, store_activations=True).to(device=out_device)) while len(result) < N: result.append(self.task_net(input_, self.prior.sample(), reuse_last_activations=self.last_activations is not None, store_activations=False).to(device=out_device)) if N == 1: return result[0] else: return result def reconstruct(self, sample=None, use_posterior_mean=True, out_device=None, input_=None): if self.posterior is None and sample is None: raise ValueError("'posterior' is currently None. Please pass an input and a segmentation first.") if out_device is None: out_device = next(self.task_net.parameters()).device if sample is None: if use_posterior_mean: sample = self.posterior.loc else: sample = self.posterior.sample() else: sample = sample.to(next(self.task_net.parameters()).device) with torch.no_grad(): return self.task_net(input_, sample, reuse_last_activations=True).to(device=out_device) def kl_divergence(self): if self.posterior is None or self.prior is None: raise ValueError("'prior' and 'posterior' must not be None, but prior={} and posterior={}".format(self.prior, self.posterior)) return torch.distributions.kl_divergence(self.posterior, self.prior).sum()
MIT License
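The elbo method above combines two pieces: a KL divergence between the encoded posterior and prior, and an NLLLoss reconstruction term. A toy illustration of just those two ingredients with made-up tensors (it does not reproduce the network itself):

import torch
import torch.nn as nn

# KL term between a small posterior and prior Normal.
prior = torch.distributions.Normal(torch.zeros(3), torch.ones(3))
posterior = torch.distributions.Normal(torch.full((3,), 0.5), torch.full((3,), 0.8))
kl = torch.distributions.kl_divergence(posterior, prior).sum()

# Reconstruction term: NLLLoss expects log-probabilities and integer targets.
log_probs = torch.log_softmax(torch.randn(4, 2), dim=1)
target = torch.tensor([0, 1, 1, 0])
nll = nn.NLLLoss(reduction="sum")(log_probs, target)

beta = 1.0
elbo = -(beta * nll + kl)   # same sign convention as the method above
print(float(kl), float(nll), float(elbo))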
amosbastian/fpl
fpl/cli.py
picks
python
async def picks(user_id):
    async with aiohttp.ClientSession() as session:
        fpl = FPL(session)
        user = await fpl.get_user(user_id)
        await format_picks(user)
Echoes a user's picks to the terminal.
https://github.com/amosbastian/fpl/blob/cce084026b039e36fee621f25c91c70a0fa45156/fpl/cli.py#L315-L320
import os import sqlite3 import aiohttp import click from appdirs import user_data_dir from prettytable import PrettyTable from fpl import FPL from .constants import MYTEAM_FORMAT, PICKS_FORMAT from .utils import chip_converter, coroutine, position_converter data_directory = user_data_dir("fpl", "fpl") os.makedirs(data_directory, exist_ok=True) sql_file = os.path.join(data_directory, "fpl.sqlite") connection = sqlite3.connect(sql_file) class HiddenPassword(object): def __init__(self, password=""): self.password = password def __str__(self): return "*" * len(self.password) def table_exists(table_name): query = ("SELECT name FROM sqlite_master " f"WHERE type='table' AND name='{table_name}'") try: cursor = connection.cursor() cursor.execute(query) return True if cursor.fetchone() else False except sqlite3.OperationalError: return False @click.group() def cli(): pass def get_starters(players, position): starters = [player for player in players if position_converter( player.element_type) == position] return starters async def get_picks(team): player_ids = [player["element"] for player in team] async with aiohttp.ClientSession() as session: fpl = FPL(session) players = await fpl.get_players(player_ids) for player_data in team: for player in players: if player_data["element"] != player.id: continue player.role = "" player.event_points = (player.event_points * player_data["multiplier"]) player.team_position = player_data["position"] player.is_captain = player_data["is_captain"] if player.is_captain: player.role = " (C)" player.is_vice_captain = player_data["is_vice_captain"] if player.is_vice_captain: player.role = " (VC)" if player.status in ["d", "u"]: player.colour = "yellow" elif player.status in ["i"]: player.colour = "red" else: player.colour = None return players def team_width(positions, points=False): width = 0 for position in positions: if points: player_names = [PICKS_FORMAT.format( player.web_name, player.event_points, player.role) for player in position] else: player_names = [MYTEAM_FORMAT.format( player.web_name, player.role) for player in position] position_width = len(" - ".join(player_names)) if position_width > width: width = position_width return width def used_chips(chips): if not chips: return "NONE." 
used = ["{} (GW {})".format(chip_converter(chip["name"]), chip["event"]) for chip in chips] return ", ".join(used) def available_chips(chips): available = ["WC", "TC", "BB", "FH"] used = [chip_converter(chip["name"]) for chip in chips] return ", ".join(list(set(available) - set(used))) def split_by_position(team): return [ get_starters(team[:11], "Goalkeeper"), get_starters(team[:11], "Defender"), get_starters(team[:11], "Midfielder"), get_starters(team[:11], "Forward"), team[-4:] ] def team_printer(positions, formatter, points=False): width = team_width(positions[1:], points) for position in positions: player_names = [] ansi_padding = 0 for player in position: if points: player_information = ( player.event_points, player.web_name, player.role) else: player_information = (player.web_name, player.role) normal_string = formatter.format(*player_information) ansi_string = click.style(normal_string, fg=player.colour) player_names.append(ansi_string) ansi_padding += len(ansi_string) - len(normal_string) player_string = " - ".join(player_names) formatted_string = "{:^{}}".format(player_string, width + ansi_padding) click.echo(formatted_string) async def myteam_table(user): table = PrettyTable() table.field_names = ["Key", "Value"] table.add_row( ["Overall points", "{:,}".format(user.summary_overall_points)]) table.add_row(["Overall rank", "{:,}".format(user.summary_overall_rank)]) table.add_row(["Gameweek points", user.summary_event_points]) table.add_row(["Squad value", "£{}m".format(user.value)]) table.add_row(["In the bank", "£{}m".format(user.bank)]) table.add_row(["Chips used", used_chips(await user.get_chips_history())]) table.add_row(["Chips available", available_chips(await user.get_chips_history())]) table.align["Key"] = "l" table.align["Value"] = "r" click.echo(str(table).split("\n", 2)[2]) async def format_myteam(user): team = await user.get_team() players = sorted(await get_picks(team), key=lambda x: x.team_position) goalkeeper, defenders, midfielders, forwards, bench = split_by_position( players) team_printer([goalkeeper, defenders, midfielders, forwards], MYTEAM_FORMAT) click.echo("\nSubstitutes: {}".format(", ".join( [click.style("{}".format(player.web_name), fg=player.colour) for player in bench]))) await myteam_table(user) def get_account_data(index): cursor = connection.cursor() if table_exists("accounts"): accounts = cursor.execute("SELECT * from accounts").fetchall() if accounts: return accounts[0][index] return "" @cli.command() @click.argument("user_id", default=get_account_data(1)) @click.option("--email", prompt="Email address", envvar="FPL_EMAIL", default=get_account_data(2), help="FPL email address", show_default="email saved in SQLite database") @click.option("--password", prompt=True, hide_input=True, envvar="FPL_PASSWORD", default=HiddenPassword(get_account_data(3)), help="FPL password", show_default="password saved in SQLite database") @coroutine async def myteam(user_id, email, password): if isinstance(password, HiddenPassword): password = password.password async with aiohttp.ClientSession() as session: fpl = FPL(session) await fpl.login(email, password) try: user = await fpl.get_user(user_id) await format_myteam(user) except KeyError: raise click.BadParameter("email address or password.") def automatic_substitutions(user_information, players): substitution_ids = [(player["element_in"], player["element_out"]) for player in user_information["automatic_subs"]] substitutions = [] for player_in_id, player_out_id in substitution_ids: player_in = [player for player in 
players if player.player_id == player_in_id][0] player_out = [player for player in players if player.player_id == player_out_id][0] substitutions.append("{} {} -> {} {}".format( player_out.event_points, click.style(player_out.name, fg=player_out.colour), player_in.event_points, click.style(player_in.name, fg=player_in.colour))) return ", ".join(substitutions) def picks_table(user, user_information, players): table = PrettyTable() table.field_names = ["Key", "Value"] table.add_row(["Gamweek points", user.summary_overall_points]) table.add_row(["Gameweek rank", "{:,}".format(user.summary_overall_rank)]) gameweek_transfers = user_information["entry_history"]["event_transfers"] point_hit = user_information["entry_history"]["event_transfers_cost"] if point_hit < 0: table.add_row(["Gameweek transfers", "{} ({})".format( gameweek_transfers, point_hit)]) else: table.add_row(["Gameweek transfers", gameweek_transfers]) table.add_row(["Points on bench", user_information["entry_history"][ "points_on_bench"]]) table.add_row(["Automatic substitutions", automatic_substitutions( user_information, players)]) table.align["Key"] = "l" table.align["Value"] = "r" click.echo(str(table).split("\n", 2)[2]) async def format_picks(user): user_picks = await user.get_picks() user_information = user_picks[len(user_picks)] players = sorted(await get_picks(user_information["picks"]), key=lambda x: x.team_position) goalkeeper, defenders, midfielders, forwards, bench = split_by_position( players) team_printer([goalkeeper, defenders, midfielders, forwards], PICKS_FORMAT, True) click.echo("\nSubstitutes: {}".format(", ".join( ["{} {}".format(player.event_points, click.style( player.web_name, fg=player.colour)) for player in bench]))) picks_table(user, user_information, players) @cli.command() @click.argument("user_id") @coroutine
MIT License
mosquito/aio-pika
aio_pika/queue.py
Queue.cancel
python
async def cancel(
    self, consumer_tag: ConsumerTag, timeout=None, nowait: bool = False
) -> aiormq.spec.Basic.CancelOk:
    return await asyncio.wait_for(
        self.channel.basic_cancel(
            consumer_tag=consumer_tag, nowait=nowait,
        ),
        timeout=timeout,
    )
This method cancels a consumer. This does not affect already delivered
messages, but it does mean the server will not send any more messages for
that consumer. The client may receive an arbitrary number of messages in
between sending the cancel method and receiving the cancel-ok reply. It may
also be sent from the server to the client in the event of the consumer
being unexpectedly cancelled (i.e. cancelled for any reason other than the
server receiving the corresponding basic.cancel from the client). This
allows clients to be notified of the loss of consumers due to events such
as queue deletion.

:param consumer_tag: consumer tag returned by :func:`~aio_pika.Queue.consume`
:param timeout: execution timeout
:param bool nowait: Do not expect a Basic.CancelOk response
:return: Basic.CancelOk when operation completed successfully
https://github.com/mosquito/aio-pika/blob/99a90e46aeb34191753931011962c14434b5593e/aio_pika/queue.py#L244-L269
import asyncio from collections import namedtuple from functools import partial from logging import getLogger from typing import Any, Callable, Optional import aiormq from aiormq.types import DeliveredMessage from .exceptions import QueueEmpty from .exchange import Exchange, ExchangeParamType from .message import IncomingMessage from .tools import create_task, shield log = getLogger(__name__) ConsumerTag = str DeclarationResult = namedtuple( "DeclarationResult", ("message_count", "consumer_count"), ) async def consumer(callback, msg: DeliveredMessage, *, no_ack, loop): message = IncomingMessage(msg, no_ack=no_ack) return await create_task(callback, message, loop=loop) class Queue: def __init__( self, connection, channel: aiormq.Channel, name, durable, exclusive, auto_delete, arguments, passive: bool = False, ): self.loop = connection.loop self._channel = channel self.name = name or "" self.durable = durable self.exclusive = exclusive self.auto_delete = auto_delete self.arguments = arguments self.passive = passive self.declaration_result = None self._get_lock = asyncio.Lock() @property def channel(self) -> aiormq.Channel: if self._channel is None: raise RuntimeError("Channel not opened") return self._channel def __str__(self): return "%s" % self.name def __repr__(self): return ( "<Queue(%s): " "auto_delete=%s, " "durable=%s, " "exclusive=%s, " "arguments=%r>" ) % ( self, self.auto_delete, self.durable, self.exclusive, self.arguments, ) async def declare( self, timeout: int = None ) -> aiormq.spec.Queue.DeclareOk: log.debug("Declaring queue: %r", self) self.declaration_result = await asyncio.wait_for( self._channel.queue_declare( queue=self.name, durable=self.durable, exclusive=self.exclusive, auto_delete=self.auto_delete, arguments=self.arguments, passive=self.passive, ), timeout=timeout, ) self.name = self.declaration_result.queue return self.declaration_result async def bind( self, exchange: ExchangeParamType, routing_key: str = None, *, arguments=None, timeout: int = None ) -> aiormq.spec.Queue.BindOk: if routing_key is None: routing_key = self.name log.debug( "Binding queue %r: exchange=%r, routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) return await asyncio.wait_for( self.channel.queue_bind( self.name, exchange=Exchange._get_exchange_name(exchange), routing_key=routing_key, arguments=arguments, ), timeout=timeout, ) async def unbind( self, exchange: ExchangeParamType, routing_key: str = None, arguments: dict = None, timeout: int = None, ) -> aiormq.spec.Queue.UnbindOk: if routing_key is None: routing_key = self.name log.debug( "Unbinding queue %r: exchange=%r, routing_key=%r, arguments=%r", self, exchange, routing_key, arguments, ) return await asyncio.wait_for( self.channel.queue_unbind( queue=self.name, exchange=Exchange._get_exchange_name(exchange), routing_key=routing_key, arguments=arguments, ), timeout=timeout, ) async def consume( self, callback: Callable[[IncomingMessage], Any], no_ack: bool = False, exclusive: bool = False, arguments: dict = None, consumer_tag=None, timeout=None, ) -> ConsumerTag: log.debug("Start to consuming queue: %r", self) return ( await asyncio.wait_for( self.channel.basic_consume( queue=self.name, consumer_callback=partial( consumer, callback, no_ack=no_ack, loop=self.loop, ), exclusive=exclusive, no_ack=no_ack, arguments=arguments, consumer_tag=consumer_tag, ), timeout=timeout, ) ).consumer_tag
Apache License 2.0
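Illustrative usage sketch for the record above (not part of the original dataset entry): a consumer is started with Queue.consume and then stopped with Queue.cancel. The broker URL, queue name, and sleep duration are placeholder assumptions.

import asyncio
import aio_pika


async def main():
    # Broker URL and queue name are placeholders for this sketch.
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    async with connection:
        channel = await connection.channel()
        queue = await channel.declare_queue("demo-queue", auto_delete=True)

        async def on_message(message: aio_pika.IncomingMessage) -> None:
            async with message.process():
                print(message.body)

        consumer_tag = await queue.consume(on_message)
        await asyncio.sleep(5)            # consume for a while
        await queue.cancel(consumer_tag)  # server stops delivering to this consumer


asyncio.run(main())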
googlearchive/simian
src/tests/simian/mac/common/test.py
RequestHandlerTest.MockDoAnyAuth
python
def MockDoAnyAuth(self, fail=False, and_return=None):
    if 'authDoAnyAuth' not in self._set_mock:
        self.mox.StubOutWithMock(auth, 'DoAnyAuth')
        self._set_mock['authDoAnyAuth'] = 1
    if fail:
        auth.DoAnyAuth().AndRaise(auth.NotAuthenticated)
    else:
        auth.DoAnyAuth().AndReturn(and_return)
Mock calling auth.DoAnyAuth().

Args:
  fail: bool, whether to fail or not
  and_return: any, variable to pass to AndReturn, default None
https://github.com/googlearchive/simian/blob/fb9c43946ff7ba29be417068d6447cfc0adfe9ef/src/tests/simian/mac/common/test.py#L165-L178
import base64 import tests.appenginesdk from google.appengine.ext import deferred from google.appengine.ext import testbed from google.apputils import basetest from tests.simian.mac.common import test_base as test_base from simian import settings from simian.mac.common import auth def GetArgFromCallHistory(mock_fn, call_index=0, arg_index=0): return mock_fn.call_args_list[call_index][0][arg_index] class AppengineTest(basetest.TestCase): def setUp(self): super(AppengineTest, self).setUp() self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.setup_env( overwrite=True, USER_EMAIL='user@example.com', USER_ID='123', USER_IS_ADMIN='0', DEFAULT_VERSION_HOSTNAME='example.appspot.com') self.testbed.init_all_stubs() def tearDown(self): super(AppengineTest, self).tearDown() self.testbed.deactivate() def RunAllDeferredTasks(self, queue_name='default'): taskqueue = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME) tasks = taskqueue.GetTasks(queue_name) for task in tasks: deferred.run(base64.b64decode(task['body'])) taskqueue.DeleteTask(queue_name, task['name']) class GenericContainer(test_base.GenericContainer): class RequestHandlerTest(test_base.RequestHandlerTest): def setUp(self): super(RequestHandlerTest, self).setUp() self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.setup_env( overwrite=True, USER_EMAIL='user@example.com', USER_ID='123', USER_IS_ADMIN='0', DEFAULT_VERSION_HOSTNAME='example.appspot.com') self.testbed.init_datastore_v3_stub() self.testbed.init_memcache_stub() self.testbed.init_taskqueue_stub() self.testbed.init_user_stub() self.testbed.init_mail_stub() settings.ADMINS = ['admin@example.com'] def tearDown(self): super(RequestHandlerTest, self).tearDown() self.testbed.deactivate() def MockDoUserAuth(self, user=None, is_admin=None, fail=False): if 'authDoUserAuth' not in self._set_mock: self.mox.StubOutWithMock(auth, 'DoUserAuth') self._set_mock['authDoUserAuth'] = 1 if fail: if is_admin is None: auth.DoUserAuth().AndRaise(auth.NotAuthenticated) else: auth.DoUserAuth(is_admin=is_admin).AndRaise(auth.NotAuthenticated) else: if is_admin is None: auth.DoUserAuth().AndReturn(user) else: auth.DoUserAuth(is_admin=is_admin).AndReturn(user) def MockDoOAuthAuth(self, user=None, is_admin=None, fail=False): if not 'authDoOAuthAuth' in self._set_mock: self.mox.StubOutWithMock(auth, 'DoOAuthAuth') self._set_mock['authDoOAuthAuth'] = 1 if fail: if is_admin is None: auth.DoOAuthAuth().AndRaise(auth.NotAuthenticated) else: auth.DoOAuthAuth(is_admin=is_admin).AndRaise(auth.NotAuthenticated) else: if is_admin is None: auth.DoOAuthAuth().AndReturn(user) else: auth.DoOAuthAuth(is_admin=is_admin).AndReturn(user) def MockDoMunkiAuth(self, fail=False, and_return=None, **kwargs): munki_auth_module = self.GetTestClassModule().gaeserver if not hasattr(munki_auth_module, 'DoMunkiAuth'): raise NotImplementedError('MockDoMunkiAuth for non-Munki handler class') if 'authDoMunkiAuth' not in self._set_mock: self.mox.StubOutWithMock(munki_auth_module, 'DoMunkiAuth') self._set_mock['authDoMunkiAuth'] = 1 if fail: self.GetTestClassModule().gaeserver.DoMunkiAuth(**kwargs).AndRaise( munki_auth_module.NotAuthenticated) else: self.GetTestClassModule().gaeserver.DoMunkiAuth(**kwargs).AndReturn( and_return)
Apache License 2.0
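Hedged sketch (not part of the record above) of how a handler test might lean on MockDoAnyAuth. The test-class name and the elided handler-invocation steps are assumptions; only the helper call and the mox replay/verify cycle come from the source.

from tests.simian.mac.common import test   # module path taken from the record's function_path


class ExampleHandlerTest(test.RequestHandlerTest):
    """Hypothetical handler test built on the mock helper above."""

    def testAuthSucceeds(self):
        # auth.DoAnyAuth() will return a fake user instead of raising.
        self.MockDoAnyAuth(and_return='user@example.com')
        self.mox.ReplayAll()
        # ... invoke the handler under test here ...
        self.mox.VerifyAll()

    def testAuthFails(self):
        # auth.DoAnyAuth() will raise auth.NotAuthenticated.
        self.MockDoAnyAuth(fail=True)
        self.mox.ReplayAll()
        # ... assert that the handler rejects the request ...
        self.mox.VerifyAll()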
mar10/pyftpsync
ftpsync/synchronizers.py
BiDirSynchronizer.on_conflict
python
def on_conflict(self, pair):
    any_entry = pair.any_entry
    if not self._test_match_or_print(any_entry):
        return
    resolve = self._interactive_resolve(pair)
    if resolve == "skip":
        self._log_action("skip", "conflict", "*?*", any_entry)
        self._inc_stat("conflict_files_skipped")
        return
    if pair.local and pair.remote:
        assert pair.local.is_file()
        is_newer = pair.local > pair.remote
        if (
            resolve == "local"
            or (is_newer and resolve == "new")
            or (not is_newer and resolve == "old")
        ):
            self._log_action("copy", "conflict", "*>*", pair.local)
            self._copy_file(self.local, self.remote, pair.local)
        elif (
            resolve == "remote"
            or (is_newer and resolve == "old")
            or (not is_newer and resolve == "new")
        ):
            self._log_action("copy", "conflict", "*<*", pair.local)
            self._copy_file(self.remote, self.local, pair.remote)
        else:
            raise NotImplementedError
    elif pair.local:
        assert pair.local.is_file()
        if resolve == "local":
            self._log_action("restore", "conflict", "*>x", pair.local)
            self._copy_file(self.local, self.remote, pair.local)
        elif resolve == "remote":
            self._log_action("delete", "conflict", "*<x", pair.local)
            self._remove_file(pair.local)
        else:
            raise NotImplementedError
    else:
        assert pair.remote.is_file()
        if resolve == "local":
            self._log_action("delete", "conflict", "x>*", pair.remote)
            self._remove_file(pair.remote)
        elif resolve == "remote":
            self._log_action("restore", "conflict", "x<*", pair.remote)
            self._copy_file(self.remote, self.local, pair.remote)
        else:
            raise NotImplementedError
    return
Return False to prevent visiting of children.
https://github.com/mar10/pyftpsync/blob/fd6471b34cc2145cdc9a469016ad08273d13b211/ftpsync/synchronizers.py#L942-L996
import fnmatch import sys import time from ftpsync.ftp_target import FTPTarget from ftpsync.metadata import DirMetadata from ftpsync.resources import DirectoryEntry, EntryPair, FileEntry, operation_map from ftpsync.util import ( DRY_RUN_PREFIX, IS_REDIRECTED, VT_ERASE_LINE, ansi_code, byte_compare, colorama, eps_compare, pretty_stamp, write, ) CONFIG_FILE_NAME = "pyftpsync.yaml" DEFAULT_OMIT = [".DS_Store", ".git", ".hg", ".svn"] ALWAYS_OMIT = (CONFIG_FILE_NAME, DirMetadata.META_FILE_NAME, DirMetadata.LOCK_FILE_NAME) _ts = pretty_stamp def process_options(opts): match = opts.get("match") if match and type(match) is str: opts["match"] = [pat.strip() for pat in match.split(",")] elif match: assert type(match) is list else: opts["match"] = [] exclude = opts.get("exclude") if exclude and type(exclude) is str: opts["exclude"] = [pat.strip() for pat in exclude.split(",")] elif exclude: assert type(exclude) is list else: opts["exclude"] = [] def match_path(entry, opts): if entry.name in ALWAYS_OMIT: return False path = entry.name ok = True match = opts.get("match") exclude = opts.get("exclude") if entry.is_file() and match: assert type(match) is list ok = False for pat in match: if fnmatch.fnmatch(path, pat): ok = True break if ok and exclude: assert type(exclude) is list for pat in exclude: if fnmatch.fnmatch(path, pat): ok = False break return ok class BaseSynchronizer: _resolve_shortcuts = {"l": "local", "r": "remote", "s": "skip"} def __init__(self, local, remote, options): self.local = local self.remote = remote self.options = options or {} process_options(self.options) self.verbose = self.options.get("verbose", 3) self.dry_run = self.options.get("dry_run", False) self.is_script = None self.resolve_all = None self._stats = { "bytes_written": 0, "conflict_files": 0, "conflict_files_skipped": 0, "dirs_created": 0, "dirs_deleted": 0, "download_bytes_written": 0, "download_files_written": 0, "elap_secs": None, "elap_str": None, "entries_seen": 0, "entries_touched": 0, "files_created": 0, "files_deleted": 0, "files_written": 0, "interactive_ask": 0, "local_dirs": 0, "local_files": 0, "meta_bytes_read": 0, "meta_bytes_written": 0, "remote_dirs": 0, "remote_files": 0, "result_code": None, "upload_bytes_written": 0, "upload_files_written": 0, } def __del__(self): self.close() def get_info_strings(self): raise NotImplementedError def close(self): if self.local.connected: self.local.close() if self.remote.connected: self.remote.close() def get_stats(self): return self._stats def _inc_stat(self, name, ofs=1): self._stats[name] = self._stats.get(name, 0) + ofs def _match(self, entry): return match_path(entry, self.options) def run(self): start = time.time() info_strings = self.get_info_strings() if self.verbose >= 3: write( "{} {}\n{:>20} {}".format( info_strings[0].capitalize(), self.local.get_base_name(), info_strings[1], self.remote.get_base_name(), ) ) write( "Encoding local: {}, remote: {}".format( self.local.encoding, self.remote.encoding ) ) try: self.local.synchronizer = self.remote.synchronizer = self self.local.peer = self.remote self.remote.peer = self.local if self.dry_run: self.local.readonly = True self.local.dry_run = True self.remote.readonly = True self.remote.dry_run = True if not self.local.connected: self.local.open() if not self.remote.connected: self.remote.open() res = self._sync_dir() finally: self.local.synchronizer = self.remote.synchronizer = None self.local.peer = self.remote.peer = None self.close() stats = self._stats stats["elap_secs"] = time.time() - start 
stats["elap_str"] = "{:0.2f} sec".format(stats["elap_secs"]) def _add(rate, size, time): if stats.get(time) and stats.get(size): stats[rate] = "{:0.2f} kB/sec".format(0.001 * stats[size] / stats[time]) _add("upload_rate_str", "upload_bytes_written", "upload_write_time") _add("download_rate_str", "download_bytes_written", "download_write_time") return res def _compare_file(self, local, remote): assert isinstance(local, FileEntry) and isinstance(remote, FileEntry) if not local or not remote: write(" Files cannot be compared ({} != {}).".format(local, remote)) return False elif local.size != remote.size: write( " Files are different (size {:,d} != {:,d}).".format( local.size, remote.size ) ) return False with local.target.open_readable( local.name ) as fp_src, remote.target.open_readable(remote.name) as fp_dest: res, ofs = byte_compare(fp_src, fp_dest) if not res: write(" Files are different at offset {:,d}.".format(ofs)) else: write(" Files are equal.") return res def _copy_file(self, src, dest, file_entry): assert isinstance(file_entry, FileEntry) self._inc_stat("files_written") self._inc_stat("entries_touched") is_upload = dest is self.remote if is_upload: self._inc_stat("upload_files_written") else: self._inc_stat("download_files_written") self._tick() if self.dry_run: return self._dry_run_action( "copy file ({}, {} --> {})".format(file_entry, src, dest) ) elif dest.readonly: raise RuntimeError("target is read-only: {}".format(dest)) start = time.time() def __block_written(data): self._inc_stat("bytes_written", len(data)) if is_upload: self._inc_stat("upload_bytes_written", len(data)) else: self._inc_stat("download_bytes_written", len(data)) if isinstance(src, FTPTarget) and not isinstance(dest, FTPTarget): with dest.open_writable(file_entry.name) as fp_dest: src.copy_to_file(file_entry.name, fp_dest, callback=__block_written) else: with src.open_readable(file_entry.name) as fp_src: dest.write_file(file_entry.name, fp_src, callback=__block_written) dest.set_mtime(file_entry.name, file_entry.mtime, file_entry.size) dest.set_sync_info(file_entry.name, file_entry.mtime, file_entry.size) elap = time.time() - start self._inc_stat("write_time", elap) if is_upload: self._inc_stat("upload_write_time", elap) else: self._inc_stat("download_write_time", elap) return def _copy_recursive(self, src, dest, dir_entry): assert isinstance(dir_entry, DirectoryEntry) self._inc_stat("entries_touched") self._inc_stat("dirs_created") self._tick() if self.dry_run: return self._dry_run_action( "copy directory ({}, {} --> {})".format(dir_entry, src, dest) ) elif dest.readonly: raise RuntimeError("target is read-only: {}".format(dest)) dest.set_sync_info(dir_entry.name, None, None) src.push_meta() dest.push_meta() src.cwd(dir_entry.name) dest.mkdir(dir_entry.name) dest.cwd(dir_entry.name) dest.cur_dir_meta = DirMetadata(dest) for entry in src.get_dir(): self._inc_stat("entries_seen") if entry.is_dir(): self._copy_recursive(src, dest, entry) else: self._copy_file(src, dest, entry) src.flush_meta() dest.flush_meta() src.cwd("..") dest.cwd("..") src.pop_meta() dest.pop_meta() return def _remove_file(self, file_entry): assert isinstance(file_entry, FileEntry) self._inc_stat("entries_touched") self._inc_stat("files_deleted") if self.dry_run: return self._dry_run_action("delete file ({})".format(file_entry)) elif file_entry.target.readonly: raise RuntimeError("target is read-only: {}".format(file_entry.target)) file_entry.target.remove_file(file_entry.name) file_entry.target.remove_sync_info(file_entry.name) def 
_remove_dir(self, dir_entry): assert isinstance(dir_entry, DirectoryEntry) self._inc_stat("entries_touched") self._inc_stat("dirs_deleted") if self.dry_run: return self._dry_run_action("delete directory ({})".format(dir_entry)) elif dir_entry.target.readonly: raise RuntimeError("target is read-only: {}".format(dir_entry.target)) dir_entry.target.rmdir(dir_entry.name) dir_entry.target.remove_sync_info(dir_entry.name) def _log_action(self, action, status, symbol, entry, min_level=3): if self.verbose < min_level: return if len(symbol) > 1 and symbol[0] in (">", "<"): symbol = ( " " + symbol ) color = "" final = "" if not self.options.get("no_color"): if action in ("copy", "restore"): if "<" in symbol: if status == "new": color = ansi_code("Fore.GREEN") + ansi_code("Style.BRIGHT") else: color = ansi_code("Fore.GREEN") else: if status == "new": color = ansi_code("Fore.CYAN") + ansi_code("Style.BRIGHT") else: color = ansi_code("Fore.CYAN") elif action == "delete": color = ansi_code("Fore.RED") elif status == "conflict": color = ansi_code("Fore.LIGHTRED_EX") elif action == "skip" or status == "equal" or status == "visit": color = ansi_code("Fore.LIGHTBLACK_EX") final = ansi_code("Style.RESET_ALL") if colorama: final += colorama.ansi.clear_line(0) else: final += " " * 10 prefix = "" if self.dry_run: prefix = DRY_RUN_PREFIX if action and status: tag = ("{} {}".format(action, status)).upper() else: assert status tag = ("{}".format(status)).upper() name = entry.get_rel_path() if entry.is_dir(): name = "[{}]".format(name) write("{}{}{:<16} {:^3} {}{}".format(prefix, color, tag, symbol, name, final)) def _tick(self): if (self.verbose >= 3 and not IS_REDIRECTED) or self.options.get("progress"): stats = self.get_stats() prefix = DRY_RUN_PREFIX if self.dry_run else "" sys.stdout.write( "{}Touched {}/{} entries in {} directories...\r".format( prefix, stats["entries_touched"], stats["entries_seen"], stats["local_dirs"], ) ) sys.stdout.flush() return def _dry_run_action(self, action): return def _test_match_or_print(self, entry): if not self._match(entry): self._log_action("skip", "unmatched", "-", entry, min_level=4) return False return True def _before_sync(self, entry): self._inc_stat("entries_seen") self._tick() return True def _sync_dir(self): case_mode = self.options.get("case") local_entries = self.local.get_dir() if case_mode == "strict": local_entry_map = dict(map(lambda e: (e.name, e), local_entries)) else: local_entry_map = dict(map(lambda e: (e.name.lower(), e), local_entries)) if len(local_entry_map) != len(local_entries): raise RuntimeError( "Local target contains file names that only differ in case: " "pass `--case strict`" ) remote_entries = self.remote.get_dir() if case_mode == "strict": remote_entry_map = dict(map(lambda e: (e.name, e), remote_entries)) else: remote_entry_map = dict(map(lambda e: (e.name.lower(), e), remote_entries)) if len(remote_entry_map) != len(remote_entries): raise RuntimeError( "Remote target contains file names that only differ in case: " "pass `--case strict`" ) entry_pair_list = [] for local_entry in local_entries: if isinstance(local_entry, DirectoryEntry): self._inc_stat("local_dirs") else: self._inc_stat("local_files") if not self._before_sync(local_entry): continue if case_mode == "strict": remote_entry = remote_entry_map.get(local_entry.name) else: remote_entry = remote_entry_map.get(local_entry.name.lower()) if remote_entry and remote_entry.name != local_entry.name: if case_mode == "local": remote_entry.name = local_entry.name elif case_mode == "remote": 
local_entry.name = remote_entry.name else: raise RuntimeError( "Found ambigiuos name ({} != {}): " "`--case` argument is required.".format( local_entry, remote_entry ) ) entry_pair = EntryPair(local_entry, remote_entry) entry_pair_list.append(entry_pair) for remote_entry in remote_entries: if isinstance(remote_entry, DirectoryEntry): self._inc_stat("remote_dirs") else: self._inc_stat("remote_files") if not self._before_sync(remote_entry): continue if case_mode == "strict": if remote_entry.name not in local_entry_map: entry_pair = EntryPair(None, remote_entry) entry_pair_list.append(entry_pair) else: if remote_entry.name.lower() not in local_entry_map: entry_pair = EntryPair(None, remote_entry) entry_pair_list.append(entry_pair) peer_dir_meta = self.local.cur_dir_meta.peer_sync.get(self.remote.get_id()) for pair in entry_pair_list: pair.classify(peer_dir_meta) for pair in entry_pair_list: hook_result = self.re_classify_pair(pair) if not self._match(pair.any_entry): self.on_mismatch(pair) elif hook_result is not False: handler = getattr(self, "on_" + pair.operation, None) if handler: try: handler(pair) except Exception as e: if self.on_error(e, pair) is not True: raise else: raise NotImplementedError("No handler for {}".format(pair)) if pair.is_conflict(): self._inc_stat("conflict_files") self.local.flush_meta() self.remote.flush_meta() for local_dir in local_entries: if not local_dir.is_dir(): continue elif not self._before_sync(local_dir): continue remote_dir = remote_entry_map.get(local_dir.name) if remote_dir: if local_dir.was_deleted or remote_dir.was_deleted: pass else: self.local.cwd(local_dir.name) self.remote.cwd(local_dir.name) self._sync_dir() self.local.cwd("..") self.remote.cwd("..") return True def re_classify_pair(self, pair): return True def on_error(self, exc, pair): msg = "{red}ERROR: {exc}\n {pair}{reset}".format( exc=exc, pair=pair, red=ansi_code("Fore.LIGHTRED_EX"), reset=ansi_code("Style.RESET_ALL"), ) write(msg) return False def on_mismatch(self, pair): self._log_action("skip", "mismatch", "?", pair.any_entry, min_level=4) def on_equal(self, pair): self._log_action("", "equal", "=", pair.local, min_level=4) def on_copy_local(self, pair): status = pair.remote_classification self._log_action("copy", status, ">", pair.local) def on_copy_remote(self, pair): status = pair.local_classification self._log_action("copy", status, "<", pair.remote) def on_delete_local(self, pair): self._log_action("", "modified", "X< ", pair.local) def on_delete_remote(self, pair): self._log_action("", "modified", " >X", pair.remote) def on_need_compare(self, pair): self._log_action("", "different", "?", pair.local, min_level=2) def on_conflict(self, pair): self._log_action("skip", "conflict", "!", pair.local, min_level=2) class BiDirSynchronizer(BaseSynchronizer): def __init__(self, local, remote, options): super(BiDirSynchronizer, self).__init__(local, remote, options) def get_info_strings(self): return ("synchronize", "with") def _print_pair_diff(self, pair): any_entry = pair.any_entry has_meta = any_entry.get_sync_info("m") is not None write( ( VT_ERASE_LINE + ansi_code("Fore.LIGHTRED_EX") + "CONFLICT: {!r} was modified on both targets since last sync ({})." 
+ ansi_code("Style.RESET_ALL") ).format(any_entry.get_rel_path(), _ts(any_entry.get_sync_info("u"))) ) if has_meta: write( " Original modification time: {}, size: {:,d} bytes.".format( _ts(any_entry.get_sync_info("m")), any_entry.get_sync_info("s") ) ) else: write(" (No meta data available.)") write(" Local: {}".format(pair.local.as_string() if pair.local else "n.a.")) write( " Remote: {}".format( pair.remote.as_string(pair.local) if pair.remote else "n.a." ) ) def _interactive_resolve(self, pair): if self.resolve_all: resolve = self.resolve_all else: resolve = self.options.get("resolve", "skip") if resolve in ("new", "old") and pair.is_same_time(): print("Cannot resolve using '{}' strategy: {}".format(resolve, pair)) resolve = "ask" if self.is_script else "skip" if resolve == "ask" or self.verbose >= 5: self._print_pair_diff(pair) if resolve in ("local", "remote", "old", "new", "skip"): return resolve self._inc_stat("interactive_ask") prompt = ( "Use {m}L{r}ocal, {m}R{r}emote, {m}O{r}lder, {m}N{r}ewer, " + "{m}S{r}kip, {m}B{r}inary compare, {m}H{r}elp ? " ).format( m=ansi_code("Style.BRIGHT") + ansi_code("Style.UNDERLINE"), r=ansi_code("Style.RESET_ALL"), ) while True: r = input(prompt).strip() if r in ("h", "H", "?"): print("The following keys are supported:") print(" 'b': Binary compare") print(" 'n': Use newer file") print(" 'o': Use older file") print(" 'l': Use local file") print(" 'r': Use remote file") print(" 's': Skip this file (leave both targets unchanged)") print( "Hold Shift (upper case letters) to apply choice for all " "remaining conflicts." ) print("Hit Ctrl+C to abort.") self._print_pair_diff(pair) continue elif r in ("b", "B"): self._compare_file(pair.local, pair.remote) continue elif r in ("o", "O", "n", "N") and pair.is_same_time(): print("Files have identical modification times.") continue elif r in ("L", "R", "O", "N", "S"): r = self._resolve_shortcuts[r.lower()] self.resolve_all = r break elif r in ("l", "r", "o", "n", "s"): r = self._resolve_shortcuts[r] break return r def run(self): res = super(BiDirSynchronizer, self).run() return res def on_mismatch(self, pair): self._log_action("skip", "mismatch", "?", pair.any_entry, min_level=4) def on_equal(self, pair): self._log_action("", "equal", "=", pair.local, min_level=4) def on_copy_local(self, pair): local_entry = pair.local if self._test_match_or_print(local_entry): self._log_action("copy", pair.local_classification, ">", local_entry) if pair.is_dir: self._copy_recursive(self.local, self.remote, local_entry) else: self._copy_file(self.local, self.remote, local_entry) def on_copy_remote(self, pair): remote_entry = pair.remote if self._test_match_or_print(remote_entry): self._log_action("copy", pair.remote_classification, "<", remote_entry) if pair.is_dir: self._copy_recursive(self.remote, self.local, remote_entry) else: self._copy_file(self.remote, self.local, remote_entry) def on_delete_local(self, pair): self._log_action("delete", "missing", "X< ", pair.local) if pair.is_dir: self._remove_dir(pair.local) else: self._remove_file(pair.local) def on_delete_remote(self, pair): self._log_action("delete", "missing", " >X", pair.remote) if pair.is_dir: self._remove_dir(pair.remote) else: self._remove_file(pair.remote) def on_need_compare(self, pair): c_pair = (pair.local_classification, pair.remote_classification) org_pair = c_pair org_operation = pair.operation if pair.is_dir: pair.local_classification = pair.remote_classification = "existing" pair.operation = "equal" self._log_action("", "visit", "?", pair.local, 
min_level=4) return elif c_pair == ("existing", "existing"): time_cmp = eps_compare( pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME ) if time_cmp < 0: c_pair = ("unmodified", "modified") elif time_cmp > 0: c_pair = ("modified", "unmodified") elif pair.local.size == pair.remote.size: c_pair = ("unmodified", "unmodified") else: c_pair = ("modified", "modified") elif c_pair == ("new", "new"): time_cmp = eps_compare( pair.local.mtime, pair.remote.mtime, FileEntry.EPS_TIME ) if time_cmp == 0 and pair.local.size == pair.remote.size: c_pair = ("unmodified", "unmodified") else: c_pair = ("modified", "modified") pair.local_classification = c_pair[0] pair.remote_classification = c_pair[1] pair.operation = operation_map.get(c_pair) if not pair.operation: raise RuntimeError( "Undefined operation for pair classification {}".format(c_pair) ) elif pair.operation == org_operation: raise RuntimeError("Could not re-classify {}".format(org_pair)) handler = getattr(self, "on_" + pair.operation, None) res = handler(pair) return res
MIT License
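Illustrative usage sketch (not part of the record above): wiring BiDirSynchronizer to a local folder and an FTP target so that conflicting files are routed through on_conflict. Host, paths, and credentials are placeholders; the target imports mirror those in the context.

from ftpsync.targets import FsTarget
from ftpsync.ftp_target import FTPTarget
from ftpsync.synchronizers import BiDirSynchronizer

local = FsTarget("~/sync-folder")                      # placeholder local path
remote = FTPTarget("/sync-folder", "ftp.example.com",  # placeholder host/credentials
                   username="joe", password="secret")
opts = {"resolve": "ask", "verbose": 3, "dry_run": True}

s = BiDirSynchronizer(local, remote, opts)
s.run()                 # conflicting files trigger on_conflict()
print(s.get_stats())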
huggingface/pytorch-pretrained-biggan
pytorch_pretrained_biggan/utils.py
convert_to_images
python
def convert_to_images(obj):
    try:
        import PIL
    except ImportError:
        raise ImportError("Please install Pillow to use images: pip install Pillow")

    if not isinstance(obj, np.ndarray):
        obj = obj.detach().numpy()

    obj = obj.transpose((0, 2, 3, 1))
    obj = np.clip(((obj + 1) / 2.0) * 256, 0, 255)

    img = []
    for i, out in enumerate(obj):
        out_array = np.asarray(np.uint8(out), dtype=np.uint8)
        img.append(PIL.Image.fromarray(out_array))
    return img
Convert an output tensor from BigGAN into a list of images.

Params:
    obj: tensor or numpy array of shape (batch_size, channels, height, width)
Output:
    list of Pillow Images of size (height, width)
https://github.com/huggingface/pytorch-pretrained-biggan/blob/1e18aed2dff75db51428f13b940c38b923eb4a3d/pytorch_pretrained_biggan/utils.py#L36-L58
from __future__ import absolute_import, division, print_function, unicode_literals import json import logging from io import BytesIO import numpy as np from scipy.stats import truncnorm logger = logging.getLogger(__name__) NUM_CLASSES = 1000 def truncated_noise_sample(batch_size=1, dim_z=128, truncation=1., seed=None): state = None if seed is None else np.random.RandomState(seed) values = truncnorm.rvs(-2, 2, size=(batch_size, dim_z), random_state=state).astype(np.float32) return truncation * values
MIT License
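Illustrative end-to-end sketch (not part of the record above) showing where convert_to_images fits in the library's documented generation flow; the model variant and class label are arbitrary choices.

import torch
from pytorch_pretrained_biggan import (BigGAN, one_hot_from_names,
                                        truncated_noise_sample, convert_to_images)

model = BigGAN.from_pretrained('biggan-deep-256')
truncation = 0.4
class_vector = torch.from_numpy(one_hot_from_names(['soap bubble'], batch_size=1))
noise_vector = torch.from_numpy(truncated_noise_sample(truncation=truncation, batch_size=1))

with torch.no_grad():
    output = model(noise_vector, class_vector, truncation)

images = convert_to_images(output.cpu())   # list of PIL.Image objects
images[0].save('soap_bubble.png')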
bitmovin/bitmovin-api-sdk-python
bitmovin_api_sdk/models/broadcast_ts_transport_configuration.py
BroadcastTsTransportConfiguration.initial_program_clock_reference
python
def initial_program_clock_reference(self, initial_program_clock_reference):
    if initial_program_clock_reference is not None:
        if initial_program_clock_reference is not None and initial_program_clock_reference > 2576980377600:
            raise ValueError("Invalid value for `initial_program_clock_reference`, must be a value less than or equal to `2576980377600`")
        if initial_program_clock_reference is not None and initial_program_clock_reference < 0:
            raise ValueError("Invalid value for `initial_program_clock_reference`, must be a value greater than or equal to `0`")
        if not isinstance(initial_program_clock_reference, (float, int)):
            raise TypeError("Invalid type for `initial_program_clock_reference`, type has to be `float`")

    self._initial_program_clock_reference = initial_program_clock_reference
Sets the initial_program_clock_reference of this BroadcastTsTransportConfiguration.

Sets the Program Clock Reference value at the beginning of the first packet
for the transport stream. The PCR is specified in the timescale of 90000.

:param initial_program_clock_reference: The initial_program_clock_reference of this BroadcastTsTransportConfiguration.
:type: float
https://github.com/bitmovin/bitmovin-api-sdk-python/blob/79dd938804197151af7cbe5501c7ec1d97872c15/bitmovin_api_sdk/models/broadcast_ts_transport_configuration.py#L311-L329
from enum import Enum from six import string_types, iteritems from bitmovin_api_sdk.common.poscheck import poscheck_model import pprint import six class BroadcastTsTransportConfiguration(object): @poscheck_model def __init__(self, muxrate=None, stop_on_error=None, prevent_empty_adaption_fields_in_video=None, pat_repetition_rate_per_sec=None, pmt_repetition_rate_per_sec=None, variable_mux_rate=None, initial_presentation_time_stamp=None, initial_program_clock_reference=None): self._muxrate = None self._stop_on_error = None self._prevent_empty_adaption_fields_in_video = None self._pat_repetition_rate_per_sec = None self._pmt_repetition_rate_per_sec = None self._variable_mux_rate = None self._initial_presentation_time_stamp = None self._initial_program_clock_reference = None self.discriminator = None if muxrate is not None: self.muxrate = muxrate if stop_on_error is not None: self.stop_on_error = stop_on_error if prevent_empty_adaption_fields_in_video is not None: self.prevent_empty_adaption_fields_in_video = prevent_empty_adaption_fields_in_video if pat_repetition_rate_per_sec is not None: self.pat_repetition_rate_per_sec = pat_repetition_rate_per_sec if pmt_repetition_rate_per_sec is not None: self.pmt_repetition_rate_per_sec = pmt_repetition_rate_per_sec if variable_mux_rate is not None: self.variable_mux_rate = variable_mux_rate if initial_presentation_time_stamp is not None: self.initial_presentation_time_stamp = initial_presentation_time_stamp if initial_program_clock_reference is not None: self.initial_program_clock_reference = initial_program_clock_reference @property def openapi_types(self): types = { 'muxrate': 'float', 'stop_on_error': 'bool', 'prevent_empty_adaption_fields_in_video': 'bool', 'pat_repetition_rate_per_sec': 'float', 'pmt_repetition_rate_per_sec': 'float', 'variable_mux_rate': 'bool', 'initial_presentation_time_stamp': 'float', 'initial_program_clock_reference': 'float' } return types @property def attribute_map(self): attributes = { 'muxrate': 'muxrate', 'stop_on_error': 'stopOnError', 'prevent_empty_adaption_fields_in_video': 'preventEmptyAdaptionFieldsInVideo', 'pat_repetition_rate_per_sec': 'patRepetitionRatePerSec', 'pmt_repetition_rate_per_sec': 'pmtRepetitionRatePerSec', 'variable_mux_rate': 'variableMuxRate', 'initial_presentation_time_stamp': 'initialPresentationTimeStamp', 'initial_program_clock_reference': 'initialProgramClockReference' } return attributes @property def muxrate(self): return self._muxrate @muxrate.setter def muxrate(self, muxrate): if muxrate is not None: if muxrate is not None and muxrate > 1000000000: raise ValueError("Invalid value for `muxrate`, must be a value less than or equal to `1000000000`") if muxrate is not None and muxrate < 0: raise ValueError("Invalid value for `muxrate`, must be a value greater than or equal to `0`") if not isinstance(muxrate, (float, int)): raise TypeError("Invalid type for `muxrate`, type has to be `float`") self._muxrate = muxrate @property def stop_on_error(self): return self._stop_on_error @stop_on_error.setter def stop_on_error(self, stop_on_error): if stop_on_error is not None: if not isinstance(stop_on_error, bool): raise TypeError("Invalid type for `stop_on_error`, type has to be `bool`") self._stop_on_error = stop_on_error @property def prevent_empty_adaption_fields_in_video(self): return self._prevent_empty_adaption_fields_in_video @prevent_empty_adaption_fields_in_video.setter def prevent_empty_adaption_fields_in_video(self, prevent_empty_adaption_fields_in_video): if 
prevent_empty_adaption_fields_in_video is not None: if not isinstance(prevent_empty_adaption_fields_in_video, bool): raise TypeError("Invalid type for `prevent_empty_adaption_fields_in_video`, type has to be `bool`") self._prevent_empty_adaption_fields_in_video = prevent_empty_adaption_fields_in_video @property def pat_repetition_rate_per_sec(self): return self._pat_repetition_rate_per_sec @pat_repetition_rate_per_sec.setter def pat_repetition_rate_per_sec(self, pat_repetition_rate_per_sec): if pat_repetition_rate_per_sec is not None: if pat_repetition_rate_per_sec is not None and pat_repetition_rate_per_sec > 1000: raise ValueError("Invalid value for `pat_repetition_rate_per_sec`, must be a value less than or equal to `1000`") if pat_repetition_rate_per_sec is not None and pat_repetition_rate_per_sec < 0.001: raise ValueError("Invalid value for `pat_repetition_rate_per_sec`, must be a value greater than or equal to `0.001`") if not isinstance(pat_repetition_rate_per_sec, (float, int)): raise TypeError("Invalid type for `pat_repetition_rate_per_sec`, type has to be `float`") self._pat_repetition_rate_per_sec = pat_repetition_rate_per_sec @property def pmt_repetition_rate_per_sec(self): return self._pmt_repetition_rate_per_sec @pmt_repetition_rate_per_sec.setter def pmt_repetition_rate_per_sec(self, pmt_repetition_rate_per_sec): if pmt_repetition_rate_per_sec is not None: if pmt_repetition_rate_per_sec is not None and pmt_repetition_rate_per_sec > 1000: raise ValueError("Invalid value for `pmt_repetition_rate_per_sec`, must be a value less than or equal to `1000`") if pmt_repetition_rate_per_sec is not None and pmt_repetition_rate_per_sec < 0.001: raise ValueError("Invalid value for `pmt_repetition_rate_per_sec`, must be a value greater than or equal to `0.001`") if not isinstance(pmt_repetition_rate_per_sec, (float, int)): raise TypeError("Invalid type for `pmt_repetition_rate_per_sec`, type has to be `float`") self._pmt_repetition_rate_per_sec = pmt_repetition_rate_per_sec @property def variable_mux_rate(self): return self._variable_mux_rate @variable_mux_rate.setter def variable_mux_rate(self, variable_mux_rate): if variable_mux_rate is not None: if not isinstance(variable_mux_rate, bool): raise TypeError("Invalid type for `variable_mux_rate`, type has to be `bool`") self._variable_mux_rate = variable_mux_rate @property def initial_presentation_time_stamp(self): return self._initial_presentation_time_stamp @initial_presentation_time_stamp.setter def initial_presentation_time_stamp(self, initial_presentation_time_stamp): if initial_presentation_time_stamp is not None: if initial_presentation_time_stamp is not None and initial_presentation_time_stamp > 5400000: raise ValueError("Invalid value for `initial_presentation_time_stamp`, must be a value less than or equal to `5400000`") if initial_presentation_time_stamp is not None and initial_presentation_time_stamp < 0: raise ValueError("Invalid value for `initial_presentation_time_stamp`, must be a value greater than or equal to `0`") if not isinstance(initial_presentation_time_stamp, (float, int)): raise TypeError("Invalid type for `initial_presentation_time_stamp`, type has to be `float`") self._initial_presentation_time_stamp = initial_presentation_time_stamp @property def initial_program_clock_reference(self): return self._initial_program_clock_reference @initial_program_clock_reference.setter
MIT License
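Small sketch (not part of the record above) of the setter's validation behaviour; the import path is derived from the record's function_path and the values are placeholders.

from bitmovin_api_sdk.models.broadcast_ts_transport_configuration import (
    BroadcastTsTransportConfiguration,
)

config = BroadcastTsTransportConfiguration(initial_program_clock_reference=90000)
config.initial_program_clock_reference = 180000   # accepted: within [0, 2576980377600]

try:
    config.initial_program_clock_reference = -1   # below the allowed minimum
except ValueError as exc:
    print(exc)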
botfront/rasa-for-botfront
rasa/nlu/components.py
ComponentBuilder.__get_cached_component
python
def __get_cached_component(
    self, component_meta: Dict[Text, Any], model_metadata: "Metadata"
) -> Tuple[Optional[Component], Optional[Text]]:
    from rasa.nlu import registry

    component_name = component_meta.get("class", component_meta["name"])
    component_class = registry.get_component_class(component_name)
    cache_key = component_class.cache_key(component_meta, model_metadata)
    if (
        cache_key is not None
        and self.use_cache
        and cache_key in self.component_cache
    ):
        return self.component_cache[cache_key], cache_key

    return None, cache_key
Load a component from the cache, if it exists.

Returns the component, if found, and the cache key.
https://github.com/botfront/rasa-for-botfront/blob/6e0e48d0059e197b5f686df1e27935769c3641b7/rasa/nlu/components.py#L755-L776
from collections import defaultdict import itertools import logging import typing from typing import Any, Dict, Hashable, List, Optional, Set, Text, Tuple, Type, Iterable import rasa.utils.train_utils from rasa.exceptions import MissingDependencyException from rasa.shared.exceptions import RasaException from rasa.shared.nlu.constants import TRAINABLE_EXTRACTORS from rasa.nlu.config import RasaNLUModelConfig from rasa.shared.exceptions import InvalidConfigException from rasa.shared.nlu.training_data.training_data import TrainingData from rasa.shared.nlu.training_data.message import Message from rasa.nlu.constants import COMPONENT_INDEX import rasa.shared.utils.io if typing.TYPE_CHECKING: from rasa.nlu.model import Metadata logger = logging.getLogger(__name__) def find_unavailable_packages(package_names: List[Text]) -> Set[Text]: import importlib failed_imports = set() for package in package_names: try: importlib.import_module(package) except ImportError: failed_imports.add(package) return failed_imports def validate_requirements(component_names: List[Optional[Text]]) -> None: from rasa.nlu import registry failed_imports = {} for component_name in component_names: if component_name is None: raise InvalidConfigException( "Your pipeline configuration contains a component that is missing " "a name. Please double check your configuration or if this is a " "custom component make sure to implement the name property for " "the component." ) component_class = registry.get_component_class(component_name) unavailable_packages = find_unavailable_packages( component_class.required_packages() ) if unavailable_packages: failed_imports[component_name] = unavailable_packages if failed_imports: dependency_component_map = defaultdict(list) for component, missing_dependencies in failed_imports.items(): for dependency in missing_dependencies: dependency_component_map[dependency].append(component) missing_lines = [ f"{d} (needed for {', '.join(cs)})" for d, cs in dependency_component_map.items() ] missing = "\n - ".join(missing_lines) raise MissingDependencyException( f"Not all required importable packages are installed to use " f"the configured NLU pipeline. " f"To use this pipeline, you need to install the " f"missing modules: \n" f" - {missing}\n" f"Please install the packages that contain the missing modules." ) def validate_component_keys( component: "Component", component_config: Dict[Text, Any] ) -> None: component_name = component_config.get("name") allowed_keys = set(component.defaults.keys()) provided_keys = set(component_config.keys()) provided_keys.discard("name") list_separator = "\n- " for key in provided_keys: if key not in allowed_keys: rasa.shared.utils.io.raise_warning( f"You have provided an invalid key `{key}` for component `{component_name}` in your pipeline. " f"Valid options for `{component_name}` are:\n- " f"{list_separator.join(allowed_keys)}" ) def validate_empty_pipeline(pipeline: List["Component"]) -> None: if len(pipeline) == 0: raise InvalidConfigException( "Can not train an empty pipeline. " "Make sure to specify a proper pipeline in " "the configuration using the 'pipeline' key." 
) def validate_only_one_tokenizer_is_used(pipeline: List["Component"]) -> None: from rasa.nlu.tokenizers.tokenizer import Tokenizer tokenizer_names = [] for component in pipeline: if isinstance(component, Tokenizer): tokenizer_names.append(component.name) if len(tokenizer_names) > 1: raise InvalidConfigException( f"The pipeline configuration contains more than one tokenizer, " f"which is not possible at this time. You can only use one tokenizer. " f"The pipeline contains the following tokenizers: {tokenizer_names}. " ) def _required_component_in_pipeline( required_component: Type["Component"], pipeline: List["Component"] ) -> bool: for previous_component in pipeline: if isinstance(previous_component, required_component): return True return False def validate_required_components(pipeline: List["Component"]) -> None: for i, component in enumerate(pipeline): missing_components = [] for required_component in component.required_components(): if not _required_component_in_pipeline(required_component, pipeline[:i]): missing_components.append(required_component.name) missing_components_str = ", ".join(f"'{c}'" for c in missing_components) if missing_components: raise InvalidConfigException( f"The pipeline configuration contains errors. The component " f"'{component.name}' requires {missing_components_str} to be " f"placed before it in the pipeline. Please " f"add the required components to the pipeline." ) def validate_pipeline(pipeline: List["Component"]) -> None: validate_empty_pipeline(pipeline) validate_only_one_tokenizer_is_used(pipeline) validate_required_components(pipeline) def any_components_in_pipeline(components: Iterable[Text], pipeline: List["Component"]): return any(any(component.name == c for component in pipeline) for c in components) def validate_required_components_from_data( pipeline: List["Component"], data: TrainingData ) -> None: if data.response_examples and not any_components_in_pipeline( ["ResponseSelector"], pipeline ): rasa.shared.utils.io.raise_warning( "You have defined training data with examples for training a response " "selector, but your NLU pipeline does not include a response selector " "component. To train a model on your response selector data, add a " "'ResponseSelector' to your pipeline." ) if data.entity_examples and not any_components_in_pipeline( TRAINABLE_EXTRACTORS, pipeline ): rasa.shared.utils.io.raise_warning( "You have defined training data consisting of entity examples, but " "your NLU pipeline does not include an entity extractor trained on " "your training data. To extract non-pretrained entities, add one of " f"{TRAINABLE_EXTRACTORS} to your pipeline." ) if data.entity_examples and not any_components_in_pipeline( {"DIETClassifier", "CRFEntityExtractor"}, pipeline ): if data.entity_roles_groups_used(): rasa.shared.utils.io.raise_warning( "You have defined training data with entities that have roles/groups, " "but your NLU pipeline does not include a 'DIETClassifier' or a " "'CRFEntityExtractor'. To train entities that have roles/groups, " "add either 'DIETClassifier' or 'CRFEntityExtractor' to your " "pipeline." ) if data.regex_features and not any_components_in_pipeline( ["RegexFeaturizer", "RegexEntityExtractor"], pipeline ): rasa.shared.utils.io.raise_warning( "You have defined training data with regexes, but " "your NLU pipeline does not include a 'RegexFeaturizer' or a " "'RegexEntityExtractor'. To use regexes, include either a " "'RegexFeaturizer' or a 'RegexEntityExtractor' in your pipeline." 
) if data.lookup_tables and not any_components_in_pipeline( ["RegexFeaturizer", "RegexEntityExtractor"], pipeline ): rasa.shared.utils.io.raise_warning( "You have defined training data consisting of lookup tables, but " "your NLU pipeline does not include a 'RegexFeaturizer' or a " "'RegexEntityExtractor'. To use lookup tables, include either a " "'RegexFeaturizer' or a 'RegexEntityExtractor' in your pipeline." ) if data.lookup_tables: if not any_components_in_pipeline( ["CRFEntityExtractor", "DIETClassifier"], pipeline ): rasa.shared.utils.io.raise_warning( "You have defined training data consisting of lookup tables, but " "your NLU pipeline does not include any components that use these " "features. To make use of lookup tables, add a 'DIETClassifier' or a " "'CRFEntityExtractor' with the 'pattern' feature to your pipeline." ) elif any_components_in_pipeline(["CRFEntityExtractor"], pipeline): crf_components = [c for c in pipeline if c.name == "CRFEntityExtractor"] has_pattern_feature = False for crf in crf_components: crf_features = crf.component_config.get("features") has_pattern_feature = "pattern" in itertools.chain(*crf_features) if not has_pattern_feature: rasa.shared.utils.io.raise_warning( "You have defined training data consisting of lookup tables, but " "your NLU pipeline's 'CRFEntityExtractor' does not include the " "'pattern' feature. To featurize lookup tables, add the 'pattern' " "feature to the 'CRFEntityExtractor' in your pipeline." ) if data.entity_synonyms and not any_components_in_pipeline( ["EntitySynonymMapper"], pipeline ): rasa.shared.utils.io.raise_warning( "You have defined synonyms in your training data, but " "your NLU pipeline does not include an 'EntitySynonymMapper'. " "To map synonyms, add an 'EntitySynonymMapper' to your pipeline." ) class MissingArgumentError(ValueError): def __init__(self, message: Text) -> None: super().__init__(message) self.message = message def __str__(self) -> Text: return self.message class UnsupportedLanguageError(RasaException): def __init__(self, component: Text, language: Text) -> None: self.component = component self.language = language super().__init__(component, language) def __str__(self) -> Text: return ( f"component '{self.component}' does not support language '{self.language}'." 
) class ComponentMetaclass(type): @property def name(cls): return cls.__name__ class Component(metaclass=ComponentMetaclass): @property def name(self) -> Text: return type(self).name @property def unique_name(self) -> Text: index = self.component_config.get(COMPONENT_INDEX) return self.name if index is None else f"component_{index}_{self.name}" @classmethod def required_components(cls) -> List[Type["Component"]]: return [] defaults = {} supported_language_list = None not_supported_language_list = None def __init__(self, component_config: Optional[Dict[Text, Any]] = None) -> None: if not component_config: component_config = {} component_config["name"] = self.name self.component_config = rasa.utils.train_utils.override_defaults( self.defaults, component_config ) self.partial_processing_pipeline = None self.partial_processing_context = None @classmethod def required_packages(cls) -> List[Text]: return [] @classmethod def load( cls, meta: Dict[Text, Any], model_dir: Optional[Text] = None, model_metadata: Optional["Metadata"] = None, cached_component: Optional["Component"] = None, **kwargs: Any, ) -> "Component": if cached_component: return cached_component return cls(meta) @classmethod def create( cls, component_config: Dict[Text, Any], config: RasaNLUModelConfig ) -> "Component": language = config.language if not cls.can_handle_language(language): raise UnsupportedLanguageError(cls.name, language) return cls(component_config) def provide_context(self) -> Optional[Dict[Text, Any]]: pass def train( self, training_data: TrainingData, config: Optional[RasaNLUModelConfig] = None, **kwargs: Any, ) -> None: pass def process(self, message: Message, **kwargs: Any) -> None: pass def persist(self, file_name: Text, model_dir: Text) -> Optional[Dict[Text, Any]]: pass @classmethod def cache_key( cls, component_meta: Dict[Text, Any], model_metadata: "Metadata" ) -> Optional[Text]: return None def __getstate__(self) -> Any: d = self.__dict__.copy() if "partial_processing_context" in d: del d["partial_processing_context"] if "partial_processing_pipeline" in d: del d["partial_processing_pipeline"] return d def __eq__(self, other) -> bool: return self.__dict__ == other.__dict__ def prepare_partial_processing( self, pipeline: List["Component"], context: Dict[Text, Any] ) -> None: self.partial_processing_pipeline = pipeline self.partial_processing_context = context def partially_process(self, message: Message) -> Message: if self.partial_processing_context is not None: for component in self.partial_processing_pipeline: component.process(message, **self.partial_processing_context) else: logger.info("Failed to run partial processing due to missing pipeline.") return message @classmethod def can_handle_language(cls, language: Hashable) -> bool: if language is None or ( cls.supported_language_list is None and cls.not_supported_language_list is None ): return True if cls.supported_language_list and cls.not_supported_language_list: raise RasaException( "Only one of `supported_language_list` and `not_supported_language_list` can be set to not None" ) supported_language_list = ( cls.supported_language_list if cls.supported_language_list is not None else [] ) not_supported_language_list = ( cls.not_supported_language_list if cls.not_supported_language_list is not None else [] ) if not supported_language_list and not not_supported_language_list: raise RasaException( "Empty lists for both " "`supported_language_list` and `not_supported language_list` " "is not a valid setting. 
If you meant to allow all languages " "for the component use `None` for both of them." ) if supported_language_list: return language in supported_language_list else: return language not in not_supported_language_list C = typing.TypeVar("C", bound=Component) class ComponentBuilder: def __init__(self, use_cache: bool = True) -> None: self.use_cache = use_cache self.component_cache = {}
Apache License 2.0
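Hedged sketch (not part of the record above) of how the component cache is typically exercised: sharing one ComponentBuilder lets repeated model loads hit the cached-component lookup instead of re-creating heavy components. The model paths are placeholders, and the Interpreter.load entry point is an assumption based on the Rasa NLU API of this version rather than something shown in the record.

from rasa.nlu.components import ComponentBuilder
from rasa.nlu.model import Interpreter

builder = ComponentBuilder(use_cache=True)   # one cache shared across loads

# Loading several models through the same builder lets components with matching
# cache keys (e.g. heavyweight featurizers) be reused instead of re-created.
interpreter_a = Interpreter.load("models/nlu-a", component_builder=builder)
interpreter_b = Interpreter.load("models/nlu-b", component_builder=builder)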
morganstanley/testplan
testplan/common/utils/testing.py
captured_logging
python
def captured_logging(logger, level=logging.INFO):
    class LogWrapper(object):
        def __init__(self):
            self.buffer = io.StringIO()
            self.stream_handler = logging.StreamHandler(self.buffer)
            self._output = None

        @property
        def output(self):
            if self._output is None:
                self.stream_handler.flush()
                self._output = self.buffer.getvalue().replace("\r\n", "\n")
            return self._output

    log_wrapper = LogWrapper()
    log_wrapper.stream_handler.setLevel(level)
    logger.addHandler(log_wrapper.stream_handler)
    yield log_wrapper
    logger.removeHandler(log_wrapper.stream_handler)
Utility for capturing a logger object's output at a specific level, with a default level of INFO. Useful for command line output testing.
https://github.com/morganstanley/testplan/blob/8cb6a0ed0682698b2d6af82382fbb66d8d9e3ff7/testplan/common/utils/testing.py#L96-L121
import sys import functools import logging import pprint import os import io import warnings from lxml import objectify from contextlib import contextmanager from ..report.base import Report, ReportGroup from ..utils.comparison import is_regex import collections null_handler = logging.NullHandler() def context_wrapper(ctx_manager, *ctx_args, **ctx_kwargs): def _wrapper(func): @functools.wraps(func) def _inner(*args, **kwargs): with ctx_manager(*ctx_args, **ctx_kwargs): return func(*args, **kwargs) return _inner return _wrapper @contextmanager def argv_overridden(*override_ctx): argv_backup = list(sys.argv) sys.argv = [argv_backup[0]] + list(override_ctx) yield sys.argv = argv_backup def override_argv(*override_ctx): return context_wrapper(argv_overridden, *override_ctx) @contextmanager def log_propagation_disabled(logger): old_prop = logger.propagate logger.propagate = False logger.addHandler(null_handler) yield logger.propagate = old_prop logger.removeHandler(null_handler) def disable_log_propagation(logger): return context_wrapper(log_propagation_disabled, logger) @contextmanager def log_level_changed(logger, level): old_level = logger.level logger.setLevel(level) yield logger.setLevel(old_level) @contextmanager
Apache License 2.0
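Usage sketch derived directly from the implementation above (not part of the record); the logger name and messages are placeholders.

import logging
from testplan.common.utils.testing import captured_logging

logger = logging.getLogger("example")
logger.setLevel(logging.INFO)

with captured_logging(logger) as log_capture:
    logger.info("hello from the test")
    logger.debug("filtered out at the default INFO level")

assert "hello from the test" in log_capture.output
assert "filtered out" not in log_capture.output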
onicagroup/runway
runway/cfngin/hooks/utils.py
BlankBlueprint.create_template
python
def create_template(self) -> None:
Create template without raising NotImplementedError.
https://github.com/onicagroup/runway/blob/6e0d543512325a92265c140f386c163f6be410b4/runway/cfngin/hooks/utils.py#L28-L29
from __future__ import annotations import collections.abc import logging import os import sys from typing import TYPE_CHECKING, Any, Dict, List, cast import pydantic from ...exceptions import FailedVariableLookup from ...utils import BaseModel, load_object_from_string from ...variables import Variable, resolve_variables from ..blueprints.base import Blueprint if TYPE_CHECKING: from ...config.models.cfngin import CfnginHookDefinitionModel from ...context import CfnginContext from ..providers.aws.default import Provider LOGGER = logging.getLogger(__name__) class BlankBlueprint(Blueprint):
Apache License 2.0
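BlankBlueprint's create_template is a deliberate no-op so hook code can instantiate a Blueprint without defining any resources; the subclass below is a hypothetical sketch of how a real blueprint would differ, not code from this repository.

class DemoBlueprint(BlankBlueprint):
    """Hypothetical subclass: a real blueprint would add resources here."""

    def create_template(self) -> None:
        # a real blueprint would populate self.template here;
        # BlankBlueprint itself leaves the template empty on purpose
        pass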
ganeti/ganeti
lib/hypervisor/hv_xen.py
_IsCrashed
python
def _IsCrashed(instance_info): return instance_info.count('c') > 0
Returns whether an instance is in the crashed Xen state. When a horrible misconfiguration happens to a Xen domain, it can crash, meaning that it encounters a violent ending. While this state usually flashes only temporarily before the domain is restarted, being able to check for it allows Ganeti to avoid acting confused and to do something about it.
https://github.com/ganeti/ganeti/blob/4d21019c72cba4d746f5d17ca22098f4c7682e9c/lib/hypervisor/hv_xen.py#L303-L312
import logging import errno import os import string import shutil import time from io import StringIO from ganeti import constants from ganeti import errors from ganeti import utils from ganeti.hypervisor import hv_base from ganeti import netutils from ganeti import objects from ganeti import pathutils XEND_CONFIG_FILE = utils.PathJoin(pathutils.XEN_CONFIG_DIR, "xend-config.sxp") XL_CONFIG_FILE = utils.PathJoin(pathutils.XEN_CONFIG_DIR, "xen/xl.conf") VIF_BRIDGE_SCRIPT = utils.PathJoin(pathutils.XEN_CONFIG_DIR, "scripts/vif-bridge") _DOM0_NAME = "Domain-0" _DISK_LETTERS = string.ascii_lowercase _FILE_DRIVER_MAP = { constants.FD_LOOP: "file", constants.FD_BLKTAP: "tap:aio", constants.FD_BLKTAP2: "tap2:tapdisk:aio", } def _CreateConfigCpus(cpu_mask): cpu_list = utils.ParseMultiCpuMask(cpu_mask) if len(cpu_list) == 1: all_cpu_mapping = cpu_list[0] if all_cpu_mapping == constants.CPU_PINNING_OFF: return None else: return "cpu = \"%s\"" % ",".join(map(str, all_cpu_mapping)) else: def _GetCPUMap(vcpu): if vcpu[0] == constants.CPU_PINNING_ALL_VAL: cpu_map = constants.CPU_PINNING_ALL_XEN else: cpu_map = ",".join(map(str, vcpu)) return "\"%s\"" % cpu_map return "cpus = [ %s ]" % ", ".join(map(_GetCPUMap, cpu_list)) def _RunInstanceList(fn, instance_list_errors): result = fn() if result.failed: logging.error("Retrieving the instance list from xen failed (%s): %s", result.fail_reason, result.output) instance_list_errors.append(result) raise utils.RetryAgain() return result.stdout.splitlines() class _InstanceCrashed(errors.GenericError): def _ParseInstanceList(lines, include_node): result = [] for line in lines[1:]: data = line.split() if len(data) != 6: raise errors.HypervisorError("Can't parse instance list," " line: %s" % line) try: data[1] = int(data[1]) data[2] = int(data[2]) data[3] = int(data[3]) data[4] = _XenToHypervisorInstanceState(data[4]) data[5] = float(data[5]) except (TypeError, ValueError) as err: raise errors.HypervisorError("Can't parse instance list," " line: %s, error: %s" % (line, err)) except _InstanceCrashed: continue if include_node or data[0] != _DOM0_NAME: result.append(data) return result def _InstanceDomID(info): return info[1] def _InstanceRunning(info): return info[4] == hv_base.HvInstanceState.RUNNING def _InstanceRuntime(info): return info[5] def _GetAllInstanceList(fn, include_node, delays, timeout): instance_list_errors = [] try: lines = utils.Retry(_RunInstanceList, delays, timeout, args=(fn, instance_list_errors)) except utils.RetryTimeout: if instance_list_errors: instance_list_result = instance_list_errors.pop() errmsg = ("listing instances failed, timeout exceeded (%s): %s" % (instance_list_result.fail_reason, instance_list_result.output)) else: errmsg = "listing instances failed" raise errors.HypervisorError(errmsg) return _ParseInstanceList(lines, include_node) def _IsInstanceRunning(instance_info): allowable_running_prefixes = [ "r--", "rb-", "-b-", "---", ] def _RunningWithSuffix(suffix): return [x + suffix for x in allowable_running_prefixes] return instance_info in _RunningWithSuffix("---") or instance_info in _RunningWithSuffix("ss-") or instance_info in _RunningWithSuffix("sr-") or instance_info == "-----d" def _IsInstanceShutdown(instance_info): return instance_info == "---s--" or instance_info == "---s-d" def _IgnorePaused(instance_info): return instance_info.replace('p', '-')
BSD 2-Clause Simplified License
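A tiny sketch of the crash check above; the six-character strings mimic the state column of `xm list`/`xl list` output and are illustrative, and the import path is derived from this record's module.

from ganeti.hypervisor.hv_xen import _IsCrashed  # private helper shown above

assert _IsCrashed("----c-")      # 'c' flag present -> domain crashed
assert not _IsCrashed("r-----")  # running, no crash flag
assert not _IsCrashed("-b----")  # blocked, no crash flag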
petercorke/bdsim
bdsim/blocks/discrete.py
DIntegrator.__init__
python
def __init__(self, clock, x0=0, gain=1.0, min=None, max=None, **kwargs):
    super().__init__(clock=clock, **kwargs)

    if isinstance(x0, (int, float)):
        self.ndstates = 1
        if min is None:
            min = -math.inf
        if max is None:
            max = math.inf
    else:
        if isinstance(x0, np.ndarray):
            if x0.ndim > 1:
                raise ValueError('state must be a 1D vector')
        else:
            x0 = base.getvector(x0)
        self.ndstates = x0.shape[0]
        if min is None:
            min = [-math.inf] * self.nstates
        elif len(min) != self.nstates:
            raise ValueError('minimum bound length must match x0')
        if max is None:
            max = [math.inf] * self.nstates
        elif len(max) != self.nstates:
            raise ValueError('maximum bound length must match x0')

    self._x0 = np.r_[x0]
    self.min = np.r_[min]
    self.max = np.r_[max]
    self.gain = gain
    print('nstates', self.nstates)
:param clock: clock source for this discrete-time block :param x0: Initial state, defaults to 0 :type x0: array_like, optional :param gain: integrator gain, defaults to 1.0 :type gain: float, optional :param min: Minimum value of state, defaults to None :type min: float or array_like, optional :param max: Maximum value of state, defaults to None :type max: float or array_like, optional :param kwargs: common Block options :return: a DINTEGRATOR block :rtype: DIntegrator instance Create a discrete-time integrator block. Output is the time integral of the input. The state can be a scalar or a vector; this is determined by the type of ``x0``. The minimum and maximum values can be: - a scalar, in which case the same value applies to every element of the state vector, or - a vector, of the same shape as ``x0``, that applies elementwise to the state.
https://github.com/petercorke/bdsim/blob/0b6afee0ae889e39a4be036f2aea8c2261cce0f8/bdsim/blocks/discrete.py#L110-L165
import numpy as np import math from math import sin, cos, atan2, sqrt, pi import matplotlib.pyplot as plt import inspect from spatialmath import base from bdsim.components import ClockedBlock class ZOH(ClockedBlock): nin = 1 nout = 1 def __init__(self, clock, x0=0, min=None, max=None, **kwargs): self.type = 'sampler' super().__init__(nin=1, nout=1, clock=clock, **kwargs) x0 = base.getvector(x0) self._x0 = x0 self.ndstates = len(x0) def output(self, t=None): return [self._x] def next(self): xnext = np.array(self.inputs) return xnext class DIntegrator(ClockedBlock): nin = 1 nout = 1
MIT License
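A minimal construction sketch for the discrete integrator above; `clock` is assumed to come from the enclosing bdsim block diagram (its creation is not shown in this record), and the scalar initial state keeps the default infinite bounds.

from bdsim.blocks.discrete import DIntegrator

# `clock` is assumed to be a bdsim clock object obtained from the block-diagram API.
integ = DIntegrator(clock, x0=0.0, gain=0.5)

# scalar x0 -> a single discrete state, unclipped by default
assert integ.ndstates == 1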
purestorage-openconnect/py-pure-client
pypureclient/flashblade/FB_2_1/models/smtp_server_get_response.py
SmtpServerGetResponse.to_dict
python
def to_dict(self):
    result = {}

    for attr, _ in six.iteritems(self.swagger_types):
        if hasattr(self, attr):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
    if issubclass(SmtpServerGetResponse, dict):
        for key, value in self.items():
            result[key] = value

    return result
Returns the model properties as a dict
https://github.com/purestorage-openconnect/py-pure-client/blob/2d9fdef0b73321cea9613e7d1eb881b42845099b/pypureclient/flashblade/FB_2_1/models/smtp_server_get_response.py#L78-L104
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flashblade.FB_2_1 import models class SmtpServerGetResponse(object): swagger_types = { 'continuation_token': 'str', 'total_item_count': 'int', 'items': 'list[SmtpServer]' } attribute_map = { 'continuation_token': 'continuation_token', 'total_item_count': 'total_item_count', 'items': 'items' } required_args = { } def __init__( self, continuation_token=None, total_item_count=None, items=None, ): if continuation_token is not None: self.continuation_token = continuation_token if total_item_count is not None: self.total_item_count = total_item_count if items is not None: self.items = items def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `SmtpServerGetResponse`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): return None else: return value
BSD 2-Clause Simplified License
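A quick sketch of serializing the response model with to_dict; the field values are made up and the import path is derived from this record's module path.

from pypureclient.flashblade.FB_2_1.models.smtp_server_get_response import SmtpServerGetResponse

resp = SmtpServerGetResponse(continuation_token="abc123", total_item_count=1)

print(resp.to_dict())
# -> {'continuation_token': 'abc123', 'total_item_count': 1}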
avigad/boole
boole/core/expr.py
Kind.accept
python
def accept(self, visitor, *args, **kwargs): return visitor.visit_kind(self, *args, **kwargs)
The accept method allows the definition of recursive functions over objects of type expr. Arguments: - `visitor`: an object of class ExprVisitor - `*args`: arguments to the visitor instance - `**kwargs`: named arguments to the visitor instance
https://github.com/avigad/boole/blob/2a436c2967dbc968f6a5877c220b9757c3bc17c3/boole/core/expr.py#L201-L210
from expr_base import * import vargen class Const(Expr): def __init__(self, name, type, value=None, **kwargs): Expr.__init__(self) self.name = name self.type = type self.value = value for k in kwargs: self.info[k] = kwargs[k] self._hash = hash(('Const', self.name, self.type)) def accept(self, visitor, *args, **kwargs): return visitor.visit_const(self, *args, **kwargs) def to_string(self): return self.name def is_const(self): return True def eq(self, expr): if expr.is_const(): return self.name == expr.name else: return False class DB(Expr): def __init__(self, index): Expr.__init__(self) self.index = index self._hash = hash(("DB", self.index)) def incr(self, inc): self.index += inc def decr(self): if self.index == 0: raise ExprError("Cannot decrement a DB\ variable with index 0", self) else: self.index -= 1 def accept(self, visitor, *args, **kwargs): return visitor.visit_db(self, *args, **kwargs) def to_string(self): return "DB({0!s})".format(self.index) def is_db(self): return True def eq(self, expr): if expr.is_db(): return self.index == expr.index else: return False class Type(Expr): def __init__(self): Expr.__init__(self) self.name = 'Type' self._hash = hash('Type') def accept(self, visitor, *args, **kwargs): return visitor.visit_type(self, *args, **kwargs) def to_string(self): return "Type()" def is_type(self): return True def eq(self, expr): return expr.is_type() class Kind(Expr): def __init__(self): Expr.__init__(self) self._hash = hash('Kind')
Apache License 2.0
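A sketch of the visitor dispatch that accept enables; NameOf is a hypothetical visitor that only implements the methods it needs rather than the full ExprVisitor interface, and the import is derived from this record's module path.

from boole.core.expr import Kind, Type


class NameOf(object):
    """Hypothetical visitor: maps each expression node to a printable tag."""

    def visit_kind(self, expr, *args, **kwargs):
        return "Kind"

    def visit_type(self, expr, *args, **kwargs):
        return "Type"


print(Kind().accept(NameOf()))   # -> Kind
print(Type().accept(NameOf()))   # -> Type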
its-django/mysite
mysite/restaurants/views.py
RestaurantsView.dispatch
python
def dispatch(self, request, *args, **kwargs): return super(RestaurantsView, self).dispatch(request, *args, **kwargs)
Return the login-protected dispatch. :param request: the incoming HTTP request :returns: the result of the original dispatch
https://github.com/its-django/mysite/blob/8faa84867af5645d3d3d8e67fe8020be4dc68551/mysite/restaurants/views.py#L45-L51
from django.http import HttpResponseRedirect from django.shortcuts import render_to_response, render from django.utils import timezone from django.contrib.auth.decorators import login_required, user_passes_test from django.views.generic.list import ListView from django.utils.decorators import method_decorator from django.views.generic.detail import DetailView, SingleObjectMixin from django.views.generic.edit import FormView from restaurants.models import Restaurant, Comment from restaurants.forms import CommentForm from restaurants.permissions import user_can_comment class MenuView(DetailView): model = Restaurant template_name = 'menu.html' context_object_name = 'restaurant' @method_decorator(login_required) def dispatch(self, request, *args, **kwargs): return super(MenuView, self).dispatch(request, *args, **kwargs) class RestaurantsView(ListView): model = Restaurant template_name = 'restaurants_list.html' context_object_name = 'restaurants' @method_decorator(login_required)
Apache License 2.0
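The decorated dispatch is exercised through normal URL routing; a sketch of a matching URLconf entry in the old-style syntax this Django project uses (the pattern and route name are illustrative).

# urls.py (illustrative)
from django.conf.urls import url

from restaurants.views import RestaurantsView

urlpatterns = [
    # anonymous requests are redirected to the login page because
    # dispatch() is wrapped with @method_decorator(login_required)
    url(r'^restaurants/$', RestaurantsView.as_view(), name='restaurants_list'),
]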
edx/django-user-tasks
user_tasks/signals.py
start_user_task
python
def start_user_task(sender=None, **kwargs):
    try:
        current_connection = transaction.get_connection()
    except Exception:
        current_connection = None

    if current_connection and (not current_connection.in_atomic_block):
        close_old_connections()

    if isinstance(sender, UserTaskMixin):
        sender.status.start()
Update the status record when execution of a :py:class:`UserTaskMixin` begins.
https://github.com/edx/django-user-tasks/blob/b2e1d5c3e6b2b7d3ff0b67d6b8aedd1a6c29b5cf/user_tasks/signals.py#L193-L215
import logging from uuid import uuid4 from celery.signals import before_task_publish, task_failure, task_prerun, task_retry, task_success from django.contrib.auth import get_user_model from django.db import close_old_connections, transaction from django.utils.module_loading import import_string from user_tasks import user_task_stopped from .exceptions import TaskCanceledException from .models import UserTaskStatus from .tasks import UserTaskMixin LOGGER = logging.getLogger(__name__) @before_task_publish.connect def create_user_task(sender=None, body=None, **kwargs): try: task_class = import_string(sender) except ImportError: return if issubclass(task_class.__class__, UserTaskMixin): arguments_dict = task_class.arguments_as_dict(*body['args'], **body['kwargs']) user_id = _get_user_id(arguments_dict) task_id = body['id'] if body.get('callbacks', []): _create_chain_entry(user_id, task_id, task_class, body['args'], body['kwargs'], body['callbacks']) return if body.get('chord', None): _create_chord_entry(task_id, task_class, body, user_id) return parent = _get_or_create_group_parent(body, user_id) name = task_class.generate_name(arguments_dict) total_steps = task_class.calculate_total_steps(arguments_dict) UserTaskStatus.objects.get_or_create( task_id=task_id, defaults={'user_id': user_id, 'parent': parent, 'name': name, 'task_class': sender, 'total_steps': total_steps}) if parent: parent.increment_total_steps(total_steps) def _create_chain_entry(user_id, task_id, task_class, args, kwargs, callbacks, parent=None): LOGGER.debug(task_class) if issubclass(task_class.__class__, UserTaskMixin): arguments_dict = task_class.arguments_as_dict(*args, **kwargs) name = task_class.generate_name(arguments_dict) total_steps = task_class.calculate_total_steps(arguments_dict) parent_name = kwargs.get('user_task_name', '') with transaction.atomic(): if parent is None: parent = UserTaskStatus.objects.create( is_container=True, name=parent_name, task_class='celery.chain', task_id=str(uuid4()), total_steps=0, user_id=user_id) UserTaskStatus.objects.create( name=name, parent=parent, task_class=task_class, task_id=task_id, total_steps=total_steps, user_id=user_id) parent.increment_total_steps(total_steps) if parent_name and not parent.name: parent.set_name(parent_name) for callback in callbacks: callback_class = import_string(callback['task']) _create_chain_entry( user_id, callback['options']['task_id'], callback_class, callback['args'], callback['kwargs'], callback['options'].get('link', []), parent=parent ) def _create_chord_entry(task_id, task_class, message_body, user_id): args = message_body['args'] kwargs = message_body['kwargs'] arguments_dict = task_class.arguments_as_dict(*args, **kwargs) name = task_class.generate_name(arguments_dict) total_steps = task_class.calculate_total_steps(arguments_dict) parent_name = kwargs.get('user_task_name', '') chord_data = message_body['chord'] group_id = message_body['taskset'] with transaction.atomic(): group, created = UserTaskStatus.objects.get_or_create( task_id=group_id, defaults={'is_container': True, 'name': parent_name, 'task_class': 'celery.group', 'total_steps': total_steps, 'user_id': user_id}) if created: chord = UserTaskStatus.objects.create( is_container=True, name=parent_name, task_class='celery.chord', task_id=str(uuid4()), total_steps=total_steps, user_id=user_id) group.parent = chord group.save(update_fields={'parent', 'modified'}) else: chord = None group.increment_total_steps(total_steps) if parent_name and not group.name: group.set_name(parent_name) 
UserTaskStatus.objects.create( name=name, parent=group, task_class=task_class, task_id=task_id, total_steps=total_steps, user_id=user_id) if not created: return task_id = chord_data['options']['task_id'] body_task = chord_data['task'] body_class = import_string(body_task).__class__ if not issubclass(body_class, UserTaskMixin): return args = chord_data['args'] kwargs = chord_data['kwargs'] arguments_dict = body_class.arguments_as_dict(*args, **kwargs) name = body_class.generate_name(arguments_dict) total_steps = body_class.calculate_total_steps(arguments_dict) UserTaskStatus.objects.get_or_create( task_id=task_id, defaults={'name': name, 'parent': chord, 'task_class': body_task, 'total_steps': total_steps, 'user_id': user_id}) chord.increment_total_steps(total_steps) def _get_or_create_group_parent(message_body, user_id): parent_id = message_body.get('taskset', None) if not parent_id: return None parent_class = 'celery.group' parent_name = message_body['kwargs'].get('user_task_name', '') parent, _ = UserTaskStatus.objects.get_or_create( task_id=parent_id, defaults={'is_container': True, 'name': parent_name, 'task_class': parent_class, 'total_steps': 0, 'user_id': user_id}) if parent_name and not parent.name: parent.name = parent_name parent.save(update_fields={'name', 'modified'}) return parent def _get_user_id(arguments_dict): if 'user_id' not in arguments_dict: raise TypeError('Each invocation of a UserTaskMixin subclass must include the user_id') user_id = arguments_dict['user_id'] try: get_user_model().objects.get(pk=user_id) except (ValueError, get_user_model().DoesNotExist) as import_exception: raise TypeError(f'Invalid user_id: {user_id}') from import_exception return user_id @task_prerun.connect
Apache License 2.0
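start_user_task runs on celery's task_prerun signal for every UserTaskMixin task; a sketch of a task it would act on, assuming the UserTask base class that django-user-tasks documents (the task name and body are illustrative).

from celery import shared_task
from user_tasks.tasks import UserTask  # assumed base combining celery.Task and UserTaskMixin


@shared_task(base=UserTask, bind=True)
def demo_export(self, user_id, file_format):
    # by the time this body runs, task_prerun has fired and start_user_task()
    # has moved self.status from pending to in-progress
    self.status.increment_completed_steps()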
cdriehuys/django-rest-email-auth
rest_email_auth/app_settings.py
AppSettings.CONFIRMATION_EXPIRATION
python
def CONFIRMATION_EXPIRATION(self):
    import datetime

    return self._setting(
        "CONFIRMATION_EXPIRATION", datetime.timedelta(days=1)
    )
The duration that an email confirmation is valid for. Defaults to 1 day.
https://github.com/cdriehuys/django-rest-email-auth/blob/9310b68cf24b6bf16cd0177d6b19ab0470382244/rest_email_auth/app_settings.py#L45-L55
import sys class AppSettings(object): def __init__(self): assert self.EMAIL_VERIFICATION_URL assert self.PASSWORD_RESET_URL def _setting(self, name, default): from django.conf import settings settings_dict = getattr(settings, "REST_EMAIL_AUTH", {}) return settings_dict.get(name, default) @property
MIT License
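The property reads its value from the REST_EMAIL_AUTH settings dict; a sketch of shortening the confirmation window to one hour (the URL values are placeholders required by the other settings this AppSettings class asserts).

# settings.py (illustrative)
import datetime

REST_EMAIL_AUTH = {
    "EMAIL_VERIFICATION_URL": "https://example.com/verify/{key}",
    "PASSWORD_RESET_URL": "https://example.com/reset/{key}",
    # overrides the 1-day default returned by AppSettings.CONFIRMATION_EXPIRATION
    "CONFIRMATION_EXPIRATION": datetime.timedelta(hours=1),
}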
adamlwgriffiths/pyglet
pyglet/font/__init__.py
GlyphString.get_break_index
python
def get_break_index(self, from_index, width):
    to_index = from_index
    if from_index >= len(self.text):
        return from_index
    if from_index:
        width += self.cumulative_advance[from_index-1]
    for i, (c, w) in enumerate(
            zip(self.text[from_index:], self.cumulative_advance[from_index:])):
        if c in u'\u0020\u200b':
            to_index = i + from_index + 1
        if c == '\n':
            return i + from_index + 1
        if w > width:
            return to_index
    return to_index
Find a breakpoint within the text for a given width. Returns a valid breakpoint after `from_index` so that the text between `from_index` and the breakpoint fits within `width` pixels. This method uses precomputed cumulative glyph widths to give a quick answer, and so is much faster than `pyglet.font.base.Font.get_glyphs_for_width`. :Parameters: `from_index` : int Index of text to begin at, or 0 for the beginning of the string. `width` : float Maximum width to use. :rtype: int :return: the index of the text which will be used as the breakpoint, or `from_index` if there is no valid breakpoint.
https://github.com/adamlwgriffiths/pyglet/blob/18bd86a8f235e4f5edd94b0d38073d0e5477a366/pyglet/font/__init__.py#L149-L184
__docformat__ = 'restructuredtext' __version__ = '$Id$' import sys import os import math import weakref import pyglet from pyglet.gl import * from pyglet import gl from pyglet import image from pyglet import window class GlyphString(object): def __init__(self, text, glyphs, x=0, y=0): lst = [] texture = None self.text = text self.states = [] self.cumulative_advance = [] state_from = 0 state_length = 0 for i, glyph in enumerate(glyphs): if glyph.owner != texture: if state_length: self.states.append((state_from, state_length, texture)) texture = glyph.owner state_from = i state_length = 0 state_length += 1 t = glyph.tex_coords lst += [t[0], t[1], t[2], 1., x + glyph.vertices[0], y + glyph.vertices[1], 0., 1., t[3], t[4], t[5], 1., x + glyph.vertices[2], y + glyph.vertices[1], 0., 1., t[6], t[7], t[8], 1., x + glyph.vertices[2], y + glyph.vertices[3], 0., 1., t[9], t[10], t[11], 1., x + glyph.vertices[0], y + glyph.vertices[3], 0., 1.] x += glyph.advance self.cumulative_advance.append(x) self.states.append((state_from, state_length, texture)) self.array = (c_float * len(lst))(*lst) self.width = x
BSD 3-Clause New or Revised License
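A word-wrapping sketch built on get_break_index; the font, text, and 120-pixel width are arbitrary, and creating glyphs needs a GL context (here an invisible pyglet window) in practice.

import pyglet

window = pyglet.window.Window(visible=False)   # provides the GL context
font = pyglet.font.load('Arial', 12)
text = 'The quick brown fox jumps over the lazy dog'
glyph_string = pyglet.font.GlyphString(text, font.get_glyphs(text))

start = 0
while start < len(text):
    end = glyph_string.get_break_index(start, 120)
    if end == start:      # nothing fits: stop rather than loop forever
        break
    print(text[start:end])
    start = end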
pnnl/safekit
safekit/features/lanl/lanl_agg_features.py
part_of_day
python
def part_of_day(seconds):
    time_of_day_in_seconds = int(seconds) % 86400
    daypart = time_of_day_in_seconds / 21600
    return daymap[daypart]
:param seconds: elapsed time in seconds; only the time of day (``seconds % 86400``) is used :return: the six-hour daypart label from ``daymap``, e.g. '12am-6am'
https://github.com/pnnl/safekit/blob/92c004bc72f1480a4f9b26d304a900cbc8dea48d/safekit/features/lanl/lanl_agg_features.py#L82-L90
import os import sys cyberpath = '/'.join(os.path.realpath(__file__).split('/')[:-4]) sys.path.insert(0, cyberpath) from safekit.features import merge_streams import argparse from itertools import product from collections import Counter from pprint import pprint import numpy as np def return_parser(): parser = argparse.ArgumentParser("Crisp aggregate feature derivation script.") parser.add_argument('-datapath', type=str, help='Path to files to transliterate.') parser.add_argument('-outfile', type=str, help='Where to write derived features.') parser.add_argument('-redpath', type=str, help='Where the json of completely specified redteam events is.') return parser def popularness(id, counter): if float(counter[id])/float(counter['total']) < .05: return 'unpop_' else: return 'common_' def gen_popularness(id, counter): diff = counter['mean'] - counter[id] if diff <= 0: return 'common_' else: return 'unpop_' class ID: def __init__(self): self.id = 0 self.map = {} def __call__(self, value): if value not in self.map: eyedee = self.id self.map[value] = self.id self.id += 1 else: eyedee = self.map[value] return eyedee def second_to_day(seconds): day = int(seconds)/86400 assert day < 58, 'Too many seconds, reached day %s' % day return day daymap = {0: '12am-6am', 1: '6am-12pm', 2: '12pm-6pm', 3: '6pm-12am'}
MIT License
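A quick sketch of the daypart bucketing; note that `time_of_day_in_seconds / 21600` relies on Python 2 integer division to land on the integer keys of ``daymap``. The import path is derived from this record's module path.

from safekit.features.lanl.lanl_agg_features import part_of_day

print(part_of_day(1 * 3600))           # 01:00 -> '12am-6am'
print(part_of_day(13 * 3600))          # 13:00 -> '12pm-6pm'
print(part_of_day(86400 + 19 * 3600))  # 19:00 on the next day -> '6pm-12am'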
cohesity/management-sdk-python
cohesity_management_sdk/models/delete_protection_job_param.py
DeleteProtectionJobParam.__init__
python
def __init__(self, delete_snapshots=None): self.delete_snapshots = delete_snapshots
Constructor for the DeleteProtectionJobParam class
https://github.com/cohesity/management-sdk-python/blob/1c085d5a10f5f1a87b700e7ad1fc1dcabda41ae5/cohesity_management_sdk/models/delete_protection_job_param.py#L22-L27
class DeleteProtectionJobParam(object): _names = { "delete_snapshots":'deleteSnapshots' }
Apache License 2.0
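A minimal sketch of building the request-body model above; delete_snapshots is its only field, and how the object is passed to the SDK's delete call is plumbing not shown in this record.

from cohesity_management_sdk.models.delete_protection_job_param import DeleteProtectionJobParam

params = DeleteProtectionJobParam(delete_snapshots=True)

print(params.delete_snapshots)  # True: also remove the job's existing snapshots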
tmm1/graphite
webapp/graphite/graphlot/views.py
graphlot_render
python
def graphlot_render(request):
    metrics = []
    for target in request.GET.getlist('target'):
        metrics.append(dict(name=target, yaxis="one"))
    for target in request.GET.getlist('y2target'):
        metrics.append(dict(name=target, yaxis="two"))
    untiltime = request.GET.get('until', "-0hour")
    fromtime = request.GET.get('from', "-24hour")
    events = request.GET.get('events', "")

    context = {
        'metric_list' : metrics,
        'fromtime' : fromtime,
        'untiltime' : untiltime,
        'events' : events,
        'slash' : get_script_prefix()
    }
    return render_to_response("graphlot.html", context)
Render the main graphlot view.
https://github.com/tmm1/graphite/blob/8f17c2c48412270b8f4e849b16bf55866f5084cb/webapp/graphite/graphlot/views.py#L15-L33
import re from django.shortcuts import render_to_response from django.http import HttpResponse, Http404, HttpResponseBadRequest from django.conf import settings import simplejson from graphite.render.views import parseOptions from graphite.render.evaluator import evaluateTarget from graphite.storage import STORE from django.core.urlresolvers import get_script_prefix
Apache License 2.0
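The view is driven purely by query-string parameters; a sketch of hitting it with Django's test client (the /graphlot/ URL prefix and the metric names are assumptions).

from django.test import Client

client = Client()
response = client.get('/graphlot/', {
    'target': ['servers.web1.loadavg'],   # left axis ("one")
    'y2target': ['servers.web1.cpu'],     # right axis ("two")
    'from': '-6hour',
    'until': '-0hour',
})
assert response.status_code == 200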
wikimedia/pywikibot
tests/wikibase_tests.py
TestWbQuantityNonDry.setUp
python
def setUp(self):
    super().setUp()
    self.repo = self.get_repo()
    self.version = self.repo.mw_version
Override setup to store the repo and its version.
https://github.com/wikimedia/pywikibot/blob/5097f5b9a7ef9d39f35f17edd11faf3086a01d1d/tests/wikibase_tests.py#L473-L477
import copy import json import unittest from contextlib import suppress from decimal import Decimal import pywikibot from pywikibot import pagegenerators from pywikibot.exceptions import ( InvalidTitleError, IsNotRedirectPageError, IsRedirectPageError, NoPageError, UnknownExtensionError, WikiBaseError, ) from pywikibot.page import ItemPage, Page, PropertyPage, WikibasePage from pywikibot.site import Namespace, NamespacesDict from pywikibot.tools import MediaWikiVersion, suppress_warnings from tests import WARN_SITE_CODE, join_pages_path, mock from tests.aspects import TestCase, WikidataTestCase from tests.basepage import ( BasePageLoadRevisionsCachingTestBase, BasePageMethodsTestBase, ) def _get_test_unconnected_page(site): gen = pagegenerators.NewpagesPageGenerator(site=site, total=10, namespaces=[1, ]) for page in gen: if not page.properties().get('wikibase_item'): return page class WbRepresentationTestCase(WikidataTestCase): def _test_hashable(self, representation): list_of_dupes = [representation, representation] self.assertLength(set(list_of_dupes), 1) class TestLoadRevisionsCaching(BasePageLoadRevisionsCachingTestBase, WikidataTestCase): def setUp(self): self._page = ItemPage(self.get_repo(), 'Q60') super().setUp() def test_page_text(self): with suppress_warnings(WARN_SITE_CODE, category=UserWarning): self._test_page_text() class TestGeneral(WikidataTestCase): @classmethod def setUpClass(cls): super().setUpClass() enwiki = pywikibot.Site('en', 'wikipedia') cls.mainpage = pywikibot.Page(pywikibot.page.Link('Main Page', enwiki)) def testWikibase(self): repo = self.get_repo() item_namespace = repo.namespaces[0] self.assertEqual(item_namespace.defaultcontentmodel, 'wikibase-item') item = ItemPage.fromPage(self.mainpage) self.assertIsInstance(item, ItemPage) self.assertEqual(item.getID(), 'Q5296') self.assertEqual(item.title(), 'Q5296') self.assertIn('en', item.labels) self.assertTrue(item.labels['en'].lower().endswith('main page')) self.assertIn('en', item.aliases) self.assertIn('home page', (a.lower() for a in item.aliases['en'])) self.assertEqual(item.namespace(), 0) item2 = ItemPage(repo, 'q5296') self.assertEqual(item2.getID(), 'Q5296') item2.get() self.assertTrue(item2.labels['en'].lower().endswith('main page')) prop = PropertyPage(repo, 'Property:P21') self.assertEqual(prop.type, 'wikibase-item') self.assertEqual(prop.namespace(), 120) claim = pywikibot.Claim(repo, 'p21') regex = r' is not type .+\.$' with self.assertRaisesRegex(ValueError, regex): claim.setTarget(value='test') claim.setTarget(ItemPage(repo, 'q1')) self.assertEqual(claim._formatValue(), {'entity-type': 'item', 'numeric-id': 1}) def test_cmp(self): self.assertEqual(ItemPage.fromPage(self.mainpage), ItemPage(self.get_repo(), 'q5296')) class TestWikibaseCoordinate(WbRepresentationTestCase): dry = True def test_Coordinate_WbRepresentation_methods(self): repo = self.get_repo() coord = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe='moon') self._test_hashable(coord) def test_Coordinate_dim(self): repo = self.get_repo() x = pywikibot.Coordinate(site=repo, lat=12.0, lon=13.0, precision=5.0) self.assertEqual(x.precisionToDim(), 544434) self.assertIsInstance(x.precisionToDim(), int) y = pywikibot.Coordinate(site=repo, lat=12.0, lon=13.0, dim=54444) self.assertEqual(y.precision, 0.500005084017101) self.assertIsInstance(y.precision, float) z = pywikibot.Coordinate(site=repo, lat=12.0, lon=13.0) regex = r'^No values set for dim or precision$' with self.assertRaisesRegex(ValueError, regex): 
z.precisionToDim() def test_Coordinate_plain_globe(self): repo = self.get_repo() coord = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe='moon') self.assertEqual(coord.toWikibase(), {'latitude': 12.0, 'longitude': 13.0, 'altitude': None, 'precision': 0, 'globe': 'http://www.wikidata.org/entity/Q405'}) def test_Coordinate_entity_uri_globe(self): repo = self.get_repo() coord = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item='http://www.wikidata.org/entity/Q123') self.assertEqual(coord.toWikibase(), {'latitude': 12.0, 'longitude': 13.0, 'altitude': None, 'precision': 0, 'globe': 'http://www.wikidata.org/entity/Q123'}) class TestWikibaseCoordinateNonDry(WbRepresentationTestCase): def test_Coordinate_item_globe(self): repo = self.get_repo() coord = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item=ItemPage(repo, 'Q123')) self.assertEqual(coord.toWikibase(), {'latitude': 12.0, 'longitude': 13.0, 'altitude': None, 'precision': 0, 'globe': 'http://www.wikidata.org/entity/Q123'}) def test_Coordinate_get_globe_item_from_uri(self): repo = self.get_repo() q = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item='http://www.wikidata.org/entity/Q123') self.assertEqual(q.get_globe_item(), ItemPage(repo, 'Q123')) def test_Coordinate_get_globe_item_from_itempage(self): repo = self.get_repo() globe = ItemPage(repo, 'Q123') q = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item=globe) self.assertEqual(q.get_globe_item(), ItemPage(repo, 'Q123')) def test_Coordinate_get_globe_item_from_plain_globe(self): repo = self.get_repo() q = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe='moon') self.assertEqual(q.get_globe_item(), ItemPage(repo, 'Q405')) def test_Coordinate_get_globe_item_provide_repo(self): repo = self.get_repo() q = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item='http://www.wikidata.org/entity/Q123') self.assertEqual(q.get_globe_item(repo), ItemPage(repo, 'Q123')) def test_Coordinate_get_globe_item_different_repo(self): repo = self.get_repo() test_repo = pywikibot.Site('test', 'wikidata') q = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0, globe_item='http://test.wikidata.org/entity/Q123') self.assertEqual(q.get_globe_item(test_repo), ItemPage(test_repo, 'Q123')) def test_Coordinate_equality(self): repo = self.get_repo() a = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0.1, globe='moon') b = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0.1, globe_item='http://www.wikidata.org/entity/Q405') c = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0.1, globe_item=ItemPage(repo, 'Q405')) d = pywikibot.Coordinate( site=repo, lat=12.0, lon=13.0, precision=0.1, globe_item='http://test.wikidata.org/entity/Q405') self.assertEqual(a, b) self.assertEqual(b, c) self.assertEqual(c, a) self.assertNotEqual(a, d) self.assertNotEqual(b, d) self.assertNotEqual(c, d) class TestWbTime(WbRepresentationTestCase): dry = True def test_WbTime_WbRepresentation_methods(self): repo = self.get_repo() t = pywikibot.WbTime(site=repo, year=2010, month=0, day=0, hour=12, minute=43) self._test_hashable(t) def test_WbTime_timestr(self): repo = self.get_repo() t = pywikibot.WbTime(site=repo, year=2010, month=0, day=0, hour=12, minute=43) self.assertEqual(t.toTimestr(), '+00000002010-00-00T12:43:00Z') self.assertEqual(t.toTimestr(force_iso=True), '+2010-01-01T12:43:00Z') t = 
pywikibot.WbTime(site=repo, year=2010, hour=12, minute=43) self.assertEqual(t.toTimestr(), '+00000002010-01-01T12:43:00Z') self.assertEqual(t.toTimestr(force_iso=True), '+2010-01-01T12:43:00Z') t = pywikibot.WbTime(site=repo, year=-2010, hour=12, minute=43) self.assertEqual(t.toTimestr(), '-00000002010-01-01T12:43:00Z') self.assertEqual(t.toTimestr(force_iso=True), '-2010-01-01T12:43:00Z') def test_WbTime_fromTimestr(self): repo = self.get_repo() t = pywikibot.WbTime.fromTimestr('+00000002010-01-01T12:43:00Z', site=repo) self.assertEqual(t, pywikibot.WbTime(site=repo, year=2010, hour=12, minute=43, precision=14)) def test_WbTime_zero_month(self): repo = self.get_repo() t = pywikibot.WbTime.fromTimestr('+00000002010-00-00T12:43:00Z', site=repo) self.assertEqual(t, pywikibot.WbTime(site=repo, year=2010, month=0, day=0, hour=12, minute=43, precision=14)) def test_WbTime_timestamp(self): repo = self.get_repo() timestamp = pywikibot.Timestamp.fromISOformat('2010-01-01T12:43:00Z') t = pywikibot.WbTime(site=repo, year=2010, month=0, day=0, hour=12, minute=43) self.assertEqual(t.toTimestamp(), timestamp) self.assertNotEqual( t, pywikibot.WbTime.fromTimestamp(timestamp, site=repo)) t = pywikibot.WbTime(site=repo, year=2010, hour=12, minute=43) self.assertEqual(t.toTimestamp(), timestamp) t = pywikibot.WbTime(site=repo, year=-2010, hour=12, minute=43) regex = r'^You cannot turn BC dates into a Timestamp$' with self.assertRaisesRegex(ValueError, regex): t.toTimestamp() t = pywikibot.WbTime(site=repo, year=2010, month=1, day=1, hour=12, minute=43, second=0) self.assertEqual(t.toTimestamp(), timestamp) self.assertEqual( t, pywikibot.WbTime.fromTimestamp(timestamp, site=repo)) def test_WbTime_errors(self): repo = self.get_repo() regex = r'^no year given$' with self.assertRaisesRegex(ValueError, regex): pywikibot.WbTime(site=repo, precision=15) with self.assertRaisesRegex(ValueError, regex): pywikibot.WbTime(site=repo, precision='invalid_precision') class TestWbQuantity(WbRepresentationTestCase): dry = True def test_WbQuantity_WbRepresentation_methods(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount=1234, error=1, site=repo) self._test_hashable(q) def test_WbQuantity_integer(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount=1234, error=1, site=repo) self.assertEqual(q.toWikibase(), {'amount': '+1234', 'lowerBound': '+1233', 'upperBound': '+1235', 'unit': '1', }) q = pywikibot.WbQuantity(amount=5, error=(2, 3), site=repo) self.assertEqual(q.toWikibase(), {'amount': '+5', 'lowerBound': '+2', 'upperBound': '+7', 'unit': '1', }) q = pywikibot.WbQuantity(amount=0, error=(0, 0), site=repo) self.assertEqual(q.toWikibase(), {'amount': '+0', 'lowerBound': '+0', 'upperBound': '+0', 'unit': '1', }) q = pywikibot.WbQuantity(amount=-5, error=(2, 3), site=repo) self.assertEqual(q.toWikibase(), {'amount': '-5', 'lowerBound': '-8', 'upperBound': '-3', 'unit': '1', }) def test_WbQuantity_float_27(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount=0.044405586, error=0.0, site=repo) q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict) def test_WbQuantity_scientific(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount='1.3e-13', error='1e-14', site=repo) q_dict = {'amount': '+1.3e-13', 'lowerBound': '+1.2e-13', 'upperBound': '+1.4e-13', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict) def test_WbQuantity_decimal(self): repo = self.get_repo() q = 
pywikibot.WbQuantity(amount=Decimal('0.044405586'), error=Decimal('0.0'), site=repo) q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict) def test_WbQuantity_string(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount='0.044405586', error='0', site=repo) q_dict = {'amount': '+0.044405586', 'lowerBound': '+0.044405586', 'upperBound': '+0.044405586', 'unit': '1', } self.assertEqual(q.toWikibase(), q_dict) def test_WbQuantity_formatting_bound(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount='0.044405586', error='0', site=repo) self.assertEqual(str(q), '{{\n' ' "amount": "+{val}",\n' ' "lowerBound": "+{val}",\n' ' "unit": "1",\n' ' "upperBound": "+{val}"\n' '}}'.format(val='0.044405586')) self.assertEqual(repr(q), 'WbQuantity(amount={val}, ' 'upperBound={val}, lowerBound={val}, ' 'unit=1)'.format(val='0.044405586')) def test_WbQuantity_self_equality(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount='0.044405586', error='0', site=repo) self.assertEqual(q, q) def test_WbQuantity_fromWikibase(self): repo = self.get_repo() q = pywikibot.WbQuantity.fromWikibase({'amount': '+0.0229', 'lowerBound': '0', 'upperBound': '1', 'unit': '1'}, site=repo) self.assertEqual(q.toWikibase(), {'amount': '+0.0229', 'lowerBound': '+0.0000', 'upperBound': '+1.0000', 'unit': '1', }) def test_WbQuantity_errors(self): regex = r'^no amount given$' with self.assertRaisesRegex(ValueError, regex): pywikibot.WbQuantity(amount=None, error=1) def test_WbQuantity_entity_unit(self): repo = self.get_repo() q = pywikibot.WbQuantity(amount=1234, error=1, site=repo, unit='http://www.wikidata.org/entity/Q712226') self.assertEqual(q.toWikibase(), {'amount': '+1234', 'lowerBound': '+1233', 'upperBound': '+1235', 'unit': 'http://www.wikidata.org/entity/Q712226', }) def test_WbQuantity_unit_fromWikibase(self): repo = self.get_repo() q = pywikibot.WbQuantity.fromWikibase({ 'amount': '+1234', 'lowerBound': '+1233', 'upperBound': '+1235', 'unit': 'http://www.wikidata.org/entity/Q712226', }, site=repo) self.assertEqual(q.toWikibase(), {'amount': '+1234', 'lowerBound': '+1233', 'upperBound': '+1235', 'unit': 'http://www.wikidata.org/entity/Q712226', }) class TestWbQuantityNonDry(WbRepresentationTestCase):
MIT License
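setUp caches the repo and its MediaWiki version so individual tests can gate on it; a hypothetical test method sketching that pattern (the version threshold mirrors the point where Wikibase made quantity bounds optional, and MediaWikiVersion is already imported in this test module).

class DemoWbQuantityTest(TestWbQuantityNonDry):
    """Hypothetical extra test reusing self.repo / self.version from setUp."""

    def test_unbounded_quantity(self):
        # skip on repos older than the version that made bounds optional
        if self.version < MediaWikiVersion('1.29.0-wmf.2'):
            self.skipTest('unbounded quantities need a newer Wikibase')
        q = pywikibot.WbQuantity(amount=5, site=self.repo)
        self.assertEqual(q.toWikibase()['amount'], '+5')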
telecominfraproject/oopt-gnpy
gnpy/core/elements.py
Fiber.propagate
python
def propagate(self, *carriers):
    attenuation = db2lin(self.params.con_in + self.params.att_in)

    chan = []
    for carrier in carriers:
        pwr = carrier.power
        pwr = pwr._replace(signal=pwr.signal / attenuation,
                           nli=pwr.nli / attenuation,
                           ase=pwr.ase / attenuation)
        carrier = carrier._replace(power=pwr)
        chan.append(carrier)
    carriers = tuple(f for f in chan)

    attenuation = db2lin(self.params.con_out)
    for carrier in carriers:
        pwr = carrier.power
        carrier_nli = self._gn_analytic(carrier, *carriers)
        pwr = pwr._replace(
            signal=pwr.signal / self.params.lin_attenuation / attenuation,
            nli=(pwr.nli + carrier_nli) / self.params.lin_attenuation / attenuation,
            ase=pwr.ase / self.params.lin_attenuation / attenuation)
        chromatic_dispersion = carrier.chromatic_dispersion + self.chromatic_dispersion(carrier.frequency)
        pmd = sqrt(carrier.pmd**2 + self.pmd**2)
        yield carrier._replace(power=pwr, chromatic_dispersion=chromatic_dispersion, pmd=pmd)
r"""Generator that computes the fiber propagation: attenuation, non-linear interference generation, CD accumulation and PMD accumulation. :param: \*carriers: the channels at the input of the fiber :yield: carrier: the next channel at the output of the fiber
https://github.com/telecominfraproject/oopt-gnpy/blob/78b45a39586704c0491d0905f17720ad77447f24/gnpy/core/elements.py#L423-L455
from numpy import abs, arange, array, divide, errstate, ones, interp, mean, pi, polyfit, polyval, sum, sqrt from scipy.constants import h, c from collections import namedtuple from gnpy.core.utils import lin2db, db2lin, arrange_frequencies, snr_sum from gnpy.core.parameters import FiberParams, PumpParams from gnpy.core.science_utils import NliSolver, RamanSolver, propagate_raman_fiber, _psi class Location(namedtuple('Location', 'latitude longitude city region')): def __new__(cls, latitude=0, longitude=0, city=None, region=None): return super().__new__(cls, latitude, longitude, city, region) class _Node: def __init__(self, uid, name=None, params=None, metadata=None, operational=None, type_variety=None): if name is None: name = uid self.uid, self.name = uid, name if metadata is None: metadata = {'location': {}} if metadata and not isinstance(metadata.get('location'), Location): metadata['location'] = Location(**metadata.pop('location', {})) self.params, self.metadata, self.operational = params, metadata, operational if type_variety: self.type_variety = type_variety @property def location(self): return self.metadata['location'] loc = location @property def longitude(self): return self.location.longitude lng = longitude @property def latitude(self): return self.location.latitude lat = latitude class Transceiver(_Node): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.osnr_ase_01nm = None self.osnr_ase = None self.osnr_nli = None self.snr = None self.passive = False self.baud_rate = None self.chromatic_dispersion = None self.pmd = None def _calc_cd(self, spectral_info): self.chromatic_dispersion = [carrier.chromatic_dispersion * 1e3 for carrier in spectral_info.carriers] def _calc_pmd(self, spectral_info): self.pmd = [carrier.pmd*1e12 for carrier in spectral_info.carriers] def _calc_snr(self, spectral_info): with errstate(divide='ignore'): self.baud_rate = [c.baud_rate for c in spectral_info.carriers] ratio_01nm = [lin2db(12.5e9 / b_rate) for b_rate in self.baud_rate] self.raw_osnr_ase = [lin2db(divide(c.power.signal, c.power.ase)) for c in spectral_info.carriers] self.raw_osnr_ase_01nm = [ase - ratio for ase, ratio in zip(self.raw_osnr_ase, ratio_01nm)] self.raw_osnr_nli = [lin2db(divide(c.power.signal, c.power.nli)) for c in spectral_info.carriers] self.raw_snr = [lin2db(divide(c.power.signal, c.power.nli + c.power.ase)) for c in spectral_info.carriers] self.raw_snr_01nm = [snr - ratio for snr, ratio in zip(self.raw_snr, ratio_01nm)] self.osnr_ase = self.raw_osnr_ase self.osnr_ase_01nm = self.raw_osnr_ase_01nm self.osnr_nli = self.raw_osnr_nli self.snr = self.raw_snr self.snr_01nm = self.raw_snr_01nm def update_snr(self, *args): snr_added = 0 for s in args: snr_added += db2lin(-s) snr_added = -lin2db(snr_added) self.osnr_ase = list(map(lambda x, y: snr_sum(x, y, snr_added), self.raw_osnr_ase, self.baud_rate)) self.snr = list(map(lambda x, y: snr_sum(x, y, snr_added), self.raw_snr, self.baud_rate)) self.osnr_ase_01nm = list(map(lambda x: snr_sum(x, 12.5e9, snr_added), self.raw_osnr_ase_01nm)) self.snr_01nm = list(map(lambda x: snr_sum(x, 12.5e9, snr_added), self.raw_snr_01nm)) @property def to_json(self): return {'uid': self.uid, 'type': type(self).__name__, 'metadata': { 'location': self.metadata['location']._asdict() } } def __repr__(self): return (f'{type(self).__name__}(' f'uid={self.uid!r}, ' f'osnr_ase_01nm={self.osnr_ase_01nm!r}, ' f'osnr_ase={self.osnr_ase!r}, ' f'osnr_nli={self.osnr_nli!r}, ' f'snr={self.snr!r}, ' 
f'chromatic_dispersion={self.chromatic_dispersion!r}, ' f'pmd={self.pmd!r})') def __str__(self): if self.snr is None or self.osnr_ase is None: return f'{type(self).__name__} {self.uid}' snr = round(mean(self.snr), 2) osnr_ase = round(mean(self.osnr_ase), 2) osnr_ase_01nm = round(mean(self.osnr_ase_01nm), 2) snr_01nm = round(mean(self.snr_01nm), 2) cd = mean(self.chromatic_dispersion) pmd = mean(self.pmd) return '\n'.join([f'{type(self).__name__} {self.uid}', f' GSNR (0.1nm, dB): {snr_01nm:.2f}', f' GSNR (signal bw, dB): {snr:.2f}', f' OSNR ASE (0.1nm, dB): {osnr_ase_01nm:.2f}', f' OSNR ASE (signal bw, dB): {osnr_ase:.2f}', f' CD (ps/nm): {cd:.2f}', f' PMD (ps): {pmd:.2f}']) def __call__(self, spectral_info): self._calc_snr(spectral_info) self._calc_cd(spectral_info) self._calc_pmd(spectral_info) return spectral_info RoadmParams = namedtuple('RoadmParams', 'target_pch_out_db add_drop_osnr pmd restrictions per_degree_pch_out_db') class Roadm(_Node): def __init__(self, *args, params, **kwargs): if 'per_degree_pch_out_db' not in params.keys(): params['per_degree_pch_out_db'] = {} super().__init__(*args, params=RoadmParams(**params), **kwargs) self.loss = 0 self.effective_loss = None self.effective_pch_out_db = self.params.target_pch_out_db self.passive = True self.restrictions = self.params.restrictions self.per_degree_pch_out_db = self.params.per_degree_pch_out_db @property def to_json(self): return {'uid': self.uid, 'type': type(self).__name__, 'params': { 'target_pch_out_db': self.effective_pch_out_db, 'restrictions': self.restrictions, 'per_degree_pch_out_db': self.per_degree_pch_out_db }, 'metadata': { 'location': self.metadata['location']._asdict() } } def __repr__(self): return f'{type(self).__name__}(uid={self.uid!r}, loss={self.loss!r})' def __str__(self): if self.effective_loss is None: return f'{type(self).__name__} {self.uid}' return '\n'.join([f'{type(self).__name__} {self.uid}', f' effective loss (dB): {self.effective_loss:.2f}', f' pch out (dBm): {self.effective_pch_out_db:.2f}']) def propagate(self, pref, *carriers, degree): per_degree_pch = self.per_degree_pch_out_db[degree] if degree in self.per_degree_pch_out_db.keys() else self.params.target_pch_out_db self.effective_pch_out_db = min(pref.p_spani, per_degree_pch) self.effective_loss = pref.p_spani - self.effective_pch_out_db carriers_power = array([c.power.signal + c.power.nli + c.power.ase for c in carriers]) carriers_att = list(map(lambda x: lin2db(x * 1e3) - per_degree_pch, carriers_power)) exceeding_att = -min(list(filter(lambda x: x < 0, carriers_att)), default=0) carriers_att = list(map(lambda x: db2lin(x + exceeding_att), carriers_att)) for carrier_att, carrier in zip(carriers_att, carriers): pwr = carrier.power pwr = pwr._replace(signal=pwr.signal / carrier_att, nli=pwr.nli / carrier_att, ase=pwr.ase / carrier_att) pmd = sqrt(carrier.pmd**2 + self.params.pmd**2) yield carrier._replace(power=pwr, pmd=pmd) def update_pref(self, pref): return pref._replace(p_span0=pref.p_span0, p_spani=self.effective_pch_out_db) def __call__(self, spectral_info, degree): carriers = tuple(self.propagate(spectral_info.pref, *spectral_info.carriers, degree=degree)) pref = self.update_pref(spectral_info.pref) return spectral_info._replace(carriers=carriers, pref=pref) FusedParams = namedtuple('FusedParams', 'loss') class Fused(_Node): def __init__(self, *args, params=None, **kwargs): if params is None: params = {'loss': 1} super().__init__(*args, params=FusedParams(**params), **kwargs) self.loss = self.params.loss self.passive = True 
@property def to_json(self): return {'uid': self.uid, 'type': type(self).__name__, 'params': { 'loss': self.loss }, 'metadata': { 'location': self.metadata['location']._asdict() } } def __repr__(self): return f'{type(self).__name__}(uid={self.uid!r}, loss={self.loss!r})' def __str__(self): return '\n'.join([f'{type(self).__name__} {self.uid}', f' loss (dB): {self.loss:.2f}']) def propagate(self, *carriers): attenuation = db2lin(self.loss) for carrier in carriers: pwr = carrier.power pwr = pwr._replace(signal=pwr.signal / attenuation, nli=pwr.nli / attenuation, ase=pwr.ase / attenuation) yield carrier._replace(power=pwr) def update_pref(self, pref): return pref._replace(p_span0=pref.p_span0, p_spani=pref.p_spani - self.loss) def __call__(self, spectral_info): carriers = tuple(self.propagate(*spectral_info.carriers)) pref = self.update_pref(spectral_info.pref) return spectral_info._replace(carriers=carriers, pref=pref) class Fiber(_Node): def __init__(self, *args, params=None, **kwargs): if not params: params = {} super().__init__(*args, params=FiberParams(**params), **kwargs) self.pch_out_db = None self.nli_solver = NliSolver(self) @property def to_json(self): return {'uid': self.uid, 'type': type(self).__name__, 'type_variety': self.type_variety, 'params': { 'length': round(self.params.length * 1e-3, 6), 'loss_coef': round(self.params.loss_coef * 1e3, 6), 'length_units': 'km', 'att_in': self.params.att_in, 'con_in': self.params.con_in, 'con_out': self.params.con_out }, 'metadata': { 'location': self.metadata['location']._asdict() } } def __repr__(self): return f'{type(self).__name__}(uid={self.uid!r}, ' f'length={round(self.params.length * 1e-3,1)!r}km, ' f'loss={round(self.loss,1)!r}dB)' def __str__(self): if self.pch_out_db is None: return f'{type(self).__name__} {self.uid}' return '\n'.join([f'{type(self).__name__} {self.uid}', f' type_variety: {self.type_variety}', f' length (km): {self.params.length * 1e-3:.2f}', f' pad att_in (dB): {self.params.att_in:.2f}', f' total loss (dB): {self.loss:.2f}', f' (includes conn loss (dB) in: {self.params.con_in:.2f} out: {self.params.con_out:.2f})', f' (conn loss out includes EOL margin defined in eqpt_config.json)', f' pch out (dBm): {self.pch_out_db:.2f}']) @property def loss(self): return self.params.loss_coef * self.params.length + self.params.con_in + self.params.con_out + self.params.att_in @property def passive(self): return True def alpha(self, frequencies): if type(self.params.loss_coef) == dict: alpha = interp(frequencies, self.params.f_loss_ref, self.params.lin_loss_exp) else: alpha = self.params.lin_loss_exp * ones(frequencies.shape) return alpha def alpha0(self, f_ref=193.5e12): return self.alpha(f_ref * ones(1))[0] def chromatic_dispersion(self, freq=193.5e12): beta2 = self.params.beta2 beta3 = self.params.beta3 ref_f = self.params.ref_frequency length = self.params.length beta = beta2 + 2 * pi * beta3 * (freq - ref_f) dispersion = -beta * 2 * pi * ref_f**2 / c return dispersion * length @property def pmd(self): return self.params.pmd_coef * sqrt(self.params.length) def _gn_analytic(self, carrier, *carriers): g_nli = 0 for interfering_carrier in carriers: psi = _psi(carrier, interfering_carrier, beta2=self.params.beta2, asymptotic_length=self.params.asymptotic_length) g_nli += (interfering_carrier.power.signal / interfering_carrier.baud_rate)**2 * (carrier.power.signal / carrier.baud_rate) * psi g_nli *= (16 / 27) * (self.params.gamma * self.params.effective_length)**2 / (2 * pi * abs(self.params.beta2) * 
self.params.asymptotic_length) carrier_nli = carrier.baud_rate * g_nli return carrier_nli
BSD 3-Clause New or Revised License
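propagate is a generator over per-channel namedtuples; a sketch of invoking it directly, where `fiber` is assumed to be a configured gnpy Fiber element and `si` a SpectralInformation object built elsewhere (their construction is not part of this record).

# push every carrier of the spectrum through the span and collect the outputs
out_carriers = tuple(fiber.propagate(*si.carriers))

for carrier in out_carriers:
    # per-channel powers plus the CD and PMD accumulated along the span
    print(carrier.frequency, carrier.power.signal, carrier.power.nli,
          carrier.chromatic_dispersion, carrier.pmd)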
hernanchavezthielemann/gro2lam
lib/handling/gromacs.py
sidemol_data_gatherer
python
def sidemol_data_gatherer( _sm_files_, _sm_):
    print( '\nSearching for: {}'.format( _sm_ ))
    _flag_ = True
    _file_ = ''
    _sm_data_c_ = {}

    for smfile in _sm_files_:
        with open( smfile, 'r') as sm_data:
            read_flag = False
            i = 0
            for j_line in sm_data:
                j_line = j_line.split(';')[0].strip()
                if j_line.startswith('['):
                    if j_line.startswith('[ moleculetype ]'):
                        read_flag = True
                        i = 0
                    else:
                        read_flag = False
                elif read_flag and j_line.startswith( _sm_):
                    _file_ = smfile
                    break
                elif read_flag:
                    i +=1
                    if i > 3:
                        read_flag = False

    if _file_=='':
        pop_err_1('Error!! side molecule {} not found in itp -- '.format( _sm_))
        _flag_ = False
    else:
        print( 'Success!, found in : {}\n'.format( _file_))

        tag_str = [ 'atoms', 'bonds', 'angles', 'dihedrals','fin']
        _sm_data_c_ = { x:[] for x in tag_str if x != 'fin'}

        read_flag = False
        iner_flag = False
        cd_tag = ''
        i = 0

        with open( _file_, 'r') as sm_data:
            for j_line0 in sm_data:
                j_line = j_line0.split(';')[0].split()
                if not j_line:
                    pass
                elif read_flag:
                    if j_line[0][0] == '#':
                        pass
                    elif j_line[0][0] == '[':
                        if j_line[1] != tag_str[i] :
                            if j_line[1] in tag_str[i+1:]:
                                i = tag_str.index( j_line[1])
                                cd_tag = tag_str[i]
                                iner_flag = True
                                print( '** Gathering {} data'.format( cd_tag))
                            elif j_line[1] == 'moleculetype':
                                break
                            else:
                                txt_s = '> {} not considered in {}'
                                print txt_s.format( j_line[1], _sm_)
                                iner_flag = False
                        else :
                            cd_tag = tag_str[i]
                            print( '* Gathering {} data'.format( cd_tag))
                            iner_flag = True
                    elif iner_flag:
                        _sm_data_c_[ cd_tag].append( j_line)
                elif j_line0.lstrip().startswith( _sm_):
                    read_flag = True

        _sm_data_c_, _ = split_dihedral_improper( _sm_data_c_)

    return _sm_data_c_, _flag_
Collects all the data related to one kind of side molecule; the data types are specified in ``startstrings``.
https://github.com/hernanchavezthielemann/gro2lam/blob/e60aca80ad441f9b29acd30a6bef4c47d97e5e30/lib/handling/gromacs.py#L582-L672
__author__ = 'Hernan Chavez Thielemann <hchavezthiele at gmail dot com>' from lib.misc.warn import wrg_1, wrg_3, pop_err_1, pop_wrg_1 from lib.misc.file import check_file, debugger_file, fileseeker from lib.misc.geometry import rotate, arcos, raiz from lib.misc.data import isnot_num from sys import exit def extract_gromacs_data( _data_files_, _autoload_): filename_gro = _data_files_[0] filename_top = _data_files_[1] filename_ff = _data_files_[2] filename_nb = _data_files_[3] filename_bon = _data_files_[4] data_container = {} data_container['define'] = {} print 'Autoload: {}\n'.format( _autoload_) if not _autoload_: print filename_ff _sidemol_f_ = False section = '''--------------- FILE GRO ----------------------''' ok_flag, gro_pack, b_xyz = get_gro_fixed_line( filename_gro) if not ok_flag: pop_err_1('Problem detected in :\n' + section) return {}, [ ok_flag, _sidemol_f_] _mol_, _mtype_, _type_, _xyz_, _mtypes_ = gro_pack data_container['box'] = [[],[]] b_xyz = [ x_y_z*10 for x_y_z in b_xyz ] angles = [] Ar = [[0,0,0],[0,0,0],[0,0,0]] for i in range(3): Ar[i][i] = b_xyz[i] if sum( b_xyz) < 2.8: exit('xx/0 Error in .gro file, box dimension 000') elif len( b_xyz) == 3: pass elif len( b_xyz) == 9: k = 0 for i in range(3): for j in range(3): if i != j: Ar[i][j] = b_xyz[ k + 3] k += 1 cero = 1e-12 if Ar[1][0] < cero or Ar[2][0] < cero or Ar[2][1] < cero: print('Your triclinic cell will be rotated to make it!') a_tor_y = -arcos( (Ar[0][0])/(raiz(Ar[0][0]*Ar[0][0]+Ar[2][0]*Ar[2][0])) ) Ar = rotate( Ar, a_tor_y, 'y') a_tor_z = arcos( (Ar[0][0])/(raiz(Ar[0][0]*Ar[0][0]+Ar[1][0]*Ar[1][0])) ) Ar = rotate( Ar, a_tor_z, 'z') a_tor_x = arcos( Ar[1][1]/( raiz( Ar[1][1]*Ar[1][1] + Ar[2][1]*Ar[2][1])) ) Ar = rotate( Ar, a_tor_x) _xyz_ = rotate( rotate( rotate( _xyz_, a_tor_y, 'y'), a_tor_z, 'z'), a_tor_x) else: exit('xx/0 Error box dimension 001') _x_, _y_, _z_ = _xyz_ xlo = min( _x_)*10 xhi = xlo + Ar[0][0] ylo = min( _y_)*10 yhi = ylo + Ar[1][1] zlo = min( _z_)*10 zhi = zlo + Ar[2][2] data_container['box'][0] = [ xlo, xhi, ylo, yhi, zlo, zhi] data_container['box'][1] = [ Ar[0][1], Ar[0][2], Ar[1][2]] data_container['atomsdata'] = [ _mol_, _mtypes_, _type_, _xyz_, _mtype_] section = '''---------------- .FILE TOP. 
---------------''' data_container['defaults'], ok_flag, _a_fff_ = ck_forcefield( filename_ff, filename_top) filename_ff = _a_fff_ if not ok_flag: pop_err_1('Problem detected in :\n' + section.split('.')[1]) return {}, [ ok_flag, _sidemol_f_] buckorlj = int(data_container['defaults'][0]) startstrings = ['[ moleculetype ]', '[ atoms ]', '[ bonds ]', '[ pairs ]', '[ angles ]', '[ dihedrals ]', '[ system ]', '[ molecules ]', ''] exclusions_ = ['[ bonds ]', '[ pairs ]', '[ angles ]', '[ dihedrals ]'] pure_side_mol_flag = ( ( seek_for_directive( [ filename_top], 'moleculetype') == '') or ( filename_nb == filename_ff and filename_nb == filename_bon)) if pure_side_mol_flag: startstrings = startstrings[-3:] print wrg_3( 'Using pure side molecule scheme') data_container['atoms'] = [] data_container['bonds'] = [] data_container['angles'] = [] data_container['dihedrals'] = [] for ti in range(len(startstrings))[:-1]: s_str_ = startstrings[ ti][ 2:-2] data_container[ s_str_], ok_flag, _ = get_topitp_line( filename_top, startstrings[ti] ) if not ok_flag: if startstrings[ti] not in exclusions_: print wrg_3( 'Not ok flag in <extract_gromacs_data> top file' + 'section, in ' + s_str_) return {}, [ ok_flag, _sidemol_f_] else: ok_flag = True n_atoms = len( data_container['atoms']) n_bonds = len( data_container['bonds']) n_angles = len( data_container['angles']) section = '''---------- .SIDE MOLE FILES. -------------''' if _autoload_: data_container, ok_flag, _sidemol_f_ = sidemol_data( filename_top, data_container) if not ok_flag: pop_err_1( 'Problem detected in :\n' + section.split('.')[1]) return {}, [ ok_flag, _sidemol_f_] section = '''----------------- .FILE NB. ---------------''' startstrings = ['[ atomtypes ]', '[ nonbond_params ]'] data_container['atomtypes'], ok_flag, _ = get_topitp_line( filename_nb, '[ atomtypes ]') if not ok_flag: pop_err_1('Problem detected in :\n' + section.split('.')[1]) return {}, [ ok_flag, _sidemol_f_] n_atomtypes = len( data_container['atomtypes']) section = '''---------------- .FILE BON. ---------------''' startstrings = ['[ bondtypes ]', '[ angletypes ]', '[ dihedraltypes ]', ''] if filename_nb == filename_ff and filename_nb == filename_bon: for bi in range( len( startstrings))[:-1]: s_str_ = startstrings[ bi][ 2:-2] data_container[ s_str_] = [] data_container['define'][s_str_[:-5]] = {} startstrings = startstrings[-1] aux_strings = [ 'bonds', 'angles', 'dihedrals'] for bi in range( len( startstrings))[:-1]: s_str_ = startstrings[ bi][ 2:-2] _aux_here_ = get_topitp_line( filename_bon, startstrings[ bi]) data_container[ s_str_], ok_flag, _data_define_ = _aux_here_ if bi == 2: for di in range( len(data_container[ s_str_])): dih_pt_line = data_container[ s_str_][di] if not isnot_num( dih_pt_line[2]): pop_wrg_1( 'Dihedral potential problem found!!\nAdopting' + ' X-A1-A2-X configuration for: ' + ' {}-{}'.format( *dih_pt_line[:2]) ) new_row = ['X'] + dih_pt_line[:2] + ['X'] + dih_pt_line[2:] data_container[ s_str_][di] = new_row elif not isnot_num( dih_pt_line[3]): exit('Error 0031 undefined dihedral') data_container['define'][s_str_[:-5]] = _data_define_ if not ok_flag: if data_container[ aux_strings[ bi]] != []: pop_err_1('Problem detected in :\n' + section.split('.')[1]) return {}, [ ok_flag, _sidemol_f_] else: ok_flag = True section = '''------------ .#define & Impropers. 
------------''' gromosff_flag = False data_container[ 'define'][ 'improper'] = {} aux_here = {} print( section.split('.')[1]) if filename_nb != filename_ff and filename_nb != filename_bon: print(" Is it GROMOS there ?? ") aux_here = get_gromos_define( filename_bon) else: print('no gromos check') for key_ in aux_here.keys(): if aux_here[ key_] != {}: print ( 'GROMOS ' + key_ + ' kind detected!') data_container[ 'define'][ key_].update( aux_here[ key_]) gromosff_flag = True dihe_g_data = data_container[ 'dihedraltypes'] if 'dihedraltypes' == key_+'types' and dihe_g_data != []: rewrite_flag = False for gd_ in range( len( dihe_g_data)): if dihe_g_data[gd_][2].isdigit(): if not rewrite_flag: print('Dihedral with 2 atoms re-formating to 4: ') rewrite_flag = True dihe_g_data[gd_] = ( [ 'X',] + dihe_g_data[ gd_][:2] + [ 'X',] + dihe_g_data[ gd_][2:]) print (dihe_g_data[ gd_]) if rewrite_flag: data_container[ 'dihedraltypes'] = dihe_g_data if gromosff_flag: for ss_ in startstrings[:-1]: s_str_ = ss_[ 2:-2] data_aux = data_container[ s_str_] cont_k = s_str_[ :-5] cddd = data_container[ 'define'][ cont_k] for i in range( len( data_aux)): if len( data_aux[i][-1].split('.')) < 2: if not data_aux[i][-1].isdigit(): aux = data_aux[i][:-1] + cddd[ data_aux[i][-1]] data_container[ s_str_][i] = aux data_container = split_define_dihe_impr( data_container) n_dihedrals = len( data_container['dihedrals']) n_impropers = len( data_container['impropers']) '''-------------- "Side Mol" --------------''' n_atomsnew = len( _type_) if _sidemol_f_: sidemol = data_container['sidemol'] side_bonds_n = 0 side_angles_n = 0 side_dihed_n = 0 side_improp_n = 0 for sb in range( len( sidemol['tag'])): bonds_x_mol = len( sidemol['data'][sb]['bonds']) angles_x_mol = len( sidemol['data'][sb]['angles']) dihedr_x_mol = len( sidemol['data'][sb]['dihedrals']) improp_x_mol = len( sidemol['data'][sb]['impropers']) sm_quantity = sidemol['num'][sb] side_bonds_n += sm_quantity * bonds_x_mol side_angles_n += sm_quantity * angles_x_mol side_dihed_n += sm_quantity * dihedr_x_mol side_improp_n += sm_quantity * improp_x_mol n_bondsnew = n_bonds + side_bonds_n n_anglesnew = n_angles + side_angles_n n_dihednew = n_dihedrals + side_dihed_n n_impropnew = n_impropers + side_improp_n contentkey = [ 'bond', 'angle', 'improper', 'dihedral'] for cont_k in contentkey: cddd = data_container[ 'define'][ cont_k] if cddd.keys() != []: for sb in range( len( sidemol['tag'])): datacont = sidemol['data'][sb][cont_k+'s'] for dc in range( len( datacont)): if isnot_num( datacont[dc][-1]): aux = datacont[dc][:-1] + cddd[ datacont[dc][-1]] sidemol['data'][sb][cont_k+'s'][dc] = aux _charge_ = {} _conv_dict_ = {} for sb in range( len( sidemol['tag'])): for at in range( len( sidemol['data'][sb]['atoms'])): a_opls_tag = sidemol['data'][sb]['atoms'][at][1] a_elem_tag = sidemol['data'][sb]['atoms'][at][4] a_charge = float( sidemol['data'][sb]['atoms'][at][6]) _charge_[a_opls_tag] = a_charge _conv_dict_[ a_elem_tag] = a_opls_tag print '='*45+'\n'+'='*5+' Charges found: ' print _charge_ print _conv_dict_ data_container['S_charge'] = _charge_ data_container['S_translation'] = _conv_dict_ smol_extra_bondtypes = [] smol_extra_angletypes = [] smol_extra_dihedraltypes = [] smol_extra_impropertypes = [] bn_namelist = [] an_namelist = [] di_namelist = [] im_namelist = [] for sb in range( len( sidemol['tag'])): _smd_ = sidemol['data'][sb] _at_dic_here = {} for _at in range( len( _smd_['atoms'])): _smat_ = _smd_['atoms'][_at] _at_dic_here[ _smat_[0]] = _smat_[1] for _bn in range( len( 
_smd_['bonds'])): _smbn_ = _smd_['bonds'][_bn] aux_here = [_at_dic_here[ _smbn_[0]], _at_dic_here[ _smbn_[1]]] name = '{}-{}'.format(*aux_here) if name not in bn_namelist and len( _smbn_[2:]) > 1: bn_namelist.append( name) smol_extra_bondtypes.append( aux_here + _smbn_[2:]) for _an in range( len( _smd_['angles'])): _sman_ = _smd_['angles'][_an] aux_here = [_at_dic_here[ _sman_[0]], _at_dic_here[ _sman_[1]], _at_dic_here[ _sman_[2]] ] name = '{}-{}-{}'.format(*aux_here) if name not in an_namelist and len( _sman_[3:]) > 1: an_namelist.append( name) smol_extra_angletypes.append( aux_here + _sman_[3:]) for _dh in range( len( _smd_['dihedrals'])): _smdh_ = _smd_['dihedrals'][_dh] aux_here = [_at_dic_here[ _smdh_[0]], _at_dic_here[ _smdh_[1]], _at_dic_here[ _smdh_[2]], _at_dic_here[ _smdh_[3]]] name = '{}-{}-{}-{}'.format(*aux_here) if name not in di_namelist and len( _smdh_[4:]) > 1: di_namelist.append( name) smol_extra_dihedraltypes.append( aux_here + _smdh_[4:]) for _im in range( len( _smd_['impropers'])): _smim_ = _smd_['impropers'][_im] aux_here = [_at_dic_here[ _smim_[0]], _at_dic_here[ _smim_[1]], _at_dic_here[ _smim_[2]], _at_dic_here[ _smim_[3]]] name = '{}-{}-{}-{}'.format(*aux_here) if name not in im_namelist and len( _smim_[4:]) > 1: im_namelist.append( name) smol_extra_impropertypes.append( aux_here + _smim_[4:]) if len( _smd_.keys()) > 5: print ('Uuupa!! This thing is not implemented yet' + ' as side mol part') a_key = [ 'atoms', 'bonds', 'angles', 'dihedrals', 'impropers'] for ky in _smd_.keys(): if ky not in a_key: print ('-- > this key : ' + ky) data_container['bondtypes'] = ( smol_extra_bondtypes + data_container['bondtypes'] ) data_container['angletypes'] = ( smol_extra_angletypes + data_container['angletypes']) data_container['dihedraltypes'] = ( smol_extra_dihedraltypes + data_container['dihedraltypes']) data_container['impropertypes'] = ( smol_extra_impropertypes + data_container['impropertypes']) else: n_bondsnew = n_bonds n_anglesnew = n_angles n_atomsnew = n_atoms n_dihednew = n_dihedrals n_impropnew = n_impropers nice_list = [ 'bondtypes', 'angletypes', 'dihedraltypes','impropertypes'] for it in range( len( nice_list)): _aux_set_here = set() poss = it + 2 if poss > 4: poss = 4 for i in range( len ( data_container[ nice_list[it] ])): _aux_set_here.add( data_container[ nice_list[it] ][i][ poss ]) data_container[ nice_list[it][:4]+'_kinds'] = _aux_set_here n_bondstypes = len( data_container['bondtypes']) n_anglestypes = len( data_container['angletypes']) n_dihedraltypes = len( data_container['dihedraltypes']) n_impropertypes = len( data_container['impropertypes']) data_container['numbers']={} data_container['numbers']['total'] = [n_atomsnew, n_bondsnew, n_anglesnew, n_dihednew, n_impropnew ] data_container['numbers']['type'] = [n_atomtypes, n_bondstypes, n_anglestypes, n_dihedraltypes, n_impropertypes] print 'Ending gromacs data parsing\n' return data_container, [ ok_flag, _sidemol_f_] def sidemol_data( _file_top_, data_container): sidemol = {'tag': [],'num':[], 'data':[] } sm_flag = False _aux_m_ = data_container[ 'molecules'] if 'moleculetype' in data_container.keys(): non_sm = data_container['moleculetype'] non_sm = [non_sm[i][0] for i in range(len(non_sm))] _buffer_ = '' else: non_sm = [''] _buffer_ = '0' for i in range( len( _aux_m_)) : if _aux_m_[i][0] not in non_sm: sidemol['tag'].append( _aux_m_[i][0]) sidemol['num'].append( int(_aux_m_[i][1])) sm_flag = True if sm_flag: print ('\nLoading side molecule files: ' ) _sm_files_ = [] root_dir = '/'.join( 
_file_top_.split('/')[:-1]+['']) ok_flag = False with open( _file_top_, 'r') as topdata: if sidemol['tag'] == []: topdata = [] for k_line in topdata: if k_line.startswith('#'): logic_test = ('#if' not in _buffer_ and _buffer_ != '') if k_line.startswith('#include') and logic_test: if _sm_files_ == []: ok_flag = True try: new_filename = k_line.split('"')[1].lstrip('.') except IndexError: auxt = wrg_1( 'Format error with {}') print( auxt.format( k_line.split()[-1] ) ) new_filename = new_filename.lstrip('/').split('/')[-1] po_file = fileseeker( root_dir, new_filename) if po_file != []: _sm_files_.append( po_file[0]) print( 'SM_file : {}'.format(_sm_files_[-1])) ok_flag *= check_file( po_file[0], content='[ atoms ]') else: _buffer_ = k_line if ok_flag: for sm in sidemol['tag']: aux_data, aux_flag = sidemol_data_gatherer( _sm_files_, sm) ok_flag *= aux_flag sidemol['data'].append( aux_data) data_container['sidemol'] = sidemol else: print ('No side molecule files detected!' ) ok_flag = True return data_container, ok_flag, sm_flag
MIT License
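The module shown as the context of the record above parses a GROMACS .gro/.top/.itp file set into a single dictionary of atoms, bonds, angles, dihedrals and impropers. A hedged driver sketch follows; the importing module name and the five file names are assumptions, since the record only shows the module body:

# Hypothetical driver for extract_gromacs_data(); the module name
# 'gromacs_parser' and the file names are placeholders, not part of the record.
from gromacs_parser import extract_gromacs_data

data_files = ['conf.gro',          # coordinates + box vectors
              'topol.top',         # topology
              'forcefield.itp',    # force-field header
              'ffnonbonded.itp',   # non-bonded parameters
              'ffbonded.itp']      # bonded parameters

data, (ok_flag, has_side_molecules) = extract_gromacs_data(data_files, True)
if ok_flag:
    # [n_atoms, n_bonds, n_angles, n_dihedrals, n_impropers]
    print(data['numbers']['total'])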
nickjj/sublime-text-3-packages
Packages/python-markdown/st3/markdown/inlinepatterns.py
HtmlPattern.unescape
python
def unescape(self, text): try: stash = self.markdown.treeprocessors['inline'].stashed_nodes except KeyError: return text def get_stash(m): id = m.group(1) value = stash.get(id) if value is not None: try: return self.markdown.serializer(value) except: return '\%s' % value return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text)
Return unescaped text given text with an inline placeholder.
https://github.com/nickjj/sublime-text-3-packages/blob/15a992d5982337169dadb50fd0dbca4ca3be992e/Packages/python-markdown/st3/markdown/inlinepatterns.py#L335-L351
from __future__ import absolute_import from __future__ import unicode_literals from . import util from . import odict import re try: from urllib.parse import urlparse, urlunparse except ImportError: from urlparse import urlparse, urlunparse try: from html import entities except ImportError: import htmlentitydefs as entities def build_inlinepatterns(md_instance, **kwargs): inlinePatterns = odict.OrderedDict() inlinePatterns["backtick"] = BacktickPattern(BACKTICK_RE) inlinePatterns["escape"] = EscapePattern(ESCAPE_RE, md_instance) inlinePatterns["reference"] = ReferencePattern(REFERENCE_RE, md_instance) inlinePatterns["link"] = LinkPattern(LINK_RE, md_instance) inlinePatterns["image_link"] = ImagePattern(IMAGE_LINK_RE, md_instance) inlinePatterns["image_reference"] = ImageReferencePattern( IMAGE_REFERENCE_RE, md_instance ) inlinePatterns["short_reference"] = ReferencePattern( SHORT_REF_RE, md_instance ) inlinePatterns["autolink"] = AutolinkPattern(AUTOLINK_RE, md_instance) inlinePatterns["automail"] = AutomailPattern(AUTOMAIL_RE, md_instance) inlinePatterns["linebreak"] = SubstituteTagPattern(LINE_BREAK_RE, 'br') if md_instance.safeMode != 'escape': inlinePatterns["html"] = HtmlPattern(HTML_RE, md_instance) inlinePatterns["entity"] = HtmlPattern(ENTITY_RE, md_instance) inlinePatterns["not_strong"] = SimpleTextPattern(NOT_STRONG_RE) inlinePatterns["em_strong"] = DoubleTagPattern(EM_STRONG_RE, 'strong,em') inlinePatterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'em,strong') inlinePatterns["strong"] = SimpleTagPattern(STRONG_RE, 'strong') inlinePatterns["emphasis"] = SimpleTagPattern(EMPHASIS_RE, 'em') if md_instance.smart_emphasis: inlinePatterns["emphasis2"] = SimpleTagPattern(SMART_EMPHASIS_RE, 'em') else: inlinePatterns["emphasis2"] = SimpleTagPattern(EMPHASIS_2_RE, 'em') return inlinePatterns NOBRACKET = r'[^\]\[]*' BRK = ( r'\[(' + (NOBRACKET + r'(\[')*6 + (NOBRACKET + r'\])*')*6 + NOBRACKET + r')\]' ) NOIMG = r'(?<!\!)' BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' ESCAPE_RE = r'\\(.)' EMPHASIS_RE = r'(\*)([^\*]+)\2' STRONG_RE = r'(\*{2}|_{2})(.+?)\2' EM_STRONG_RE = r'(\*|_)\2{2}(.+?)\2(.*?)\2{2}' STRONG_EM_RE = r'(\*|_)\2{2}(.+?)\2{2}(.*?)\2' SMART_EMPHASIS_RE = r'(?<!\w)(_)(?!_)(.+?)(?<!_)\2(?!\w)' EMPHASIS_2_RE = r'(_)(.+?)\2' LINK_RE = NOIMG + BRK + r'''\(\s*(<.*?>|((?:(?:\(.*?\))|[^\(\)]))*?)\s*((['"])(.*?)\12\s*)?\)''' IMAGE_LINK_RE = r'\!' + BRK + r'\s*\((<.*?>|([^")]+"[^"]*"|[^\)]*))\)' REFERENCE_RE = NOIMG + BRK + r'\s?\[([^\]]*)\]' SHORT_REF_RE = NOIMG + r'\[([^\]]+)\]' IMAGE_REFERENCE_RE = r'\!' 
+ BRK + '\s?\[([^\]]*)\]' NOT_STRONG_RE = r'((^| )(\*|_)( |$))' AUTOLINK_RE = r'<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>' AUTOMAIL_RE = r'<([^> \!]*@[^> ]*)>' HTML_RE = r'(\<([a-zA-Z/][^\>]*?|\!--.*?--)\>)' ENTITY_RE = r'(&[\#a-zA-Z0-9]*;)' LINE_BREAK_RE = r' \n' def dequote(string): if ((string.startswith('"') and string.endswith('"')) or (string.startswith("'") and string.endswith("'"))): return string[1:-1] else: return string ATTR_RE = re.compile("\{@([^\}]*)=([^\}]*)}") def handleAttributes(text, parent): def attributeCallback(match): parent.set(match.group(1), match.group(2).replace('\n', ' ')) return ATTR_RE.sub(attributeCallback, text) class Pattern(object): def __init__(self, pattern, markdown_instance=None): self.pattern = pattern self.compiled_re = re.compile("^(.*?)%s(.*)$" % pattern, re.DOTALL | re.UNICODE) self.safe_mode = False if markdown_instance: self.markdown = markdown_instance def getCompiledRegExp(self): return self.compiled_re def handleMatch(self, m): pass def type(self): return self.__class__.__name__ def unescape(self, text): try: stash = self.markdown.treeprocessors['inline'].stashed_nodes except KeyError: return text def itertext(el): tag = el.tag if not isinstance(tag, util.string_type) and tag is not None: return if el.text: yield el.text for e in el: for s in itertext(e): yield s if e.tail: yield e.tail def get_stash(m): id = m.group(1) if id in stash: value = stash.get(id) if isinstance(value, util.string_type): return value else: return ''.join(itertext(value)) return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text) class SimpleTextPattern(Pattern): def handleMatch(self, m): return m.group(2) class EscapePattern(Pattern): def handleMatch(self, m): char = m.group(2) if char in self.markdown.ESCAPED_CHARS: return '%s%s%s' % (util.STX, ord(char), util.ETX) else: return None class SimpleTagPattern(Pattern): def __init__(self, pattern, tag): Pattern.__init__(self, pattern) self.tag = tag def handleMatch(self, m): el = util.etree.Element(self.tag) el.text = m.group(3) return el class SubstituteTagPattern(SimpleTagPattern): def handleMatch(self, m): return util.etree.Element(self.tag) class BacktickPattern(Pattern): def __init__(self, pattern): Pattern.__init__(self, pattern) self.tag = "code" def handleMatch(self, m): el = util.etree.Element(self.tag) el.text = util.AtomicString(m.group(3).strip()) return el class DoubleTagPattern(SimpleTagPattern): def handleMatch(self, m): tag1, tag2 = self.tag.split(",") el1 = util.etree.Element(tag1) el2 = util.etree.SubElement(el1, tag2) el2.text = m.group(3) if len(m.groups()) == 5: el2.tail = m.group(4) return el1 class HtmlPattern(Pattern): def handleMatch(self, m): rawhtml = self.unescape(m.group(2)) place_holder = self.markdown.htmlStash.store(rawhtml) return place_holder
MIT License
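The context of the record above is python-markdown 2.x's inline-pattern machinery. A minimal sketch of registering one of the shown pattern classes through an extension; the ~~strike~~ syntax and the extension name are illustrative choices, not part of the record:

# Assumes the 2.x OrderedDict-style pattern registry used by this bundled
# copy of python-markdown; 3.x renamed these classes and registry methods.
import markdown
from markdown.extensions import Extension
from markdown.inlinepatterns import SimpleTagPattern

DEL_RE = r'(~~)(.+?)~~'          # ~~text~~ -> <del>text</del>

class DelExtension(Extension):
    def extendMarkdown(self, md, md_globals):
        # insert the new pattern just before the built-in 'not_strong' one
        md.inlinePatterns.add('del', SimpleTagPattern(DEL_RE, 'del'), '<not_strong')

print(markdown.markdown('this is ~~gone~~', extensions=[DelExtension()]))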
dywlavender/multi-label-classification
backbone/NA/mobilenet.py
MobileNet
python
def MobileNet(input_shape=None, alpha=1.0, depth_multiplier=1, dropout=1e-3, include_top=True, weights='imagenet', input_tensor=None, pooling=None, classes=1000, **kwargs): global backend, layers, models, keras_utils backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs) if not (weights in {'imagenet', None} or os.path.exists(weights)): raise ValueError('The `weights` argument should be either ' '`None` (random initialization), `imagenet` ' '(pre-training on ImageNet), ' 'or the path to the weights file to be loaded.') if weights == 'imagenet' and include_top and classes != 1000: raise ValueError('If using `weights` as `"imagenet"` with `include_top` ' 'as true, `classes` should be 1000') if input_shape is None: default_size = 224 else: if backend.image_data_format() == 'channels_first': rows = input_shape[1] cols = input_shape[2] else: rows = input_shape[0] cols = input_shape[1] if rows == cols and rows in [128, 160, 192, 224]: default_size = rows else: default_size = 224 input_shape = _obtain_input_shape(input_shape, default_size=default_size, min_size=32, data_format=backend.image_data_format(), require_flatten=include_top, weights=weights) if backend.image_data_format() == 'channels_last': row_axis, col_axis = (0, 1) else: row_axis, col_axis = (1, 2) rows = input_shape[row_axis] cols = input_shape[col_axis] if weights == 'imagenet': if depth_multiplier != 1: raise ValueError('If imagenet weights are being loaded, ' 'depth multiplier must be 1') if alpha not in [0.25, 0.50, 0.75, 1.0]: raise ValueError('If imagenet weights are being loaded, ' 'alpha can be one of' '`0.25`, `0.50`, `0.75` or `1.0` only.') if rows != cols or rows not in [128, 160, 192, 224]: rows = 224 warnings.warn('`input_shape` is undefined or non-square, ' 'or `rows` is not in [128, 160, 192, 224]. 
' 'Weights for input shape (224, 224) will be' ' loaded as the default.') if input_tensor is None: img_input = layers.Input(shape=input_shape) else: if not backend.is_keras_tensor(input_tensor): img_input = layers.Input(tensor=input_tensor, shape=input_shape) else: img_input = input_tensor x = _conv_bn(img_input, 32, alpha, strides=(2, 2)) x = _depthwise_conv_block(x, 64, alpha, depth_multiplier, block_id=1) x = _depthwise_conv_block(x, 128, alpha, depth_multiplier, strides=(2, 2), block_id=2) x = _depthwise_conv_block(x, 128, alpha, depth_multiplier, block_id=3) x = _depthwise_conv_block(x, 256, alpha, depth_multiplier, strides=(2, 2), block_id=4) x = _depthwise_conv_block(x, 256, alpha, depth_multiplier, block_id=5) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, strides=(2, 2), block_id=6) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=7) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=8) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=9) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=10) x = _depthwise_conv_block(x, 512, alpha, depth_multiplier, block_id=11) x = _depthwise_conv_block(x, 1024, alpha, depth_multiplier, strides=(2, 2), block_id=12) x = _depthwise_conv_block(x, 1024, alpha, depth_multiplier, block_id=13) if include_top: if backend.image_data_format() == 'channels_first': shape = (int(1024 * alpha), 1, 1) else: shape = (1, 1, int(1024 * alpha)) x = layers.GlobalAveragePooling2D()(x) x = layers.Reshape(shape, name='reshape_1')(x) x = layers.Dropout(dropout, name='dropout')(x) x = layers.Conv2D(classes, (1, 1), padding='same', name='conv_preds')(x) x = layers.Reshape((classes,), name='reshape_2')(x) x = layers.Activation('softmax', name='act_softmax')(x) else: if pooling == 'avg': x = layers.GlobalAveragePooling2D()(x) elif pooling == 'max': x = layers.GlobalMaxPooling2D()(x) if input_tensor is not None: inputs = keras_utils.get_source_inputs(input_tensor) else: inputs = img_input model = models.Model(inputs, x, name='mobilenet_%0.2f_%s' % (alpha, rows)) if weights == 'imagenet': if alpha == 1.0: alpha_text = '1_0' elif alpha == 0.75: alpha_text = '7_5' elif alpha == 0.50: alpha_text = '5_0' else: alpha_text = '2_5' if include_top: model_name = 'mobilenet_%s_%d_tf.h5' % (alpha_text, rows) weight_path = BASE_WEIGHT_PATH + model_name weights_path = keras_utils.get_file(model_name, weight_path, cache_subdir='models') else: model_name = 'mobilenet_%s_%d_tf_no_top.h5' % (alpha_text, rows) weight_path = BASE_WEIGHT_PATH + model_name weights_path = keras_utils.get_file(model_name, weight_path, cache_subdir='models') model.load_weights(weights_path) elif weights is not None: model.load_weights(weights) return model
Instantiates the MobileNet architecture. # Arguments input_shape: optional shape tuple, only to be specified if `include_top` is False (otherwise the input shape has to be `(224, 224, 3)` (with `channels_last` data format) or (3, 224, 224) (with `channels_first` data format). It should have exactly 3 inputs channels, and width and height should be no smaller than 32. E.g. `(200, 200, 3)` would be one valid value. alpha: controls the width of the network. This is known as the width multiplier in the MobileNet paper. - If `alpha` < 1.0, proportionally decreases the number of filters in each layer. - If `alpha` > 1.0, proportionally increases the number of filters in each layer. - If `alpha` = 1, default number of filters from the paper are used at each layer. depth_multiplier: depth multiplier for depthwise convolution. This is called the resolution multiplier in the MobileNet paper. dropout: dropout rate include_top: whether to include the fully-connected layer at the top of the network. weights: one of `None` (random initialization), 'imagenet' (pre-training on ImageNet), or the path to the weights file to be loaded. input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to use as image input for the model. pooling: Optional pooling mode for feature extraction when `include_top` is `False`. - `None` means that the output of the model will be the 4D tensor output of the last convolutional block. - `avg` means that global average pooling will be applied to the output of the last convolutional block, and thus the output of the model will be a 2D tensor. - `max` means that global max pooling will be applied. classes: optional number of classes to classify images into, only to be specified if `include_top` is True, and if no `weights` argument is specified. # Returns A Keras model instance. # Raises ValueError: in case of invalid argument for `weights`, or invalid input shape. RuntimeError: If attempting to run this model with a backend that does not support separable convolutions.
https://github.com/dywlavender/multi-label-classification/blob/ec8567169b3f1b08c7dafeb71f007275621a5a95/backbone/NA/mobilenet.py#L107-L320
from __future__ import print_function from __future__ import absolute_import from __future__ import division import os import warnings from . import get_submodules_from_kwargs from . import imagenet_utils from .imagenet_utils import decode_predictions from .imagenet_utils import _obtain_input_shape from tensorflow import keras BASE_WEIGHT_PATH = ('https://github.com/fchollet/deep-learning-models/' 'releases/download/v0.6/') backend = None layers = None models = None keras_utils = None def preprocess_input(x, **kwargs): return imagenet_utils.preprocess_input(x, mode='tf', **kwargs) class MobileNet(object): L2_WEIGHT = 5.e-4 BATCH_NORM_DECAY = 0.9 BATCH_NORM_EPSILON = 1e-5 BATCH_SIZE_AXIS = 0 ROW_AXIS = 1 COL_AXIS = 2 CHANNEL_AXIS = 3
MIT License
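A hedged usage sketch for the MobileNet builder documented above. Whether the bare call works depends on how this backbone package's get_submodules_from_kwargs resolves the Keras submodules, so treat this as a sketch rather than a guaranteed-runnable snippet; random weights are used to avoid the ImageNet download:

# Import path taken from the record; the keyword arguments mirror the
# documented signature.
from backbone.NA.mobilenet import MobileNet

model = MobileNet(input_shape=(224, 224, 3),
                  alpha=1.0,
                  include_top=False,   # drop the 1000-class ImageNet head
                  weights=None,        # random init, no weight download
                  pooling='avg')       # 2-D feature vector per image
model.summary()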
hfaran/tornado-json
tornado_json/routes.py
get_module_routes
python
def get_module_routes(module_name, custom_routes=None, exclusions=None, arg_pattern=r'(?P<{}>[a-zA-Z0-9_\-]+)'): def has_method(module, cls_name, method_name): return all([ method_name in vars(getattr(module, cls_name)), is_method(reduce(getattr, [module, cls_name, method_name])) ]) def yield_args(module, cls_name, method_name): wrapped_method = reduce(getattr, [module, cls_name, method_name]) method = extract_method(wrapped_method) argspec_args = getattr(method, "__argspec_args", inspect.getfullargspec(method).args) return [a for a in argspec_args if a not in ["self"]] def generate_auto_route(module, module_name, cls_name, method_name, url_name): def get_handler_name(): if url_name == "__self__": if cls_name.lower().endswith('handler'): return cls_name.lower().replace('handler', '', 1) return cls_name.lower() else: return url_name def get_arg_route(): if yield_args(module, cls_name, method_name): return "/{}/?$".format("/".join( [arg_pattern.format(argname) for argname in yield_args(module, cls_name, method_name)] )) return r"/?" return "/{}/{}{}".format( "/".join(module_name.split(".")[1:]), get_handler_name(), get_arg_route() ) if not custom_routes: custom_routes = [] if not exclusions: exclusions = [] module = importlib.import_module(module_name) custom_routes_s = [c.__name__ for r, c in custom_routes] rhs = {cls_name: cls for (cls_name, cls) in inspect.getmembers(module, inspect.isclass)} auto_routes = list(chain(*[ list(set(chain(*[ [ ( generate_auto_route( module, module_name, cls_name, method_name, url_name ), getattr(module, cls_name) ) for url_name in getattr(module, cls_name).__url_names__ ] + [ ( url, getattr(module, cls_name) ) for url in getattr(module, cls_name).__urls__ ] for method_name in HTTP_METHODS if has_method( module, cls_name, method_name) ]))) for cls_name, cls in rhs.items() if is_handler_subclass(cls) and cls_name not in (custom_routes_s + exclusions) ])) routes = auto_routes + custom_routes return routes
r"""Create and return routes for module_name Routes are (url, RequestHandler) tuples :returns: list of routes for ``module_name`` with respect to ``exclusions`` and ``custom_routes``. Returned routes are with URLs formatted such that they are forward-slash-separated by module/class level and end with the lowercase name of the RequestHandler (it will also remove 'handler' from the end of the name of the handler). For example, a requesthandler with the name ``helloworld.api.HelloWorldHandler`` would be assigned the url ``/api/helloworld``. Additionally, if a method has extra arguments aside from ``self`` in its signature, routes with URL patterns will be generated to match ``r"(?P<{}>[a-zA-Z0-9_\-]+)".format(argname)`` for each argument. The aforementioned regex will match ONLY values with alphanumeric, hyphen and underscore characters. You can provide your own pattern by setting a ``arg_pattern`` param. :rtype: [(url, RequestHandler), ... ] :type module_name: str :param module_name: Name of the module to get routes for :type custom_routes: [(str, RequestHandler), ... ] :param custom_routes: List of routes that have custom URLs and therefore should be automagically generated :type exclusions: [str, str, ...] :param exclusions: List of RequestHandler names that routes should not be generated for :type arg_pattern: str :param arg_pattern: Default pattern for extra arguments of any method
https://github.com/hfaran/tornado-json/blob/fcc551e4b78cac9245e36376329d84afda296284/tornado_json/routes.py#L43-L199
import importlib import inspect import pkgutil from functools import reduce from itertools import chain from tornado_json.constants import HTTP_METHODS from tornado_json.utils import extract_method, is_method, is_handler_subclass def get_routes(package): return list(chain(*[get_module_routes(modname) for modname in gen_submodule_names(package)])) def gen_submodule_names(package): for importer, modname, ispkg in pkgutil.walk_packages( path=package.__path__, prefix=package.__name__ + '.', onerror=lambda x: None): yield modname
MIT License
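A minimal sketch of turning the auto-generated routes into a running Tornado application; `myapi` stands in for a real package of handler subclasses and is purely hypothetical:

# get_routes() (shown in the context above) walks a package and calls
# get_module_routes() for every submodule it finds.
import tornado.ioloop
import tornado.web

from tornado_json.routes import get_routes
import myapi   # hypothetical package of tornado_json request handlers

app = tornado.web.Application(get_routes(myapi))

if __name__ == "__main__":
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()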
covid-19-impact-lab/sid
src/sid/rapid_tests.py
_create_sensitivity
python
def _create_sensitivity(states, sensitivity_params): sensitivity = pd.Series(np.nan, index=states.index) p_pos_preinfectious = sensitivity_params.loc["pre-infectious"] p_pos_start_infectious = sensitivity_params.loc["start_infectious"] p_pos_while_infectious = sensitivity_params.loc["while_infectious"] p_pos_after_infectious = sensitivity_params.loc["after_infectious"] sensitivity[states["cd_infectious_true"] > 0] = p_pos_preinfectious sensitivity[states["infectious"]] = p_pos_while_infectious sensitivity[states["cd_infectious_true"] == 0] = p_pos_start_infectious within_10_days = states["cd_infectious_true"].between(-10, 0) sensitivity[~states["infectious"] & within_10_days] = p_pos_after_infectious if sensitivity.isnull().any(): raise ValueError( "There are NaN left in the person-dependent sensitivity. " "The likeliest explanation is that _create_sensitivity was called " "with uninfected individuals (i.e. with individuals where " "`cd_infectious_true` < -10)." ) return sensitivity
Create the sensitivity series.
https://github.com/covid-19-impact-lab/sid/blob/ac01bd49ff51cd5b1aebcc0c058c6ca0baaeb973/src/sid/rapid_tests.py#L138-L158
import itertools import warnings from typing import Callable from typing import Optional import numpy as np import pandas as pd from sid.shared import boolean_choices from sid.validation import validate_return_is_series_or_ndarray def perform_rapid_tests( date: pd.Timestamp, states: pd.DataFrame, params: pd.DataFrame, rapid_test_models: Optional[Callable], contacts: pd.DataFrame, seed: itertools.count, ) -> pd.DataFrame: if rapid_test_models: receives_rapid_test = _compute_who_receives_rapid_tests( date=date, states=states, params=params, rapid_test_models=rapid_test_models, contacts=contacts, seed=seed, ) is_tested_positive = _sample_test_outcome( states, receives_rapid_test, params, seed ) states = _update_states_with_rapid_tests_outcomes( states, receives_rapid_test, is_tested_positive ) return states def apply_reactions_to_rapid_tests( date, states, params, rapid_test_reaction_models, contacts, seed, ): if rapid_test_reaction_models: for model in rapid_test_reaction_models.values(): loc = model.get("loc", params.index) func = model["model"] if model["start"] <= date <= model["end"]: contacts = func( contacts=contacts, states=states, params=params.loc[loc], seed=next(seed), ) return contacts def _compute_who_receives_rapid_tests( date, states, params, rapid_test_models, contacts, seed ): receives_rapid_test = pd.Series(index=states.index, data=False) for name, model in rapid_test_models.items(): loc = model.get("loc", params.index) func = model["model"] if model["start"] <= date <= model["end"]: new_receives_rapid_test = func( receives_rapid_test=receives_rapid_test.copy(deep=True), states=states, params=params.loc[loc], contacts=contacts, seed=next(seed), ) new_receives_rapid_test = validate_return_is_series_or_ndarray( new_receives_rapid_test, name, "rapid_test_models", states.index ) receives_rapid_test.loc[new_receives_rapid_test] = True return receives_rapid_test def _sample_test_outcome(states, receives_rapid_test, params, seed): np.random.seed(next(seed)) is_tested_positive = pd.Series(index=states.index, data=False) with warnings.catch_warnings(): warnings.filterwarnings( "ignore", message="indexing past lexsort depth may impact performance." ) sensitivity_params = params.loc[("rapid_test", "sensitivity"), "value"] infected = states["cd_infectious_true"] >= -10 receives_test_and_is_infected = infected & receives_rapid_test sensitivity = _create_sensitivity( states=states[receives_test_and_is_infected], sensitivity_params=sensitivity_params, ) is_truly_positive = boolean_choices(sensitivity) is_tested_positive.loc[receives_test_and_is_infected] = is_truly_positive specificity = params.loc[("rapid_test", "specificity", "specificity"), "value"] uninfected_test_receivers = ~infected & receives_rapid_test p_false_positive = np.full(uninfected_test_receivers.sum(), 1 - specificity) is_falsely_positive = boolean_choices(p_false_positive) is_tested_positive.loc[uninfected_test_receivers] = is_falsely_positive return is_tested_positive
MIT License
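A toy illustration of the person-dependent sensitivity lookup above; the numeric values are made up, and the import reaches into a private helper whose path is taken from the record:

# Each row is one tested individual; cd_infectious_true counts days
# until (positive) or since (non-positive) the start of infectiousness.
import pandas as pd
from sid.rapid_tests import _create_sensitivity

states = pd.DataFrame({
    "cd_infectious_true": [2, 0, -3, -9],
    "infectious": [False, True, True, False],
})
sensitivity_params = pd.Series(
    [0.35, 0.6, 0.9, 0.5],
    index=["pre-infectious", "start_infectious",
           "while_infectious", "after_infectious"],
)
# -> 0.35 (pre), 0.6 (first infectious day), 0.9 (infectious), 0.5 (after)
print(_create_sensitivity(states, sensitivity_params))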
googlecloudplatform/appengine-python-standard
src/google/appengine/ext/ndb/tasklets.py
add_flow_exception
python
def add_flow_exception(exc): global _flow_exceptions if not isinstance(exc, type) or not issubclass(exc, Exception): raise TypeError('Expected an Exception subclass, got %r' % (exc,)) as_set = set(_flow_exceptions) as_set.add(exc) _flow_exceptions = tuple(as_set)
Add an exception that should not be logged. The argument must be a subclass of Exception.
https://github.com/googlecloudplatform/appengine-python-standard/blob/42c99c7a83f4ed50c724ecdde119a606a3ca58f3/src/google/appengine/ext/ndb/tasklets.py#L215-L225
import collections import functools import logging import os import sys import threading import types import weakref from google.appengine.api import apiproxy_rpc from google.appengine.api import apiproxy_stub from google.appengine.api import apiproxy_stub_map from google.appengine.api import datastore from google.appengine.api import datastore_errors from google.appengine.api import full_app_id from google.appengine.api import namespace_manager from google.appengine.datastore import datastore_pbs from google.appengine.datastore import datastore_rpc from google.appengine.ext.ndb import eventloop from google.appengine.ext.ndb import utils from google.appengine.runtime import apiproxy import six from six.moves import map __all__ = ['Return', 'tasklet', 'synctasklet', 'toplevel', 'sleep', 'add_flow_exception', 'get_return_value', 'get_context', 'set_context', 'make_default_context', 'make_context', 'Future', 'MultiFuture', 'QueueFuture', 'SerialQueueFuture', 'ReducingFuture', ] _logging_debug = utils.logging_debug _CALLBACK_KEY = '__CALLBACK__' def _is_generator(obj): return isinstance(obj, types.GeneratorType) class _State(threading.local): def __init__(self): super(_State, self).__init__() self.current_context = None self.all_generators = weakref.WeakSet() self.all_pending = set() def set_context(self, ctx): self.current_context = ctx def add_generator(self, gen): if _CALLBACK_KEY not in os.environ: apiproxy.SetRequestEndCallback(self.reset) os.environ[_CALLBACK_KEY] = '1' _logging_debug('all_generators: add %s', gen) self.all_generators.add(gen) def add_pending(self, fut): if _CALLBACK_KEY not in os.environ: apiproxy.SetRequestEndCallback(self.reset) os.environ[_CALLBACK_KEY] = '1' _logging_debug('all_pending: add %s', fut) self.all_pending.add(fut) def remove_pending(self, fut, status='success'): if fut in self.all_pending: _logging_debug('all_pending: %s: remove %s', status, fut) self.all_pending.remove(fut) else: _logging_debug('all_pending: %s: not found %s', status, fut) def clear_all_generators(self): if self.all_generators: _logging_debug('all_generators: clear %s', self.all_generators) for gen in self.all_generators: gen.close() self.all_generators.clear() else: _logging_debug('all_generators: clear no-op') def clear_all_pending(self): if self.all_pending: _logging_debug('all_pending: clear %s', self.all_pending) self.all_pending.clear() else: _logging_debug('all_pending: clear no-op') def dump_all_pending(self, verbose=False): pending = [] for fut in self.all_pending: if verbose: line = fut.dump() + ('\n' + '-' * 40) else: line = fut.dump_stack() pending.append(line) return '\n'.join(pending) def reset(self, unused_req_id): self.current_context = None ev = eventloop.get_event_loop() ev.clear() self.clear_all_pending() self.clear_all_generators() _state = _State() _flow_exceptions = ()
Apache License 2.0
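A short sketch of the function documented above: registering an exception type that NDB tasklets should treat as normal control flow rather than log as an error. The redirect exception here is an illustrative example, not part of the record:

from google.appengine.ext.ndb import tasklets

class RedirectException(Exception):
    """Raised to short-circuit a request with an HTTP redirect."""

# From now on, tasklets that raise RedirectException are not logged
# as failures by the event loop machinery.
tasklets.add_flow_exception(RedirectException)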
iwwxiong/flask_restapi
example/demo/scripts/test.py
UnitTest.run
python
def run(self): tests = unittest.TestLoader().discover('tests', pattern='test_*.py') unittest.TextTestRunner(verbosity=1).run(tests)
Run unit tests.
https://github.com/iwwxiong/flask_restapi/blob/57fca3bf07d913b31b6b7ef877328b0e07056c39/example/demo/scripts/test.py#L16-L19
import unittest from flask_script import Command from tests.test_renders import * class UnitTest(Command): command_name = 'runtest'
MIT License
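A sketch of hooking the UnitTest command above into a Flask-Script manager; `create_app` is a hypothetical application factory and the package path mirrors the record's example project layout:

from flask_script import Manager
from demo.scripts.test import UnitTest
from demo import create_app   # assumed application factory

manager = Manager(create_app)
# registers `python manage.py runtest`, using the command_name attribute
manager.add_command(UnitTest.command_name, UnitTest())

if __name__ == '__main__':
    manager.run()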
coffeateam/coffea
coffea/lookup_tools/extractor.py
extractor.make_evaluator
python
def make_evaluator(self): if self._finalized: return evaluator(self._names, self._types, self._weights) else: raise Exception("Cannot make an evaluator from unfinalized extractor!")
produce an evaluator based on the finalized extractor
https://github.com/coffeateam/coffea/blob/6ce872671c39d788fc9fe5e981862d4c6f7658f6/coffea/lookup_tools/extractor.py#L147-L152
from __future__ import print_function import os from coffea.lookup_tools.evaluator import evaluator from coffea.lookup_tools.root_converters import convert_histo_root_file from coffea.lookup_tools.csv_converters import convert_btag_csv_file from coffea.lookup_tools.json_converters import convert_histo_json_file from coffea.lookup_tools.txt_converters import * file_converters = { "root": {"default": convert_histo_root_file, "histo": convert_histo_root_file}, "csv": {"default": convert_btag_csv_file, "btag": convert_btag_csv_file}, "json": {"default": convert_histo_json_file, "histo": convert_histo_json_file}, "txt": { "default": convert_jec_txt_file, "jec": convert_jec_txt_file, "jersf": convert_jersf_txt_file, "jr": convert_jr_txt_file, "junc": convert_junc_txt_file, "ea": convert_effective_area_file, }, } class extractor(object): def __init__(self): self._weights = [] self._names = {} self._types = [] self._filecache = {} self._finalized = False def add_weight_set(self, local_name, thetype, weights): if self._finalized: raise Exception("extractor is finalized cannot add new weights!") if local_name in self._names.keys(): raise Exception('weights name "{}" already defined'.format(local_name)) self._names[local_name] = len(self._weights) self._types.append(thetype) self._weights.append(weights) def add_weight_sets(self, weightsdescs): for weightdesc in weightsdescs: if weightdesc[0] == "#": continue temp = weightdesc.strip().split(" ") if len(temp) != 3: raise Exception( '"{}" not formatted as "<local name> <name> <weights file>"'.format( weightdesc ) ) (local_name, name, thefile) = tuple(temp) if name == "*": self.import_file(thefile) weights = self._filecache[thefile] for key, value in weights.items(): if local_name == "*": self.add_weight_set(key[0], key[1], value) else: keyfilename, keymyname = key[0], key[1] if isinstance(keyfilename, bytes): keyfilename = keyfilename.decode() if isinstance(keymyname, bytes): keymyname = keymyname.decode() self.add_weight_set(local_name + keyfilename, keymyname, value) else: weights, thetype = self.extract_from_file(thefile, name) self.add_weight_set(local_name, thetype, weights) def import_file(self, thefile): if thefile not in self._filecache.keys(): drop_gz = thefile.replace(".gz", "") file_dots = os.path.basename(drop_gz).split(".") theformat = file_dots[-1].strip() thetype = "default" if len(file_dots) > 2: thetype = file_dots[-2] self._filecache[thefile] = file_converters[theformat][thetype](thefile) def extract_from_file(self, thefile, name): self.import_file(thefile) weights = self._filecache[thefile] names = {key[0]: key[1] for key in weights.keys()} if name not in names.keys(): raise Exception('Weights named "{}" not in {}!'.format(name, thefile)) return (weights[(name, names[name])], names[name]) def finalize(self, reduce_list=None): if self._finalized: raise Exception("extractor is already finalized!") del self._filecache if reduce_list is not None: names = {} types = [] weights = [] for i, name in enumerate(reduce_list): if name not in self._names: raise Exception('Weights named "{}" not in extractor!'.format(name)) names[name] = i types.append(self._types[self._names[name]]) weights.append(self._weights[self._names[name]]) self._names = names self._types = types self._weights = weights self._finalized = True
BSD 3-Clause New or Revised License
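A minimal sketch of the extractor workflow that ends in make_evaluator(). The ROOT file and the histogram name inside it are placeholders, so this only runs against a real correction file:

from coffea.lookup_tools.extractor import extractor

ext = extractor()
# "<local name> <name> <weights file>" per the add_weight_sets docstring
ext.add_weight_sets(["sf_total scalefactors_Tight_Electron egamma_sf.histo.root"])
ext.finalize()
evaluator = ext.make_evaluator()
# evaluator["sf_total"](eta, pt) would then return per-object scale factors.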
endlesstrax/pyanchor
pyanchor/link_checker.py
LinkResults.build_results_dictionary
python
def build_results_dictionary(self) -> dict: results = {} try: for tag in self.all_atags: href = tag.get("href") parsed_url = self.check_link_for_http_scheme(href) if parsed_url is not None: parsed_url_status_code = requests.get(parsed_url).status_code if parsed_url_status_code in results: results[parsed_url_status_code].append(parsed_url) else: results[parsed_url_status_code] = [parsed_url] except: pass return results
Build the final results dictionary. Once all_tags has been populated in the AllTags base class, the final results dictionary is build by testing each link. Returns: A dictionary with the key = the HTTP response, and the value = a List of URLs that achieved that response code.
https://github.com/endlesstrax/pyanchor/blob/bd8f045ac85ad0457b5c69d6065ee78497e97b6b/pyanchor/link_checker.py#L87-L114
import requests from bs4 import BeautifulSoup import re class AllTags: def __init__(self, url: str): self.base_url = url if url.endswith("/") else url + "/" self.all_atags = self.find_all_atags(self.base_url) def __str__(self) -> str: return f"All links for {self.base_url}" def find_all_atags(self, url: str): r = requests.get(url) if r.status_code == 200: soup = BeautifulSoup(r.content, "html.parser") return soup.find_all("a") class LinkResults(AllTags): def __init__(self, url: str): super().__init__(url) self.results = self.build_results_dictionary() def check_link_for_http_scheme(self, href: str) -> str: if href.startswith(self.base_url): return href elif href.startswith("/"): href = self.base_url + href.lstrip("/") return href elif href.startswith("./"): return self.base_url + href.lstrip("./") elif href.startswith("../"): return self.base_url + href.lstrip("../") else: return None
MIT License
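A sketch of collecting the HTTP-status to URL map built by the class above; it issues real requests, so it needs network access and a reachable page:

from pyanchor.link_checker import LinkResults

results = LinkResults("https://example.com/")   # any reachable page
for status_code, urls in results.results.items():
    print(status_code, len(urls), "links")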
dbrattli/oslash
oslash/identity.py
Identity.unit
python
def unit(cls, value: TSource) -> 'Identity[TSource]': return Identity(value)
Initialize a new identity.
https://github.com/dbrattli/oslash/blob/c271c7633daf9d72393b419cfc9229e427e6a42a/oslash/identity.py#L21-L23
from functools import partial from typing import TypeVar, Generic, Callable from .typing import Functor, Monad, Applicative TSource = TypeVar('TSource') TResult = TypeVar('TResult') class Identity(Generic[TSource]): def __init__(self, value: TSource) -> None: self._value = value @classmethod
MIT License
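A tiny sketch of the classmethod above: Identity.unit wraps a plain value exactly like the constructor shown in the context does.

from oslash.identity import Identity

boxed = Identity.unit(21)    # wrap a plain value in the Identity container
also_boxed = Identity(21)    # equivalent, per the __init__ shown above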
openstack/futurist
futurist/periodics.py
PeriodicWorker.add
python
def add(self, cb, *args, **kwargs): if not six.callable(cb): raise ValueError("Periodic callback %r must be callable" % cb) missing_attrs = _check_attrs(cb) if missing_attrs: raise ValueError("Periodic callback %r missing required" " attributes %s" % (cb, missing_attrs)) if not cb._is_periodic: return None now = self._now_func() with self._waiter: cb_index = len(self._works) cb_metrics = self._INITIAL_METRICS.copy() work = Work(utils.get_callback_name(cb), cb, args, kwargs) watcher = Watcher(cb_metrics, work) self._works.append(work) self._watchers.append((cb_metrics, watcher)) if cb._periodic_run_immediately: self._immediates.append(cb_index) else: next_run = self._initial_schedule_strategy(cb, now) self._schedule.push(next_run, cb_index) self._waiter.notify_all() return watcher
Adds a new periodic callback to the current worker. Returns a :py:class:`.Watcher` if added successfully or the value ``None`` if not (or raises a ``ValueError`` if the callback is not correctly formed and/or decorated). :param cb: a callable object/method/function previously decorated with the :py:func:`.periodic` decorator :type cb: callable
https://github.com/openstack/futurist/blob/d70c7e5f4686ba0ea9235b280f8bdd07fecfeccb/futurist/periodics.py#L820-L853
import collections import fractions import functools import heapq import inspect import logging import math import random import threading from concurrent import futures try: import prettytable except ImportError: prettytable = None import six import futurist from futurist import _utils as utils LOG = logging.getLogger(__name__) class NeverAgain(Exception): _REQUIRED_ATTRS = ('_is_periodic', '_periodic_spacing', '_periodic_run_immediately') _DEFAULT_COLS = ('Name', 'Active', 'Periodicity', 'Runs in', 'Runs', 'Failures', 'Successes', 'Stop Requested', 'Average elapsed', 'Average elapsed waiting') PERIODIC = 'periodic' IMMEDIATE = 'immediate' class Work(collections.namedtuple("Work", ['name', 'callback', 'args', 'kwargs'])): def __call__(self): return self.callback(*self.args, **self.kwargs) class Watcher(object): def __init__(self, metrics, work): self._metrics = metrics self._work = work def __repr__(self): return ("<Watcher(metrics=%(metrics)s, work=%(work)s)" " object at 0x%(ident)x>") % dict(ident=id(self), work=self._work, metrics=self._metrics) @property def requested_stop(self): return self._metrics['requested_stop'] @property def work(self): return self._work @property def runs(self): return self._metrics['runs'] @property def successes(self): return self._metrics['successes'] @property def failures(self): return self._metrics['failures'] @property def elapsed(self): return self._metrics['elapsed'] @property def elapsed_waiting(self): return self._metrics['elapsed_waiting'] @property def average_elapsed_waiting(self): return self._metrics['elapsed_waiting'] / self._metrics['runs'] @property def average_elapsed(self): return self._metrics['elapsed'] / self._metrics['runs'] def _check_attrs(obj): missing_attrs = [] for attr_name in _REQUIRED_ATTRS: if not hasattr(obj, attr_name): missing_attrs.append(attr_name) return missing_attrs def is_periodic(obj): return callable(obj) and not _check_attrs(obj) def periodic(spacing, run_immediately=False, enabled=True): if spacing <= 0 and enabled: raise ValueError("Periodicity/spacing must be greater than" " zero instead of %s" % spacing) def wrapper(f): f._is_periodic = enabled f._periodic_spacing = spacing f._periodic_run_immediately = run_immediately @six.wraps(f) def decorator(*args, **kwargs): return f(*args, **kwargs) return decorator return wrapper def _add_jitter(max_percent_jitter): if max_percent_jitter > 1 or max_percent_jitter < 0: raise ValueError("Invalid 'max_percent_jitter', must be greater or" " equal to 0.0 and less than or equal to 1.0") def wrapper(func): rnd = random.SystemRandom() @six.wraps(func) def decorator(cb, started_at, finished_at, metrics): next_run = func(cb, started_at, finished_at, metrics) how_often = cb._periodic_spacing jitter = how_often * (rnd.random() * max_percent_jitter) return next_run + jitter decorator.__name__ += "_with_jitter" return decorator return wrapper def _last_finished_strategy(cb, started_at, finished_at, metrics): how_often = cb._periodic_spacing return finished_at + how_often def _last_started_strategy(cb, started_at, finished_at, metrics): how_often = cb._periodic_spacing return started_at + how_often def _aligned_last_finished_strategy(cb, started_at, finished_at, metrics): how_often = cb._periodic_spacing aligned_finished_at = finished_at - math.fmod(finished_at, how_often) return aligned_finished_at + how_often def _now_plus_periodicity(cb, now): how_often = cb._periodic_spacing return how_often + now class _Schedule(object): def __init__(self): self._ordering = [] def push(self, 
next_run, index): heapq.heappush(self._ordering, (next_run, index)) def __len__(self): return len(self._ordering) def fetch_next_run(self, index): for (next_run, a_index) in self._ordering: if a_index == index: return next_run return None def pop(self): return heapq.heappop(self._ordering) def _on_failure_log(log, cb, kind, spacing, exc_info, traceback=None): cb_name = utils.get_callback_name(cb) if all(exc_info) or not traceback: log.error("Failed to call %s '%s' (it runs every %0.2f" " seconds)", kind, cb_name, spacing, exc_info=exc_info) else: log.error("Failed to call %s '%s' (it runs every %0.2f" " seconds):\n%s", kind, cb_name, spacing, traceback) class _Runner(object): def __init__(self, now_func, retain_traceback=True): self.now_func = now_func self.retain_traceback = retain_traceback def run(self, work): failure = None started_at = self.now_func() try: work() except Exception: failure = utils.Failure(self.retain_traceback) finished_at = self.now_func() return (started_at, finished_at, failure) def _build(now_func, works, next_run_scheduler): schedule = _Schedule() now = None immediates = collections.deque() for index, work in enumerate(works): cb = work.callback if cb._periodic_run_immediately: immediates.append(index) else: if now is None: now = now_func() next_run = next_run_scheduler(cb, now) schedule.push(next_run, index) return immediates, schedule _SCHEDULE_RETRY_EXCEPTIONS = (RuntimeError, futurist.RejectedSubmission) class ExecutorFactory(object): shutdown = True def __call__(self): raise NotImplementedError() class ExistingExecutor(ExecutorFactory): def __init__(self, executor, shutdown=False): self._executor = executor self.shutdown = shutdown def __call__(self): return self._executor class PeriodicWorker(object): MAX_LOOP_IDLE = 30 _NO_OP_ARGS = () _NO_OP_KWARGS = {} _INITIAL_METRICS = { 'runs': 0, 'elapsed': 0, 'elapsed_waiting': 0, 'failures': 0, 'successes': 0, 'requested_stop': False, } _RESCHEDULE_DELAY = 0.9 _RESCHEDULE_JITTER = 0.2 DEFAULT_JITTER = fractions.Fraction(5, 100) BUILT_IN_STRATEGIES = { 'last_started': ( _last_started_strategy, _now_plus_periodicity, ), 'last_started_jitter': ( _add_jitter(DEFAULT_JITTER)(_last_started_strategy), _now_plus_periodicity, ), 'last_finished': ( _last_finished_strategy, _now_plus_periodicity, ), 'last_finished_jitter': ( _add_jitter(DEFAULT_JITTER)(_last_finished_strategy), _now_plus_periodicity, ), 'aligned_last_finished': ( _aligned_last_finished_strategy, _now_plus_periodicity, ), 'aligned_last_finished_jitter': ( _add_jitter(DEFAULT_JITTER)(_aligned_last_finished_strategy), _now_plus_periodicity, ), } @classmethod def create(cls, objects, exclude_hidden=True, log=None, executor_factory=None, cond_cls=threading.Condition, event_cls=threading.Event, schedule_strategy='last_started', now_func=utils.now, on_failure=None, args=_NO_OP_ARGS, kwargs=_NO_OP_KWARGS): callables = [] for obj in objects: for (name, member) in inspect.getmembers(obj): if name.startswith("_") and exclude_hidden: continue if six.callable(member): missing_attrs = _check_attrs(member) if not missing_attrs: callables.append((member, args, kwargs)) return cls(callables, log=log, executor_factory=executor_factory, cond_cls=cond_cls, event_cls=event_cls, schedule_strategy=schedule_strategy, now_func=now_func, on_failure=on_failure) def __init__(self, callables, log=None, executor_factory=None, cond_cls=threading.Condition, event_cls=threading.Event, schedule_strategy='last_started', now_func=utils.now, on_failure=None): if on_failure is not None and not 
six.callable(on_failure): raise ValueError("On failure callback %r must be" " callable" % on_failure) self._tombstone = event_cls() self._waiter = cond_cls() self._dead = event_cls() self._active = event_cls() self._cond_cls = cond_cls self._watchers = [] self._works = [] for (cb, args, kwargs) in callables: if not six.callable(cb): raise ValueError("Periodic callback %r must be callable" % cb) missing_attrs = _check_attrs(cb) if missing_attrs: raise ValueError("Periodic callback %r missing required" " attributes %s" % (cb, missing_attrs)) if cb._is_periodic: if args is None: args = self._NO_OP_ARGS if kwargs is None: kwargs = self._NO_OP_KWARGS.copy() cb_metrics = self._INITIAL_METRICS.copy() work = Work(utils.get_callback_name(cb), cb, args, kwargs) watcher = Watcher(cb_metrics, work) self._works.append(work) self._watchers.append((cb_metrics, watcher)) try: strategy = self.BUILT_IN_STRATEGIES[schedule_strategy] self._schedule_strategy = strategy[0] self._initial_schedule_strategy = strategy[1] except KeyError: valid_strategies = sorted(self.BUILT_IN_STRATEGIES.keys()) raise ValueError("Scheduling strategy '%s' must be one of" " %s selectable strategies" % (schedule_strategy, valid_strategies)) self._immediates, self._schedule = _build( now_func, self._works, self._initial_schedule_strategy) self._log = log or LOG if executor_factory is None: executor_factory = lambda: futurist.SynchronousExecutor() if on_failure is None: on_failure = functools.partial(_on_failure_log, self._log) self._on_failure = on_failure self._executor_factory = executor_factory self._now_func = now_func def __len__(self): return len(self._works) def _run(self, executor, runner, auto_stop_when_empty): barrier = utils.Barrier(cond_cls=self._cond_cls) rnd = random.SystemRandom() def _process_scheduled(): with self._waiter: while (not self._schedule and not self._tombstone.is_set() and not self._immediates): self._waiter.wait(self.MAX_LOOP_IDLE) if self._tombstone.is_set(): return if self._immediates: return submitted_at = now = self._now_func() next_run, index = self._schedule.pop() when_next = next_run - now if when_next <= 0: work = self._works[index] self._log.debug("Submitting periodic" " callback '%s'", work.name) try: fut = executor.submit(runner.run, work) except _SCHEDULE_RETRY_EXCEPTIONS as exc: delay = (self._RESCHEDULE_DELAY + rnd.random() * self._RESCHEDULE_JITTER) self._log.error("Failed to submit periodic callback " "'%s', retrying after %.2f sec. " "Error: %s", work.name, delay, exc) self._schedule.push(self._now_func() + delay, index) else: barrier.incr() fut.add_done_callback(functools.partial(_on_done, PERIODIC, work, index, submitted_at)) else: self._schedule.push(next_run, index) when_next = min(when_next, self.MAX_LOOP_IDLE) self._waiter.wait(when_next) def _process_immediates(): with self._waiter: try: index = self._immediates.popleft() except IndexError: pass else: work = self._works[index] submitted_at = self._now_func() self._log.debug("Submitting immediate" " callback '%s'", work.name) try: fut = executor.submit(runner.run, work) except _SCHEDULE_RETRY_EXCEPTIONS as exc: self._log.error("Failed to submit immediate callback " "'%s', retrying. 
Error: %s", work.name, exc) self._immediates.append(index) else: barrier.incr() fut.add_done_callback(functools.partial(_on_done, IMMEDIATE, work, index, submitted_at)) def _on_done(kind, work, index, submitted_at, fut): cb = work.callback started_at, finished_at, failure = fut.result() cb_metrics, _watcher = self._watchers[index] cb_metrics['runs'] += 1 schedule_again = True if failure is not None: if not issubclass(failure.exc_type, NeverAgain): cb_metrics['failures'] += 1 try: self._on_failure(cb, kind, cb._periodic_spacing, failure.exc_info, traceback=failure.traceback) except Exception as exc: self._log.error("On failure callback %r raised an" " unhandled exception. Error: %s", self._on_failure, exc) else: cb_metrics['successes'] += 1 schedule_again = False self._log.debug("Periodic callback '%s' raised " "'NeverAgain' " "exception, stopping any further " "execution of it.", work.name) else: cb_metrics['successes'] += 1 elapsed = max(0, finished_at - started_at) elapsed_waiting = max(0, started_at - submitted_at) cb_metrics['elapsed'] += elapsed cb_metrics['elapsed_waiting'] += elapsed_waiting with self._waiter: with barrier.decr_cm() as am_left: if schedule_again: next_run = self._schedule_strategy(cb, started_at, finished_at, cb_metrics) self._schedule.push(next_run, index) else: cb_metrics['requested_stop'] = True if (am_left <= 0 and len(self._immediates) == 0 and len(self._schedule) == 0 and auto_stop_when_empty): self._tombstone.set() self._waiter.notify_all() try: while not self._tombstone.is_set(): _process_immediates() _process_scheduled() finally: barrier.wait() def _on_finish(self): if not self._log.isEnabledFor(logging.DEBUG): return cols = list(_DEFAULT_COLS) for c in ['Runs in', 'Active', 'Periodicity']: cols.remove(c) self._log.debug( "Stopped running %s callbacks:\n%s", len(self._works), self.pformat(columns=cols) if prettytable else "statistics not available, PrettyTable missing" ) def pformat(self, columns=_DEFAULT_COLS): if prettytable is None: raise ImportError( "PrettyTable is required to use the pformat method") if not isinstance(columns, (list, tuple)): columns = list(columns) if not columns: raise ValueError("At least one of %s columns must" " be provided" % (set(_DEFAULT_COLS))) for c in columns: if c not in _DEFAULT_COLS: raise ValueError("Unknown column '%s', valid column names" " are %s" % (c, set(_DEFAULT_COLS))) tbl_rows = [] now = self._now_func() for index, work in enumerate(self._works): _cb_metrics, watcher = self._watchers[index] next_run = self._schedule.fetch_next_run(index) if watcher.requested_stop: active = False runs_in = 'n/a' elif next_run is None: active = True runs_in = 'n/a' else: active = False runs_in = "%0.4fs" % (max(0.0, next_run - now)) cb_row = { 'Name': work.name, 'Active': active, 'Periodicity': work.callback._periodic_spacing, 'Runs': watcher.runs, 'Runs in': runs_in, 'Failures': watcher.failures, 'Successes': watcher.successes, 'Stop Requested': watcher.requested_stop, } try: cb_row_avgs = [ "%0.4fs" % watcher.average_elapsed, "%0.4fs" % watcher.average_elapsed_waiting, ] except ZeroDivisionError: cb_row_avgs = ['.', '.'] cb_row['Average elapsed'] = cb_row_avgs[0] cb_row['Average elapsed waiting'] = cb_row_avgs[1] tbl_rows.append(cb_row) tbl = prettytable.PrettyTable(columns) for cb_row in tbl_rows: tbl_row = [] for c in columns: tbl_row.append(cb_row[c]) tbl.add_row(tbl_row) return tbl.get_string()
Apache License 2.0
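A sketch of registering a decorated callback with a PeriodicWorker and running it on a background thread. start(), stop() and wait() belong to the worker's public API but fall outside the excerpt shown above, so they are stated here as assumptions:

import threading
import time

from futurist import periodics

@periodics.periodic(2.0, run_immediately=True)
def heartbeat():
    print("still alive")

worker = periodics.PeriodicWorker([])    # constructed with no callbacks
watcher = worker.add(heartbeat)          # returns a Watcher for metrics

runner = threading.Thread(target=worker.start)
runner.daemon = True
runner.start()

time.sleep(5)
worker.stop()
worker.wait()
print(watcher.runs, "runs,", watcher.successes, "successes")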
zebrafishlabs/fastly-python
fastly/__init__.py
FastlyConnection.create_condition
python
def create_condition( self, service_id, version_number, name, _type, statement, priority="10", comment=None): body = self._formdata({ "name": name, "type": _type, "statement": statement, "priority": priority, "comment": comment, }, FastlyCondition.FIELDS) content = self._fetch("/service/%s/version/%d/condition" % (service_id, version_number), method="POST", body=body) return FastlyCondition(self, content)
Creates a new condition.
https://github.com/zebrafishlabs/fastly-python/blob/72be5db55819c0bd4316ab00170446d4707b5d06/fastly/__init__.py#L245-L263
from datetime import datetime import httplib2 import json import re import urllib from version import __version__ FASTLY_SCHEME = "https" FASTLY_HOST = "api.fastly.com" FASTLY_SESSION_REGEX = re.compile("(fastly\.session=[^;]+);") class FastlyRoles(object): USER = "user" BILLING = "billing" ENGINEER = "engineer" SUPERUSER = "superuser" class FastlyCacheSettingsAction(object): CACHE = "cache" PASS = "pass" RESTART = "restart" class FastlyConditionType(object): RESPONSE = "response" CACHE = "cache" REQUEST = "request" FETCH = "fetch" class FastlyHeaderAction(object): SET = "set" APPEND = "append" DELETE = "delete" REGEX = "regex" REGEX_ALL = "regex_repeat" class FastlyHeaderType(object): RESPONSE = "response" FETCH = "fetch" CACHE = "cache" REQUEST = "request" class FastlyRequestSettingAction(object): LOOKUP = "lookup" PASS = "pass" class FastlyForwardedForAction(object): CLEAR = "clear" LEAVE = "leave" APPEND = "append" APPEND_ALL = "append_all" OVERWRITE = "overwrite" class FastlyStatsType(object): ALL = "all" DAILY = "daily" HOURLY = "hourly" MINUTELY = "minutely" class FastlyDirectorType(object): RANDOM = 1 ROUNDROBIN = 2 HASH = 3 CLIENT = 4 class FastlyConnection(object): def __init__(self, api_key): self._session = None self._api_key = api_key self._fully_authed = False @property def fully_authed(self): return self._fully_authed def login(self, user, password): body = self._formdata({ "user": user, "password": password, }, ["user", "password"]) content = self._fetch("/login", method="POST", body=body) self._fully_authed = True return FastlySession(self, content) def list_backends(self, service_id, version_number): content = self._fetch("/service/%s/version/%d/backend" % (service_id, version_number)) return map(lambda x: FastlyBackend(self, x), content) def create_backend( self, service_id, version_number, name, address, use_ssl=False, port=80, connect_timeout=1000, first_byte_timeout=15000, between_bytes_timeout=10000, error_threshold=0, max_conn=20, weight=100, auto_loadbalance=False, shield=None, request_condition=None, healthcheck=None, comment=None, ssl_cert_hostname=None, ssl_sni_hostname=None, min_tls_version=None, max_tls_version=None,): body = self._formdata({ "name": name, "address": address, "use_ssl": use_ssl, "port": port, "connect_timeout": connect_timeout, "first_byte_timeout": first_byte_timeout, "between_bytes_timeout": between_bytes_timeout, "error_threshold": error_threshold, "max_conn": max_conn, "weight": weight, "auto_loadbalance": auto_loadbalance, "shield": shield, "request_condition": request_condition, "healthcheck": healthcheck, "comment": comment, "ssl_cert_hostname": ssl_cert_hostname, "ssl_sni_hostname": ssl_sni_hostname, "min_tls_version": min_tls_version, "max_tls_version": max_tls_version, }, FastlyBackend.FIELDS) content = self._fetch("/service/%s/version/%d/backend" % (service_id, version_number), method="POST", body=body) return FastlyBackend(self, content) def get_backend(self, service_id, version_number, name): content = self._fetch("/service/%s/version/%d/backend/%s" % (service_id, version_number, urllib.quote(name, safe=''))) return FastlyBackend(self, content) def update_backend(self, service_id, version_number, name_key, **kwargs): body = self._formdata(kwargs, FastlyBackend.FIELDS) content = self._fetch("/service/%s/version/%d/backend/%s" % (service_id, version_number, urllib.quote(name_key, safe='')), method="PUT", body=body) return FastlyBackend(self, content) def delete_backend(self, service_id, version_number, name): content = 
self._fetch("/service/%s/version/%d/backend/%s" % (service_id, version_number, urllib.quote(name, safe='')), method="DELETE") return self._status(content) def check_backends(self, service_id, version_number): content = self._fetch("/service/%s/version/%d/backend/check_all" % (service_id, version_number)) return content def list_cache_settings(self, service_id, version_number): content = self._fetch("/service/%s/version/%d/cache_settings" % (service_id, version_number)) return map(lambda x: FastlyCacheSettings(self, x), content) def create_cache_settings( self, service_id, version_number, name, action, ttl=None, stale_ttl=None, cache_condition=None): body = self._formdata({ "name": name, "action": action, "ttl": ttl, "stale_ttl": stale_ttl, "cache_condition": cache_condition, }, FastlyCacheSettings.FIELDS) content = self._fetch("/service/%s/version/%d/cache_settings" % (service_id, version_number), method="POST", body=body) return FastlyCacheSettings(self, content) def get_cache_settings(self, service_id, version_number, name): content = self._fetch("/service/%s/version/%d/cache_settings/%s" % (service_id, version_number, urllib.quote(name, safe=''))) return FastlyCacheSettings(self, content) def update_cache_settings(self, service_id, version_number, name_key, **kwargs): body = self._formdata(kwargs, FastlyCacheSettings.FIELDS) content = self._fetch("/service/%s/version/%d/cache_settings/%s" % (service_id, version_number, urllib.quote(name_key, safe='')), method="PUT", body=body) return FastlyCacheSettings(self, content) def delete_cache_settings(self, service_id, version_number, name): content = self._fetch("/service/%s/version/%d/cache_settings/%s" % (service_id, version_number, urllib.quote(name, safe='')), method="DELETE") return self._status(content) def list_conditions(self, service_id, version_number): content = self._fetch("/service/%s/version/%d/condition" % (service_id, version_number)) return map(lambda x: FastlyCondition(self, x), content)
BSD 2-Clause Simplified License
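A minimal, hedged usage sketch of the FastlyConnection API shown in the context above. The API key, service ID, version number, and backend name are illustrative placeholders, not values from the source.
# Hypothetical usage of FastlyConnection from the context above; all
# identifiers below are placeholders.
conn = FastlyConnection("FASTLY_API_KEY")

# Backends are scoped to a service ID and a version number.
backends = conn.list_backends("SERVICE_ID", 1)

# delete_backend issues a DELETE request and returns the parsed status
# of the API response.
ok = conn.delete_backend("SERVICE_ID", 1, "origin-backend")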
pymzml/pymzml
pymzml/file_classes/indexedGzip.py
IndexedGzip.__del__
python
def __del__(self):
    self.Reader.close()
    self.file_handler.close()
Close handlers when deleting object.
https://github.com/pymzml/pymzml/blob/ba552046719f754ffc3bc7cbdc2977b5f3363ae6/pymzml/file_classes/indexedGzip.py#L53-L56
import codecs
import gzip
from xml.etree.ElementTree import XML

from .. import spec
from ..utils.GSGR import GSGR


class IndexedGzip:
    def __init__(self, path, encoding):
        self.path = path
        self.file_handler = codecs.getreader(encoding)(gzip.open(path))
        self.offset_dict = dict()
        self._build_index()
MIT License
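A hedged usage sketch for the IndexedGzip reader above. The file path is a placeholder; the class builds its index on construction, and __del__ closes both handles once the object goes away.
# Hypothetical usage; "example.mzML.gz" is a placeholder path.
reader = IndexedGzip("example.mzML.gz", "utf-8")
# ... work with the indexed reader ...
del reader  # __del__ closes self.Reader and the gzip file handle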
dallinger/dallinger
dallinger/mturk.py
MTurkService.increment_named_qualification_score
python
def increment_named_qualification_score(self, name, worker_id, notify=False):
    result = self.current_named_qualification_score(name, worker_id)
    current_score = result["score"] or 0
    new_score = current_score + 1
    qtype_id = result["qtype"]["id"]
    self.assign_qualification(qtype_id, worker_id, new_score, notify)
    return {"qtype": result["qtype"], "score": new_score}
Increment the current qualification score for a worker, on a qualification with the provided name.
https://github.com/dallinger/dallinger/blob/2bc309c422935d372a7568cc18340e3b5b3f6a21/dallinger/mturk.py#L389-L399
import boto3 import datetime import logging import time from botocore.exceptions import ClientError from botocore.exceptions import NoCredentialsError from cached_property import cached_property logger = logging.getLogger(__file__) PERCENTAGE_APPROVED_REQUIREMENT_ID = "000000000000000000L0" LOCALE_REQUIREMENT_ID = "00000000000000000071" MAX_SUPPORTED_BATCH_SIZE = 100 class MTurkServiceException(Exception): class RemoteAPICallTimedOut(MTurkServiceException): class DuplicateQualificationNameError(MTurkServiceException): class QualificationNotFoundException(MTurkServiceException): class WorkerLacksQualification(MTurkServiceException): class RevokedQualification(MTurkServiceException): class NonExistentSubscription(MTurkServiceException): class SNSService(object): max_wait_secs = 12 def __init__( self, aws_access_key_id, aws_secret_access_key, region_name, confirm=True ): self.aws_key = aws_access_key_id self.aws_secret = aws_secret_access_key self.region_name = region_name self.do_confirm_subscription = confirm @cached_property def _sns(self): session = boto3.session.Session( aws_access_key_id=self.aws_key, aws_secret_access_key=self.aws_secret, region_name=self.region_name, ) return session.client("sns") def confirm_subscription(self, token, topic): logger.warning("Confirming SNS subsription.") self._sns.confirm_subscription( Token=token, TopicArn=topic, ) def create_subscription(self, experiment_id, notification_url): logger.warning( "Creating new SNS subscription for {}...".format(notification_url) ) protocol = "https" if notification_url.startswith("https") else "http" topic = self._sns.create_topic(Name=experiment_id) subscription = self._sns.subscribe( TopicArn=topic["TopicArn"], Protocol=protocol, Endpoint=notification_url, ReturnSubscriptionArn=True, ) start = time.time() while self._awaiting_confirmation(subscription): elapsed = time.time() - start if elapsed > self.max_wait_secs: raise RemoteAPICallTimedOut("Too long") logger.warning("Awaiting SNS subscription confirmation...") time.sleep(1) logger.warning("Subscription confirmed.") return topic["TopicArn"] def cancel_subscription(self, experiment_id): logger.warning("Cancelling SNS subscription") topic_id = self._get_sns_topic_for_experiment(experiment_id) if topic_id is None: raise NonExistentSubscription( "No SNS subscription found for {}".format(experiment_id) ) self._sns.delete_topic(TopicArn=topic_id) return True def _awaiting_confirmation(self, subscription): if not self.do_confirm_subscription: return False report = self._sns.get_subscription_attributes( SubscriptionArn=subscription["SubscriptionArn"] ) status = report["Attributes"]["PendingConfirmation"] return status == "true" def _get_sns_topic_for_experiment(self, experiment_id): experiment_topics = ( t for t in self._all_topics() if t.endswith(":" + experiment_id) ) try: return next(experiment_topics) except StopIteration: return None def _all_topics(self): done = False next_token = None while not done: if next_token is not None: response = self._sns.list_topics(NextToken=next_token) else: response = self._sns.list_topics() if response: for t in response["Topics"]: yield t["TopicArn"] if "NextToken" in response: next_token = response["NextToken"] else: done = True class MTurkQuestions(object): @staticmethod def external(ad_url, frame_height=600): q = ( '<ExternalQuestion xmlns="http://mechanicalturk.amazonaws.com/' 'AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd">' "<ExternalURL>{}</ExternalURL>" "<FrameHeight>{}</FrameHeight></ExternalQuestion>" ) return 
q.format(ad_url, frame_height) @staticmethod def compensation(title="Compensation HIT", sandbox=False, frame_height=600): if sandbox: action = "https://workersandbox.mturk.com/mturk/externalSubmit" else: action = "https://www.mturk.com/mturk/externalSubmit" q = ( '<HTMLQuestion xmlns="http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2011-11-11/HTMLQuestion.xsd">' "<HTMLContent><![CDATA[<!DOCTYPE html><html>" "<head>" '<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>' '<script type="text/javascript" src="https://s3.amazonaws.com/mturk-public/externalHIT_v1.js"></script>' "</head>" "<body>" '<form name="mturk_form" method="post" id="mturk_form" action="{}">' '<input type="hidden" value="" name="assignmentId" id="assignmentId"/>' "<h1>{}</h1>" "<p>We are sorry that you encountered difficulties with our experiment. " "We will compensate you immediately upon submission of this HIT.</p>" '<input type="hidden" name="some-input-required" value="anything" ></input>' '<input type="submit" id="submitButton" value="Submit" /></p></form>' '<script language="Javascript">turkSetAssignmentID();</script>' "</body></html>]]>" "</HTMLContent>" "<FrameHeight>{}</FrameHeight>" "</HTMLQuestion>" ) return q.format(action, title, frame_height) class MTurkQualificationRequirements(object): @staticmethod def min_approval(percentage): return { "QualificationTypeId": PERCENTAGE_APPROVED_REQUIREMENT_ID, "Comparator": "GreaterThanOrEqualTo", "IntegerValues": [percentage], "RequiredToPreview": True, } @staticmethod def restrict_to_countries(countries): return { "QualificationTypeId": LOCALE_REQUIREMENT_ID, "Comparator": "EqualTo", "LocaleValues": [{"Country": country} for country in countries], "RequiredToPreview": True, } @staticmethod def must_have(qualification_id): return { "QualificationTypeId": qualification_id, "Comparator": "Exists", "RequiredToPreview": True, } @staticmethod def must_not_have(qualification_id): return { "QualificationTypeId": qualification_id, "Comparator": "DoesNotExist", "RequiredToPreview": True, } class MTurkService(object): def __init__( self, aws_access_key_id, aws_secret_access_key, region_name, sandbox=True, max_wait_secs=0, ): self.aws_key = aws_access_key_id self.aws_secret = aws_secret_access_key self.region_name = region_name self.is_sandbox = sandbox self.max_wait_secs = max_wait_secs @cached_property def mturk(self): session = boto3.session.Session( aws_access_key_id=self.aws_key, aws_secret_access_key=self.aws_secret, region_name=self.region_name, ) return session.client( "mturk", endpoint_url=self.host, region_name=self.region_name ) @cached_property def sns(self): return SNSService( aws_access_key_id=self.aws_key, aws_secret_access_key=self.aws_secret, region_name=self.region_name, ) @property def host(self): if self.is_sandbox: template = u"https://mturk-requester-sandbox.{}.amazonaws.com" else: template = u"https://mturk-requester.{}.amazonaws.com" return template.format(self.region_name) def account_balance(self): response = self.mturk.get_account_balance() return float(response["AvailableBalance"]) def check_credentials(self): try: return bool(self.mturk.get_account_balance()) except NoCredentialsError: raise MTurkServiceException("No AWS credentials set!") except ClientError: raise MTurkServiceException("Invalid AWS credentials!") except Exception as ex: raise MTurkServiceException( "Error checking credentials: {}".format(str(ex)) ) def confirm_subscription(self, token, topic): self.sns.confirm_subscription(token=token, topic=topic) def 
create_qualification_type(self, name, description, status="Active"): try: response = self.mturk.create_qualification_type( Name=name, Description=description, QualificationTypeStatus=status ) except Exception as ex: if "already created a QualificationType with this name" in str(ex): raise DuplicateQualificationNameError(str(ex)) else: raise return self._translate_qtype(response["QualificationType"]) def get_qualification_type_by_name(self, name): max_fuzzy_matches_to_check = 100 query = name.upper() args = { "Query": query, "MustBeRequestable": False, "MustBeOwnedByCaller": True, "MaxResults": max_fuzzy_matches_to_check, } results = self.mturk.list_qualification_types(**args)["QualificationTypes"] start = time.time() while not results: elapsed = time.time() - start if elapsed > self.max_wait_secs: return None time.sleep(1) results = self.mturk.list_qualification_types(**args)["QualificationTypes"] qualifications = [self._translate_qtype(r) for r in results] if len(qualifications) > 1: for qualification in qualifications: if qualification["name"].upper() == query: return qualification raise MTurkServiceException("{} was not a unique name".format(query)) return qualifications[0] def assign_qualification(self, qualification_id, worker_id, score, notify=False): return self._is_ok( self.mturk.associate_qualification_with_worker( QualificationTypeId=qualification_id, WorkerId=worker_id, IntegerValue=score, SendNotification=notify, ) ) def assign_named_qualification(self, name, worker_id, score, notify=False): qtype = self.get_qualification_type_by_name(name) if qtype is None: raise QualificationNotFoundException( 'No Qualification exists with name "{}"'.format(name) ) return self._is_ok( self.mturk.associate_qualification_with_worker( QualificationTypeId=qtype["id"], WorkerId=worker_id, IntegerValue=score, SendNotification=notify, ) ) def increment_qualification_score(self, qualification_id, worker_id, notify=False): try: current_score = self.current_qualification_score( qualification_id, worker_id ) except (WorkerLacksQualification, RevokedQualification): current_score = 0 new_score = current_score + 1 self.assign_qualification(qualification_id, worker_id, new_score, notify) return {"qtype": qualification_id, "score": new_score}
MIT License
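A hedged usage sketch for increment_named_qualification_score. Credentials, region, qualification name, and worker ID are placeholders; the call looks up the worker's current score on the named qualification and assigns score + 1.
# Placeholder credentials and identifiers, for illustration only.
service = MTurkService(
    aws_access_key_id="AKIA...",
    aws_secret_access_key="...",
    region_name="us-east-1",
    sandbox=True,
)
result = service.increment_named_qualification_score(
    name="my-experiment-qualification",
    worker_id="A1EXAMPLEWORKERID",
)
# result == {"qtype": {...qualification type...}, "score": previous_score + 1}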
openstack/murano
murano/db/migration/alembic_migrations/versions/005_environment-template.py
upgrade
python
def upgrade():
    op.create_table(
        'environment-template',
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('tenant_id', sa.String(length=36), nullable=False),
        sa.Column('version', sa.BigInteger(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('networking', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('tenant_id', 'name'),
        mysql_engine=MYSQL_ENGINE,
        mysql_charset=MYSQL_CHARSET
    )
Creates the environment-template table. The name plus tenant_id must be unique in the table, since a tenant cannot have duplicate template names.
https://github.com/openstack/murano/blob/314c85db8addae184a77c8b47217b1f28e4a1b67/murano/db/migration/alembic_migrations/versions/005_environment-template.py#L33-L54
from alembic import op
import sqlalchemy as sa

revision = '005'
down_revision = '004'

MYSQL_ENGINE = 'InnoDB'
MYSQL_CHARSET = 'utf8'
Apache License 2.0
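The source shows only the upgrade step. A matching downgrade, if one were written, would typically just drop the table; the sketch below is illustrative and not taken from the project.
def downgrade():
    # Drop the table created by upgrade(); illustrative only.
    op.drop_table('environment-template')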
hyperledger/aries-cloudagent-python
aries_cloudagent/wallet/askar.py
AskarWallet.verify_message
python
async def verify_message(
    self,
    message: Union[List[bytes], bytes],
    signature: bytes,
    from_verkey: str,
    key_type: KeyType,
) -> bool:
    if not from_verkey:
        raise WalletError("Verkey not provided")
    if not signature:
        raise WalletError("Signature not provided")
    if not message:
        raise WalletError("Message not provided")
    verkey = b58_to_bytes(from_verkey)
    if key_type == KeyType.ED25519:
        try:
            pk = Key.from_public_bytes(KeyAlg.ED25519, verkey)
            return pk.verify_signature(message, signature)
        except AskarError as err:
            raise WalletError("Exception when verifying message signature") from err
    return verify_signed_message(
        message=message,
        signature=signature,
        verkey=verkey,
        key_type=key_type,
    )
Verify a signature against the public key of the signer.

Args:
    message: The message to verify
    signature: The signature to verify
    from_verkey: Verkey to use in verification
    key_type: The key type to derive the signature verification algorithm from

Returns:
    True if verified, else False

Raises:
    WalletError: If the verkey is not provided
    WalletError: If the signature is not provided
    WalletError: If the message is not provided
    WalletError: If another backend error occurs
https://github.com/hyperledger/aries-cloudagent-python/blob/fec69f1a2301e4745fc9d40cea190050e3f595fa/aries_cloudagent/wallet/askar.py#L606-L654
import asyncio import json import logging from typing import List, Sequence, Tuple, Union from aries_askar import ( AskarError, AskarErrorCode, Entry, Key, KeyAlg, SeedMethod, Session, ) from ..askar.didcomm.v1 import pack_message, unpack_message from ..askar.profile import AskarProfileSession from ..did.did_key import DIDKey from ..ledger.base import BaseLedger from ..ledger.endpoint_type import EndpointType from ..ledger.error import LedgerConfigError from ..storage.askar import AskarStorage from ..storage.base import StorageRecord, StorageDuplicateError, StorageNotFoundError from .base import BaseWallet, KeyInfo, DIDInfo from .crypto import ( sign_message, validate_seed, verify_signed_message, ) from .did_method import DIDMethod from .error import WalletError, WalletDuplicateError, WalletNotFoundError from .key_type import KeyType from .util import b58_to_bytes, bytes_to_b58 CATEGORY_DID = "did" CATEGORY_CONFIG = "config" RECORD_NAME_PUBLIC_DID = "default_public_did" LOGGER = logging.getLogger(__name__) class AskarWallet(BaseWallet): def __init__(self, session: AskarProfileSession): self._session = session @property def session(self) -> Session: return self._session async def create_signing_key( self, key_type: KeyType, seed: str = None, metadata: dict = None ) -> KeyInfo: if metadata is None: metadata = {} try: keypair = _create_keypair(key_type, seed) verkey = bytes_to_b58(keypair.get_public_bytes()) await self._session.handle.insert_key( verkey, keypair, metadata=json.dumps(metadata) ) except AskarError as err: if err.code == AskarErrorCode.DUPLICATE: raise WalletDuplicateError( "Verification key already present in wallet" ) from None raise WalletError("Error creating signing key") from err return KeyInfo(verkey=verkey, metadata=metadata, key_type=key_type) async def get_signing_key(self, verkey: str) -> KeyInfo: if not verkey: raise WalletNotFoundError("No key identifier provided") key = await self._session.handle.fetch_key(verkey) if not key: raise WalletNotFoundError("Unknown key: {}".format(verkey)) metadata = json.loads(key.metadata or "{}") return KeyInfo(verkey=verkey, metadata=metadata, key_type=KeyType.ED25519) async def replace_signing_key_metadata(self, verkey: str, metadata: dict): if not verkey: raise WalletNotFoundError("No key identifier provided") key = await self._session.handle.fetch_key(verkey, for_update=True) if not key: raise WalletNotFoundError("Keypair not found") await self._session.handle.update_key( verkey, metadata=json.dumps(metadata or {}), tags=key.tags ) async def create_local_did( self, method: DIDMethod, key_type: KeyType, seed: str = None, did: str = None, metadata: dict = None, ) -> DIDInfo: if not method.supports_key_type(key_type): raise WalletError( f"Invalid key type {key_type.key_type}" f" for DID method {method.method_name}" ) if method == DIDMethod.KEY and did: raise WalletError("Not allowed to set DID for DID method 'key'") if not metadata: metadata = {} if method not in [DIDMethod.SOV, DIDMethod.KEY]: raise WalletError( f"Unsupported DID method for askar storage: {method.method_name}" ) try: keypair = _create_keypair(key_type, seed) verkey_bytes = keypair.get_public_bytes() verkey = bytes_to_b58(verkey_bytes) try: await self._session.handle.insert_key( verkey, keypair, metadata=json.dumps(metadata) ) except AskarError as err: if err.code == AskarErrorCode.DUPLICATE: pass else: raise WalletError("Error inserting key") from err if method == DIDMethod.KEY: did = DIDKey.from_public_key(verkey_bytes, key_type).did elif not did: did = 
bytes_to_b58(verkey_bytes[:16]) item = await self._session.handle.fetch(CATEGORY_DID, did, for_update=True) if item: did_info = item.value_json if did_info.get("verkey") != verkey: raise WalletDuplicateError("DID already present in wallet") if did_info.get("metadata") != metadata: did_info["metadata"] = metadata await self._session.handle.replace( CATEGORY_DID, did, value_json=did_info, tags=item.tags ) else: await self._session.handle.insert( CATEGORY_DID, did, value_json={ "did": did, "method": method.method_name, "verkey": verkey, "verkey_type": key_type.key_type, "metadata": metadata, }, tags={ "method": method.method_name, "verkey": verkey, "verkey_type": key_type.key_type, }, ) except AskarError as err: raise WalletError("Error when creating local DID") from err return DIDInfo( did=did, verkey=verkey, metadata=metadata, method=method, key_type=key_type ) async def get_local_dids(self) -> Sequence[DIDInfo]: ret = [] for item in await self._session.handle.fetch_all(CATEGORY_DID): ret.append(_load_did_entry(item)) return ret async def get_local_did(self, did: str) -> DIDInfo: if not did: raise WalletNotFoundError("No identifier provided") try: did = await self._session.handle.fetch(CATEGORY_DID, did) except AskarError as err: raise WalletError("Error when fetching local DID") from err if not did: raise WalletNotFoundError("Unknown DID: {}".format(did)) return _load_did_entry(did) async def get_local_did_for_verkey(self, verkey: str) -> DIDInfo: try: dids = await self._session.handle.fetch_all( CATEGORY_DID, {"verkey": verkey}, limit=1 ) except AskarError as err: raise WalletError("Error when fetching local DID for verkey") from err if dids: return _load_did_entry(dids[0]) raise WalletNotFoundError("No DID defined for verkey: {}".format(verkey)) async def replace_local_did_metadata(self, did: str, metadata: dict): try: item = await self._session.handle.fetch(CATEGORY_DID, did, for_update=True) if not item: raise WalletNotFoundError("Unknown DID: {}".format(did)) from None entry_val = item.value_json if entry_val["metadata"] != metadata: entry_val["metadata"] = metadata await self._session.handle.replace( CATEGORY_DID, did, value_json=entry_val, tags=item.tags ) except AskarError as err: raise WalletError("Error updating DID metadata") from err async def get_public_did(self) -> DIDInfo: public_did = None public_info = None public_item = None storage = AskarStorage(self._session) try: public_item = await storage.get_record( CATEGORY_CONFIG, RECORD_NAME_PUBLIC_DID ) except StorageNotFoundError: dids = await self.get_local_dids() for info in dids: if info.metadata.get("public"): public_did = info.did public_info = info break try: await storage.add_record( StorageRecord( type=CATEGORY_CONFIG, id=RECORD_NAME_PUBLIC_DID, value=json.dumps({"did": public_did}), ) ) except StorageDuplicateError: public_item = await storage.get_record( CATEGORY_CONFIG, RECORD_NAME_PUBLIC_DID ) if public_item: public_did = json.loads(public_item.value)["did"] if public_did: try: public_info = await self.get_local_did(public_did) except WalletNotFoundError: pass return public_info async def set_public_did(self, did: Union[str, DIDInfo]) -> DIDInfo: if isinstance(did, str): try: item = await self._session.handle.fetch( CATEGORY_DID, did, for_update=True ) except AskarError as err: raise WalletError("Error when fetching local DID") from err if not item: raise WalletNotFoundError("Unknown DID: {}".format(did)) info = _load_did_entry(item) else: info = did item = None if info.method != DIDMethod.SOV: raise 
WalletError("Setting public DID is only allowed for did:sov DIDs") public = await self.get_public_did() if not public or public.did != info.did: storage = AskarStorage(self._session) if not info.metadata.get("posted"): metadata = {**info.metadata, "posted": True} if item: entry_val = item.value_json entry_val["metadata"] = metadata await self._session.handle.replace( CATEGORY_DID, did, value_json=entry_val, tags=item.tags ) else: await self.replace_local_did_metadata(info.did, metadata) info = info._replace( metadata=metadata, ) await storage.update_record( StorageRecord( type=CATEGORY_CONFIG, id=RECORD_NAME_PUBLIC_DID, value="{}", ), value=json.dumps({"did": info.did}), tags=None, ) public = info return public async def set_did_endpoint( self, did: str, endpoint: str, ledger: BaseLedger, endpoint_type: EndpointType = None, ): did_info = await self.get_local_did(did) if did_info.method != DIDMethod.SOV: raise WalletError("Setting DID endpoint is only allowed for did:sov DIDs") metadata = {**did_info.metadata} if not endpoint_type: endpoint_type = EndpointType.ENDPOINT if endpoint_type == EndpointType.ENDPOINT: metadata[endpoint_type.indy] = endpoint wallet_public_didinfo = await self.get_public_did() if ( wallet_public_didinfo and wallet_public_didinfo.did == did ) or did_info.metadata.get("posted"): if not ledger: raise LedgerConfigError( f"No ledger available but DID {did} is public: missing wallet-type?" ) if not ledger.read_only: async with ledger: await ledger.update_endpoint_for_did(did, endpoint, endpoint_type) await self.replace_local_did_metadata(did, metadata) async def rotate_did_keypair_start(self, did: str, next_seed: str = None) -> str: did_method = DIDMethod.from_did(did) if not did_method.supports_rotation: raise WalletError( f"DID method '{did_method.method_name}' does not support key rotation." 
) keypair = _create_keypair(KeyType.ED25519, next_seed) verkey = bytes_to_b58(keypair.get_public_bytes()) try: await self._session.handle.insert_key( verkey, keypair, ) except AskarError as err: if err.code == AskarErrorCode.DUPLICATE: pass else: raise WalletError( "Error when creating new keypair for local DID" ) from err try: item = await self._session.handle.fetch(CATEGORY_DID, did, for_update=True) if not item: raise WalletNotFoundError("Unknown DID: {}".format(did)) from None entry_val = item.value_json metadata = entry_val.get("metadata", {}) metadata["next_verkey"] = verkey entry_val["metadata"] = metadata await self._session.handle.replace( CATEGORY_DID, did, value_json=entry_val, tags=item.tags ) except AskarError as err: raise WalletError("Error updating DID metadata") from err return verkey async def rotate_did_keypair_apply(self, did: str) -> DIDInfo: try: item = await self._session.handle.fetch(CATEGORY_DID, did, for_update=True) if not item: raise WalletNotFoundError("Unknown DID: {}".format(did)) from None entry_val = item.value_json metadata = entry_val.get("metadata", {}) next_verkey = metadata.get("next_verkey") if not next_verkey: raise WalletError("Cannot rotate DID key: no next key established") del metadata["next_verkey"] entry_val["verkey"] = next_verkey item.tags["verkey"] = next_verkey await self._session.handle.replace( CATEGORY_DID, did, value_json=entry_val, tags=item.tags ) except AskarError as err: raise WalletError("Error updating DID metadata") from err async def sign_message( self, message: Union[List[bytes], bytes], from_verkey: str ) -> bytes: if not message: raise WalletError("Message not provided") if not from_verkey: raise WalletError("Verkey not provided") try: keypair = await self._session.handle.fetch_key(from_verkey) if not keypair: raise WalletNotFoundError("Missing key for sign operation") key = keypair.key if key.algorithm == KeyAlg.BLS12_381_G2: return sign_message( message=message, secret=key.get_secret_bytes(), key_type=KeyType.BLS12381G2, ) else: return key.sign_message(message) except AskarError as err: raise WalletError("Exception when signing message") from err
Apache License 2.0
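A hedged usage sketch for AskarWallet.verify_message inside an async caller. The message, signature, and verkey below are placeholders, not real key material.
async def check_signature(wallet: AskarWallet) -> bool:
    # Placeholder inputs; in practice the signature is 64 bytes for Ed25519
    # and from_verkey is the signer's base58-encoded public key.
    return await wallet.verify_message(
        message=b"hello world",
        signature=b"<64-byte-signature>",
        from_verkey="<base58-verkey>",
        key_type=KeyType.ED25519,
    )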
awni/backgammon
simple-bkg/submission.py
simpleEvaluation
python
def simpleEvaluation(state, evalArgs=None):
    game, player = state
    numHome = 0
    for c in range(12, 16):
        col = game.grid[c]
        if len(col) >= 1 and col[0] == player:
            numHome += len(col)
    V = 10 * len(game.offPieces[player])
    V += numHome
    V -= .1 * len(game.barPieces[player])
    return V
Evaluates the current game state with a simple heuristic.

@param state: Tuple of (game, player); game is a game object (see game.py for details) and player in {'o', 'x'} designates whose turn it is.
@returns V: (scalar) evaluation of the current game state
https://github.com/awni/backgammon/blob/aee5843b23f542b95e42549091d3f11d30b5af3e/simple-bkg/submission.py#L10-L31
import agent
import numpy as np
MIT License
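A hedged usage sketch for simpleEvaluation. It assumes a game object from game.py exposing grid, offPieces, and barPieces, as read by the function above; no such object is constructed here.
def evaluate_both_players(game):
    # game is assumed to be a game.py Game instance with grid, offPieces,
    # and barPieces attributes, as used by simpleEvaluation above.
    return {player: simpleEvaluation((game, player)) for player in ('x', 'o')}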