Dataset columns (name, type, value stats):

repository_name        string   lengths 7-107
function_path          string   lengths 4-190
function_identifier    string   lengths 1-236
language               string   1 distinct value (python)
function               string   lengths 9-647k
docstring              string   lengths 5-488k
function_url           string   lengths 71-285
context                string   lengths 0-2.51M
license                string   5 distinct values
etiennemd/alexa-ecovacs
Deploy/_pytest/assertion/rewrite.py
_format_assertmsg
python
def _format_assertmsg(obj):
    if isinstance(obj, six.text_type) or isinstance(obj, six.binary_type):
        s = obj
        is_repr = False
    else:
        s = py.io.saferepr(obj)
        is_repr = True
    if isinstance(s, six.text_type):
        t = six.text_type
    else:
        t = six.binary_type
    s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
    if is_repr:
        s = s.replace(t("\\n"), t("\n~"))
    return s
Format the custom assertion message given. For strings this simply replaces newlines with '\n~' so that util.format_explanation() will preserve them instead of escaping newlines. For other objects py.io.saferepr() is used first.
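A minimal sketch of the behavior (the inputs are illustrative; assumes the module above is importable):

from _pytest.assertion.rewrite import _format_assertmsg

# Text messages keep their newlines, prefixed with "~" for format_explanation().
assert _format_assertmsg(u"line one\nline two") == u"line one\n~line two"
# Non-strings go through py.io.saferepr() first; "%" is doubled so the result
# survives later %-style formatting.
assert "%%" in _format_assertmsg({"pct": "100%"})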
https://github.com/etiennemd/alexa-ecovacs/blob/d0ee083c3d0728ebbfda3f41ae84979c6aad36d7/Deploy/_pytest/assertion/rewrite.py#L415-L440
from __future__ import absolute_import, division, print_function import ast import errno import itertools import imp import marshal import os import re import six import struct import sys import types import py from _pytest.assertion import util if hasattr(imp, "get_tag"): PYTEST_TAG = imp.get_tag() + "-PYTEST" else: if hasattr(sys, "pypy_version_info"): impl = "pypy" elif sys.platform == "java": impl = "jython" else: impl = "cpython" ver = sys.version_info PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1]) del ver, impl PYC_EXT = ".py" + (__debug__ and "c" or "o") PYC_TAIL = "." + PYTEST_TAG + PYC_EXT ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3 if sys.version_info >= (3, 5): ast_Call = ast.Call else: def ast_Call(a, b, c): return ast.Call(a, b, c, None, None) class AssertionRewritingHook(object): def __init__(self, config): self.config = config self.fnpats = config.getini("python_files") self.session = None self.modules = {} self._rewritten_names = set() self._register_with_pkg_resources() self._must_rewrite = set() def set_session(self, session): self.session = session def find_module(self, name, path=None): state = self.config._assertstate state.trace("find_module called for: %s" % name) names = name.rsplit(".", 1) lastname = names[-1] pth = None if path is not None: path = list(path) if len(path) == 1: pth = path[0] if pth is None: try: fd, fn, desc = imp.find_module(lastname, path) except ImportError: return None if fd is not None: fd.close() tp = desc[2] if tp == imp.PY_COMPILED: if hasattr(imp, "source_from_cache"): try: fn = imp.source_from_cache(fn) except ValueError: fn = fn[:-1] else: fn = fn[:-1] elif tp != imp.PY_SOURCE: return None else: fn = os.path.join(pth, name.rpartition(".")[2] + ".py") fn_pypath = py.path.local(fn) if not self._should_rewrite(name, fn_pypath, state): return None self._rewritten_names.add(name) write = not sys.dont_write_bytecode cache_dir = os.path.join(fn_pypath.dirname, "__pycache__") if write: try: os.mkdir(cache_dir) except OSError: e = sys.exc_info()[1].errno if e == errno.EEXIST: pass elif e in [errno.ENOENT, errno.ENOTDIR]: write = False elif e in [errno.EACCES, errno.EROFS, errno.EPERM]: state.trace("read only directory: %r" % fn_pypath.dirname) write = False else: raise cache_name = fn_pypath.basename[:-3] + PYC_TAIL pyc = os.path.join(cache_dir, cache_name) co = _read_pyc(fn_pypath, pyc, state.trace) if co is None: state.trace("rewriting %r" % (fn,)) source_stat, co = _rewrite_test(self.config, fn_pypath) if co is None: return None if write: _make_rewritten_pyc(state, source_stat, pyc, co) else: state.trace("found cached rewritten pyc for %r" % (fn,)) self.modules[name] = co, pyc return self def _should_rewrite(self, name, fn_pypath, state): fn = str(fn_pypath) if fn_pypath.basename == 'conftest.py': state.trace("rewriting conftest file: %r" % (fn,)) return True if self.session is not None: if self.session.isinitpath(fn): state.trace("matched test file (was specified on cmdline): %r" % (fn,)) return True for pat in self.fnpats: if fn_pypath.fnmatch(pat): state.trace("matched test file %r" % (fn,)) return True for marked in self._must_rewrite: if name == marked or name.startswith(marked + '.'): state.trace("matched marked file %r (from %r)" % (name, marked)) return True return False def mark_rewrite(self, *names): already_imported = (set(names) .intersection(sys.modules) .difference(self._rewritten_names)) for name in already_imported: if not AssertionRewriter.is_rewrite_disabled( sys.modules[name].__doc__ or ""): 
self._warn_already_imported(name) self._must_rewrite.update(names) def _warn_already_imported(self, name): self.config.warn( 'P1', 'Module already imported so cannot be rewritten: %s' % name) def load_module(self, name): if name in sys.modules: return sys.modules[name] co, pyc = self.modules.pop(name) mod = sys.modules[name] = imp.new_module(name) try: mod.__file__ = co.co_filename mod.__cached__ = pyc mod.__loader__ = self py.builtin.exec_(co, mod.__dict__) except: if name in sys.modules: del sys.modules[name] raise return sys.modules[name] def is_package(self, name): try: fd, fn, desc = imp.find_module(name) except ImportError: return False if fd is not None: fd.close() tp = desc[2] return tp == imp.PKG_DIRECTORY @classmethod def _register_with_pkg_resources(cls): try: import pkg_resources pkg_resources.__name__ except ImportError: return pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider) def get_data(self, pathname): with open(pathname, 'rb') as f: return f.read() def _write_pyc(state, co, source_stat, pyc): try: fp = open(pyc, "wb") except IOError: err = sys.exc_info()[1].errno state.trace("error writing pyc file at %s: errno=%s" % (pyc, err)) return False try: fp.write(imp.get_magic()) mtime = int(source_stat.mtime) size = source_stat.size & 0xFFFFFFFF fp.write(struct.pack("<ll", mtime, size)) marshal.dump(co, fp) finally: fp.close() return True RN = "\r\n".encode("utf-8") N = "\n".encode("utf-8") cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+") BOM_UTF8 = '\xef\xbb\xbf' def _rewrite_test(config, fn): state = config._assertstate try: stat = fn.stat() source = fn.read("rb") except EnvironmentError: return None, None if ASCII_IS_DEFAULT_ENCODING: end1 = source.find("\n") end2 = source.find("\n", end1 + 1) if (not source.startswith(BOM_UTF8) and cookie_re.match(source[0:end1]) is None and cookie_re.match(source[end1 + 1:end2]) is None): if hasattr(state, "_indecode"): return None, None state._indecode = True try: try: source.decode("ascii") except UnicodeDecodeError: return None, None finally: del state._indecode try: tree = ast.parse(source) except SyntaxError: state.trace("failed to parse: %r" % (fn,)) return None, None rewrite_asserts(tree, fn, config) try: co = compile(tree, fn.strpath, "exec", dont_inherit=True) except SyntaxError: state.trace("failed to compile: %r" % (fn,)) return None, None return stat, co def _make_rewritten_pyc(state, source_stat, pyc, co): if sys.platform.startswith("win"): _write_pyc(state, co, source_stat, pyc) else: proc_pyc = pyc + "." 
+ str(os.getpid()) if _write_pyc(state, co, source_stat, proc_pyc): os.rename(proc_pyc, pyc) def _read_pyc(source, pyc, trace=lambda x: None): try: fp = open(pyc, "rb") except IOError: return None with fp: try: mtime = int(source.mtime()) size = source.size() data = fp.read(12) except EnvironmentError as e: trace('_read_pyc(%s): EnvironmentError %s' % (source, e)) return None if (len(data) != 12 or data[:4] != imp.get_magic() or struct.unpack("<ll", data[4:]) != (mtime, size)): trace('_read_pyc(%s): invalid or out of date pyc' % source) return None try: co = marshal.load(fp) except Exception as e: trace('_read_pyc(%s): marshal.load error %s' % (source, e)) return None if not isinstance(co, types.CodeType): trace('_read_pyc(%s): not a code object' % source) return None return co def rewrite_asserts(mod, module_path=None, config=None): AssertionRewriter(module_path, config).run(mod) def _saferepr(obj): repr = py.io.saferepr(obj) if isinstance(repr, six.text_type): t = six.text_type else: t = six.binary_type return repr.replace(t("\n"), t("\\n")) from _pytest.assertion.util import format_explanation as _format_explanation
MIT License
django-fluent/django-fluent-contents
fluent_contents/models/managers.py
ContentItemQuerySet.move_to_placeholder
python
def move_to_placeholder(self, placeholder, sort_order=None):
    qs = self.all()
    for item in qs:
        item.move_to_placeholder(placeholder, sort_order=sort_order)
        if sort_order is not None:
            sort_order += 1
    return qs
.. versionadded:: 1.0.2 Move the entire queryset to a new placeholder. Returns a queryset with the newly created objects.
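A hypothetical usage sketch (the model import path, `article`, and `sidebar` are illustrative, not from the source):

from fluent_contents.models import ContentItem

# Move every item under `article` into the `sidebar` placeholder,
# renumbering sort_order from 0 upward.
items = ContentItem.objects.parent(article)
moved = items.move_to_placeholder(sidebar, sort_order=0)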
https://github.com/django-fluent/django-fluent-contents/blob/b82a527a4a1007a62857ba9f6980d6c4482e8f2d/fluent_contents/models/managers.py#L112-L125
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import get_language
from future.builtins import str
from future.utils import string_types
from parler import appsettings as parler_appsettings
from parler.utils import get_language_title
from polymorphic.managers import PolymorphicManager
from polymorphic.query import PolymorphicQuerySet


class PlaceholderManager(models.Manager):
    def parent(self, parent_object):
        lookup = get_parent_lookup_kwargs(parent_object)
        return self.filter(**lookup)

    def get_by_slot(self, parent_object, slot):
        placeholder = self.parent(parent_object).get(slot=slot)
        placeholder.parent = parent_object
        return placeholder

    def create_for_object(self, parent_object, slot, role="m", title=None):
        from .db import Placeholder

        parent_attrs = get_parent_lookup_kwargs(parent_object)
        obj = self.create(
            slot=slot,
            role=role or Placeholder.MAIN,
            title=title or slot.title().replace("_", " "),
            **parent_attrs
        )
        obj.parent = parent_object
        return obj


class ContentItemQuerySet(PolymorphicQuerySet):
    def translated(self, *language_codes):
        if not language_codes:
            language_codes = (get_language(),)
        else:
            for language_code in language_codes:
                if not isinstance(
                    language_code, string_types
                ) or language_code.lower() in ("1", "0", "true", "false"):
                    raise ValueError(
                        "ContentItemQuerySet.translated() expected language_code to be an ISO code"
                    )
        if len(language_codes) == 1:
            return self.filter(language_code=language_codes[0])
        else:
            return self.filter(language_code__in=language_codes)

    def parent(self, parent_object, limit_parent_language=True):
        lookup = get_parent_lookup_kwargs(parent_object)
        if limit_parent_language:
            language_code = get_parent_language_code(parent_object)
            if language_code:
                lookup["language_code"] = language_code
        return self.filter(**lookup)

    def clear_cache(self):
        for contentitem in self:
            contentitem.clear_cache()

    clear_cache.alters_data = True
Apache License 2.0
bearle/django_mail_admin
django_mail_admin/cache.py
get_cache_key
python
def get_cache_key(name):
    return 'django_mail_admin:template:%s' % slugify(name)
Prefixes and slugifies the key name
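A quick check of the key format (Django's slugify lower-cases and hyphenates):

from django_mail_admin.cache import get_cache_key

assert get_cache_key("Welcome Email") == "django_mail_admin:template:welcome-email"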
https://github.com/bearle/django_mail_admin/blob/5286e1d556ba5b696a343a8be0a11617ffd4d193/django_mail_admin/cache.py#L10-L15
from django.template.defaultfilters import slugify

from .settings import get_cache_backend

cache_backend = get_cache_backend()
MIT License
hazyresearch/flyingsquid
flyingsquid/_graphs.py
Mixin._is_separator
python
def _is_separator(self, srcSet, dstSet, separatorSet):
    def neighbors(node):
        neighbor_set = set()
        for edge in self.G.edges:
            if edge[0] == node:
                neighbor_set.add(edge[1])
            if edge[1] == node:
                neighbor_set.add(edge[0])
        return list(neighbor_set)

    visited = set()
    for srcNode in srcSet:
        if srcNode in dstSet:
            return False
        queue = [srcNode]
        curNode = srcNode
        while len(queue) > 0:
            curNode = queue.pop()
            if curNode not in visited:
                visited.add(curNode)
            else:
                continue
            for neighbor in neighbors(curNode):
                if neighbor == srcNode:
                    continue
                if neighbor in dstSet:
                    return False
                if neighbor in separatorSet:
                    continue
                if neighbor not in visited:
                    queue.append(neighbor)
    return True
Check if separatorSet separates srcSet from dstSet. Tries to find a path from some node in srcSet to some node in dstSet that doesn't pass through separatorSet. If successful, return False. Otherwise, return True.
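A minimal sketch of the separation check; networkx is assumed only to build a graph exposing an `.edges` attribute, which is all the helper reads:

import networkx as nx

from flyingsquid._graphs import Mixin

m = Mixin()
m.G = nx.Graph([("A", "B"), ("B", "C")])         # path A - B - C
assert m._is_separator({"A"}, {"C"}, {"B"})      # cutting B separates A from C
assert not m._is_separator({"A"}, {"C"}, set())  # otherwise A reaches C via B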
https://github.com/hazyresearch/flyingsquid/blob/28a713a9ac501b7597c2489468ae189943d00685/flyingsquid/_graphs.py#L16-L56
from pgmpy.models import MarkovModel
from pgmpy.factors.discrete import JointProbabilityDistribution, DiscreteFactor
from itertools import combinations
from flyingsquid.helpers import *
import numpy as np
import math
from tqdm import tqdm
import sys
import random


class Mixin:
Apache License 2.0
picklepete/pyicloud
pyicloud/services/drive.py
DriveNode.open
python
def open(self, **kwargs):
    if self.data["size"] == 0:
        response = Response()
        response.raw = io.BytesIO()
        return response
    return self.connection.get_file(self.data["docwsid"], **kwargs)
Gets the node file.
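A hypothetical call (an authenticated PyiCloudService client `api` is assumed):

# Pick the first node in the drive root and download its bytes;
# open() returns a requests.Response, so the streaming interface applies.
node = api.drive.root.get_children()[0]
response = node.open(stream=True)
data = response.raw.read()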
https://github.com/picklepete/pyicloud/blob/bab549a593b1f2554de8d0eefa0b053c18e09f6f/pyicloud/services/drive.py#L263-L270
from datetime import datetime, timedelta import json import io import mimetypes import os import time from re import search from requests import Response from six import PY2 class DriveService(object): def __init__(self, service_root, document_root, session, params): self._service_root = service_root self._document_root = document_root self.session = session self.params = dict(params) self._root = None def _get_token_from_cookie(self): for cookie in self.session.cookies: if cookie.name == "X-APPLE-WEBAUTH-VALIDATE": match = search(r"\bt=([^:]+)", cookie.value) if match is None: raise Exception("Can't extract token from %r" % cookie.value) return {"token": match.group(1)} raise Exception("Token cookie not found") def get_node_data(self, node_id): request = self.session.post( self._service_root + "/retrieveItemDetailsInFolders", params=self.params, data=json.dumps( [ { "drivewsid": "FOLDER::com.apple.CloudDocs::%s" % node_id, "partialData": False, } ] ), ) return request.json()[0] def get_file(self, file_id, **kwargs): file_params = dict(self.params) file_params.update({"document_id": file_id}) response = self.session.get( self._document_root + "/ws/com.apple.CloudDocs/download/by_id", params=file_params, ) if not response.ok: return None url = response.json()["data_token"]["url"] return self.session.get(url, params=self.params, **kwargs) def get_app_data(self): request = self.session.get( self._service_root + "/retrieveAppLibraries", params=self.params ) return request.json()["items"] def _get_upload_contentws_url(self, file_object): content_type = mimetypes.guess_type(file_object.name)[0] if content_type is None: content_type = "" orig_pos = file_object.tell() file_object.seek(0, os.SEEK_END) file_size = file_object.tell() file_object.seek(orig_pos, os.SEEK_SET) file_params = self.params file_params.update(self._get_token_from_cookie()) request = self.session.post( self._document_root + "/ws/com.apple.CloudDocs/upload/web", params=file_params, headers={"Content-Type": "text/plain"}, data=json.dumps( { "filename": file_object.name, "type": "FILE", "content_type": content_type, "size": file_size, } ), ) if not request.ok: return None return (request.json()[0]["document_id"], request.json()[0]["url"]) def _update_contentws(self, folder_id, sf_info, document_id, file_object): data = { "data": { "signature": sf_info["fileChecksum"], "wrapping_key": sf_info["wrappingKey"], "reference_signature": sf_info["referenceChecksum"], "size": sf_info["size"], }, "command": "add_file", "create_short_guid": True, "document_id": document_id, "path": {"starting_document_id": folder_id, "path": file_object.name,}, "allow_conflict": True, "file_flags": { "is_writable": True, "is_executable": False, "is_hidden": False, }, "mtime": int(time.time() * 1000), "btime": int(time.time() * 1000), } if sf_info.get("receipt"): data["data"].update({"receipt": sf_info["receipt"]}) request = self.session.post( self._document_root + "/ws/com.apple.CloudDocs/update/documents", params=self.params, headers={"Content-Type": "text/plain"}, data=json.dumps(data), ) if not request.ok: return None return request.json() def send_file(self, folder_id, file_object): document_id, content_url = self._get_upload_contentws_url(file_object) request = self.session.post(content_url, files={file_object.name: file_object}) if not request.ok: return None content_response = request.json()["singleFile"] self._update_contentws(folder_id, content_response, document_id, file_object) def create_folders(self, parent, name): request = self.session.post( 
self._service_root + "/createFolders", params=self.params, headers={"Content-Type": "text/plain"}, data=json.dumps( { "destinationDrivewsId": parent, "folders": [{"clientId": self.params["clientId"], "name": name,}], } ), ) return request.json() def rename_items(self, node_id, etag, name): request = self.session.post( self._service_root + "/renameItems", params=self.params, data=json.dumps( {"items": [{"drivewsid": node_id, "etag": etag, "name": name,}],} ), ) return request.json() def move_items_to_trash(self, node_id, etag): request = self.session.post( self._service_root + "/moveItemsToTrash", params=self.params, data=json.dumps( { "items": [ { "drivewsid": node_id, "etag": etag, "clientId": self.params["clientId"], } ], } ), ) return request.json() @property def root(self): if not self._root: self._root = DriveNode(self, self.get_node_data("root")) return self._root def __getattr__(self, attr): return getattr(self.root, attr) def __getitem__(self, key): return self.root[key] class DriveNode(object): def __init__(self, conn, data): self.data = data self.connection = conn self._children = None @property def name(self): if "extension" in self.data: return "%s.%s" % (self.data["name"], self.data["extension"]) return self.data["name"] @property def type(self): node_type = self.data.get("type") return node_type and node_type.lower() def get_children(self): if not self._children: if "items" not in self.data: self.data.update(self.connection.get_node_data(self.data["docwsid"])) if "items" not in self.data: raise KeyError("No items in folder, status: %s" % self.data["status"]) self._children = [ DriveNode(self.connection, item_data) for item_data in self.data["items"] ] return self._children @property def size(self): size = self.data.get("size") if not size: return None return int(size) @property def date_changed(self): return _date_to_utc(self.data.get("dateChanged")) @property def date_modified(self): return _date_to_utc(self.data.get("dateModified")) @property def date_last_open(self): return _date_to_utc(self.data.get("lastOpenTime"))
MIT License
tebeka/pythonwise
cheetah-demo/Cheetah/SettingsManager.py
_SettingsCollector.readSettingsFromConfigFileObj
python
def readSettingsFromConfigFileObj(self, inFile, convert=True):
    p = self._ConfigParserClass()
    p.readfp(inFile)
    sects = p.sections()
    newSettings = {}
    for s in sects:
        newSettings[s] = {}
        for o in p.options(s):
            if o != '__name__':
                newSettings[s][o] = p.get(s, o)
    for sect, subDict in newSettings.items():
        for key, val in subDict.items():
            if convert:
                if val.lower().startswith('python:'):
                    subDict[key] = eval(val[7:], {}, {})
                if val.lower() == 'none':
                    subDict[key] = None
                if val.lower() == 'true':
                    subDict[key] = True
                if val.lower() == 'false':
                    subDict[key] = False
                if stringIsNumber(val):
                    subDict[key] = convStringToNum(val)
            if key.lower() == 'importsettings':
                if val.find(';') < 0:
                    importedSettings = self.readSettingsFromPySrcFile(val)
                else:
                    path = val.split(';')[0]
                    rest = ''.join(val.split(';')[1:]).strip()
                    parentDict = self.readSettingsFromPySrcFile(path)
                    importedSettings = eval('parentDict["' + rest + '"]')
                subDict.update(mergeNestedDictionaries(subDict, importedSettings))
        if sect.lower() == 'globals':
            newSettings.update(newSettings[sect])
            del newSettings[sect]
    return newSettings
Return the settings from a config file that uses the syntax accepted by Python's standard ConfigParser module (like Windows .ini files).

NOTE: this method maintains case unlike the ConfigParser module, unless this class was initialized with the 'caseSensitive' keyword set to False.

All setting values are initially parsed as strings. However, if the 'convert' arg is True this method will do the following value conversions:

* all Python numeric literals will be converted from string to number
* the string 'None' will be converted to the Python value None
* the string 'True' will be converted to a Python truth value
* the string 'False' will be converted to a Python false value
* any string starting with 'python:' will be treated as a Python literal or expression that needs to be eval'd. This approach is useful for declaring lists and dictionaries.

If a config section titled 'Globals' is present the options defined under it will be treated as top-level settings.
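A minimal sketch of the conversions described above (Python 2 era code; the import path follows the module's own self-references):

from StringIO import StringIO

from Cheetah.SettingsManager import _SettingsCollector

collector = _SettingsCollector()
ini = StringIO("""\
[Globals]
debug = False

[server]
port = 8080
hosts = python: ['a', 'b']
""")
settings = collector.readSettingsFromConfigFileObj(ini)
# -> {'debug': False, 'server': {'port': 8080, 'hosts': ['a', 'b']}}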
https://github.com/tebeka/pythonwise/blob/3bc3735160a1761be83751126f99180ed353b2e4/cheetah-demo/Cheetah/SettingsManager.py#L351-L428
__author__ = "Tavis Rudd <tavis@damnsimple.com>" __revision__ = "$Revision: 1.29 $"[11:-2] import sys import os.path import copy as copyModule from ConfigParser import ConfigParser import re from tokenize import Intnumber, Floatnumber, Number from types import * import types import new import tempfile import time from StringIO import StringIO import imp try: import threading from threading import Lock except: class Lock: def acquire(self): pass def release(self): pass class BaseErrorClass: pass try: True,False except NameError: True, False = (1==1),(1==0) numberRE = re.compile(Number) complexNumberRE = re.compile('[\(]*' +Number + r'[ \t]*\+[ \t]*' + Number + '[\)]*') convertableToStrTypes = (StringType, IntType, FloatType, LongType, ComplexType, NoneType, UnicodeType) def mergeNestedDictionaries(dict1, dict2, copy=False, deepcopy=False): if copy: dict1 = copyModule.copy(dict1) elif deepcopy: dict1 = copyModule.deepcopy(dict1) for key,val in dict2.items(): if dict1.has_key(key) and type(val) == types.DictType and type(dict1[key]) == types.DictType: dict1[key] = mergeNestedDictionaries(dict1[key], val) else: dict1[key] = val return dict1 def stringIsNumber(S): S = S.strip() if S[0] in '-+' and len(S) > 1: S = S[1:].strip() match = complexNumberRE.match(S) if not match: match = numberRE.match(S) if not match or (match.end() != len(S)): return False else: return True def convStringToNum(theString): if not stringIsNumber(theString): raise Error(theString + ' cannot be converted to a Python number') return eval(theString, {}, {}) ident = r'[_a-zA-Z][_a-zA-Z0-9]*' firstChunk = r'^(?P<indent>\s*)(?P<class>[_a-zA-Z][_a-zA-Z0-9]*)' customClassRe = re.compile(firstChunk + r'\s*:') baseClasses = r'(?P<bases>\(\s*([_a-zA-Z][_a-zA-Z0-9]*\s*(,\s*[_a-zA-Z][_a-zA-Z0-9]*\s*)*)\))' customClassWithBasesRe = re.compile(firstChunk + baseClasses + '\s*:') def translateClassBasedConfigSyntax(src): outputLines = [] for line in src.splitlines(): if customClassRe.match(line) and line.strip().split(':')[0] not in ('else','try', 'except', 'finally'): line = customClassRe.sub( r'\g<indent>class \g<class>(SettingsContainer):', line) elif customClassWithBasesRe.match(line) and not line.strip().startswith('except'): line = customClassWithBasesRe.sub( r'\g<indent>class \g<class>\g<bases>:', line) outputLines.append(line) if outputLines[0].find('class ') == -1: initLine = 'from Cheetah.SettingsManager import SettingsContainer; True, False = 1, 0; ' else: initLine = 'from Cheetah.SettingsManager import SettingsContainer; True, False = 1, 0\n' return initLine + '\n'.join(outputLines) + '\n' class Error(BaseErrorClass): pass class NoDefault: pass class ConfigParserCaseSensitive(ConfigParser): def optionxform(self, optionstr): return optionstr class SettingsContainer: pass class _SettingsCollector: _sysPathLock = Lock() _ConfigParserClass = ConfigParserCaseSensitive def __init__(self): pass def normalizePath(self, path): return os.path.normpath(path.replace("\\",'/')) def readSettingsFromContainer(self, container, ignoreUnderscored=True): S = {} if type(container) == ModuleType: attrs = vars(container) else: attrs = self._getAllAttrsFromContainer(container) for k, v in attrs.items(): if (ignoreUnderscored and k.startswith('_')) or v is SettingsContainer: continue if self._isContainer(v): S[k] = self.readSettingsFromContainer(v) else: S[k] = v return S readSettingsFromModule = readSettingsFromContainer def _isContainer(self, thing): return type(thing) == ModuleType or ( type(thing) == ClassType and issubclass(thing, 
SettingsContainer) ) def _getAllAttrsFromContainer(self, container): attrs = container.__dict__.copy() attrs.update( container().__dict__ ) for base in container.__bases__: for k, v in base.__dict__.items(): if not attrs.has_key(k): attrs[k] = v return attrs def readSettingsFromPySrcFile(self, path): path = self.normalizePath(path) dirName = os.path.dirname(path) tmpPath = tempfile.mkstemp('webware_temp') pySrc = translateClassBasedConfigSyntax(open(path).read()) modName = path.replace('.','_').replace('/','_').replace('\\','_') open(tmpPath, 'w').write(pySrc) try: fp = open(tmpPath) self._sysPathLock.acquire() sys.path.insert(0, dirName) module = imp.load_source(modName, path, fp) newSettings = self.readSettingsFromModule(module) del sys.path[0] self._sysPathLock.release() return newSettings finally: fp.close() try: os.remove(tmpPath) except: pass if os.path.exists(tmpPath + 'c'): try: os.remove(tmpPath + 'c') except: pass if os.path.exists(path + 'c'): try: os.remove(path + 'c') except: pass def readSettingsFromPySrcStr(self, theString): globalsDict = {'True':1, 'False':0, 'SettingsContainer':SettingsContainer, } newSettings = {'self':self} exec theString in globalsDict, newSettings del newSettings['self'], newSettings['True'], newSettings['False'] module = new.module('temp_settings_module') module.__dict__.update(newSettings) return self.readSettingsFromModule(module) def readSettingsFromConfigFile(self, path, convert=True): path = self.normalizePath(path) fp = open(path) settings = self.readSettingsFromConfigFileObj(fp, convert=convert) fp.close() return settings
BSD 3-Clause New or Revised License
jahjajaka/afternoon_cleaner
object_detection/tpu_exporters/export_saved_model_tpu_lib.py
run_inference
python
def run_inference(inputs,
                  pipeline_config_file,
                  ckpt_path,
                  input_type='encoded_image_string_tensor',
                  use_bfloat16=False,
                  repeat=1):
    pipeline_config, meta_arch = parse_pipeline_config(pipeline_config_file)
    shapes_info = model_map[meta_arch].get_prediction_tensor_shapes(
        pipeline_config)
    with tf.Graph().as_default(), tf.Session() as sess:
        placeholder_tensor, result_tensor_dict = model_map[meta_arch].build_graph(
            pipeline_config, shapes_info, input_type, use_bfloat16)
        saver = tf.train.Saver()
        init_op = tf.global_variables_initializer()
        sess.run(tf.contrib.tpu.initialize_system())
        sess.run(init_op)
        if ckpt_path is not None:
            saver.restore(sess, ckpt_path)
        for _ in range(repeat):
            tensor_dict_out = sess.run(
                result_tensor_dict, feed_dict={placeholder_tensor: [inputs]})
        sess.run(tf.contrib.tpu.shutdown_system())
        return tensor_dict_out
Runs inference on TPU.

Args:
  inputs: Input image with the same type as `input_type`
  pipeline_config_file: Pipeline config file name.
  ckpt_path: Training checkpoint path.
  input_type: One of
    'encoded_image_string_tensor': a 1d tensor with dtype=tf.string
    'image_tensor': a 4d tensor with dtype=tf.uint8
    'tf_example': a 1d tensor with dtype=tf.string
  use_bfloat16: If true, use tf.bfloat16 on TPU.
  repeat: Number of times to repeat running the provided input for profiling.

Returns:
  A dict of resulting tensors.
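A hypothetical invocation (file paths are illustrative; requires a TF 1.x environment with TPU support):

with open("test.jpg", "rb") as f:
    image_bytes = f.read()

outputs = run_inference(
    image_bytes,
    pipeline_config_file="pipeline.config",   # assumed example path
    ckpt_path="model.ckpt",                   # assumed example path
    input_type="encoded_image_string_tensor")
# `outputs` maps output names to numpy arrays (boxes, scores, classes, ...).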
https://github.com/jahjajaka/afternoon_cleaner/blob/590bdf58a216cbc6cfc47ef8f49d7af3df3703b7/object_detection/tpu_exporters/export_saved_model_tpu_lib.py#L128-L175
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from google.protobuf import text_format from tensorflow.python.saved_model import loader from tensorflow.python.saved_model import signature_constants from tensorflow.python.saved_model import tag_constants from object_detection.protos import pipeline_pb2 from object_detection.tpu_exporters import faster_rcnn from object_detection.tpu_exporters import ssd model_map = { 'faster_rcnn': faster_rcnn, 'ssd': ssd, } def parse_pipeline_config(pipeline_config_file): with tf.gfile.GFile(pipeline_config_file, 'r') as config_file: config_str = config_file.read() pipeline_config = pipeline_pb2.TrainEvalPipelineConfig() text_format.Merge(config_str, pipeline_config) meta_arch = pipeline_config.model.WhichOneof('model') return pipeline_config, meta_arch def export(pipeline_config_file, ckpt_path, export_dir, input_placeholder_name='placeholder_tensor', input_type='encoded_image_string_tensor', use_bfloat16=False): pipeline_config, meta_arch = parse_pipeline_config(pipeline_config_file) shapes_info = model_map[meta_arch].get_prediction_tensor_shapes( pipeline_config) with tf.Graph().as_default(), tf.Session() as sess: placeholder_tensor, result_tensor_dict = model_map[meta_arch].build_graph( pipeline_config, shapes_info, input_type, use_bfloat16) saver = tf.train.Saver() init_op = tf.global_variables_initializer() sess.run(init_op) if ckpt_path is not None: saver.restore(sess, ckpt_path) builder = tf.saved_model.builder.SavedModelBuilder(export_dir) tensor_info_inputs = { input_placeholder_name: tf.saved_model.utils.build_tensor_info(placeholder_tensor) } tensor_info_outputs = { k: tf.saved_model.utils.build_tensor_info(v) for k, v in result_tensor_dict.items() } detection_signature = ( tf.saved_model.signature_def_utils.build_signature_def( inputs=tensor_info_inputs, outputs=tensor_info_outputs, method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)) tf.logging.info('Inputs:\n{}\nOutputs:{}\nPredict method name:{}'.format( tensor_info_inputs, tensor_info_outputs, tf.saved_model.signature_constants.PREDICT_METHOD_NAME)) builder.add_meta_graph_and_variables( sess, [ tf.saved_model.tag_constants.SERVING, tf.saved_model.tag_constants.TPU ], signature_def_map={ tf.saved_model.signature_constants .DEFAULT_SERVING_SIGNATURE_DEF_KEY: detection_signature, }, strip_default_attrs=True) builder.add_meta_graph( [tf.saved_model.tag_constants.SERVING], signature_def_map={ tf.saved_model.signature_constants .DEFAULT_SERVING_SIGNATURE_DEF_KEY: detection_signature, }, strip_default_attrs=True) builder.save(as_text=False) tf.logging.info('Model saved to {}'.format(export_dir))
MIT License
adafruit/adafruit_blinka
src/bitbangio.py
I2C.scan
python
def scan(self):
    return self._i2c.scan()
Scan for attached devices
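A short usage sketch (pin names assume a CircuitPython-style `board` module on a supported MicroPython board):

import board

import bitbangio

i2c = bitbangio.I2C(board.SCL, board.SDA)
print([hex(addr) for addr in i2c.scan()])   # 7-bit addresses that responded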
https://github.com/adafruit/adafruit_blinka/blob/619196a5b8c3fe01662d6db193d77c8333e6c9a5/src/bitbangio.py#L55-L57
import adafruit_platformdetect.constants.boards as ap_board
from adafruit_blinka import Lockable, agnostic


class I2C(Lockable):
    def __init__(self, scl, sda, frequency=400000):
        if agnostic.board_id == ap_board.PYBOARD:
            raise NotImplementedError("No software I2C on {}".format(agnostic.board_id))
        if agnostic.detector.board.any_embedded_linux:
            raise NotImplementedError(
                "For bitbangio on Linux, please use Adafruit_CircuitPython_BitbangIO"
            )
        self.init(scl, sda, frequency)

    def init(self, scl, sda, frequency):
        from machine import Pin
        from machine import I2C as _I2C

        self.deinit()
        id = -1  # sentinel for software (bit-banged) I2C
        self._i2c = _I2C(id, Pin(scl.id), Pin(sda.id), freq=frequency)

    def deinit(self):
        try:
            del self._i2c
        except AttributeError:
            pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.deinit()
MIT License
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/tts/yandextts.py
YandexSpeechKitProvider.async_get_tts_audio
python
def async_get_tts_audio(self, message, language, options=None):
    websession = async_get_clientsession(self.hass)
    actual_language = language
    options = options or {}
    try:
        with async_timeout.timeout(10, loop=self.hass.loop):
            url_param = {
                'text': message,
                'lang': actual_language,
                'key': self._key,
                'speaker': options.get(CONF_VOICE, self._speaker),
                'format': options.get(CONF_CODEC, self._codec),
                'emotion': options.get(CONF_EMOTION, self._emotion),
                'speed': options.get(CONF_SPEED, self._speed)
            }
            request = yield from websession.get(
                YANDEX_API_URL, params=url_param)
            if request.status != 200:
                _LOGGER.error("Error %d on load URL %s",
                              request.status, request.url)
                return (None, None)
            data = yield from request.read()
    except (asyncio.TimeoutError, aiohttp.ClientError):
        _LOGGER.error("Timeout for yandex speech kit API")
        return (None, None)
    return (self._codec, data)
Load TTS from Yandex.
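A hypothetical call from inside Home Assistant's event loop (the tts component normally drives this; `provider` is a configured YandexSpeechKitProvider):

extension, data = yield from provider.async_get_tts_audio(
    "Hello world", "en-US", options={"voice": "oksana", "speed": 2})
# On success `data` holds the encoded audio and `extension` is the
# configured codec; on any error both values are None.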
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/tts/yandextts.py#L110-L141
import asyncio import logging import aiohttp import async_timeout import voluptuous as vol from homeassistant.const import CONF_API_KEY from homeassistant.components.tts import Provider, PLATFORM_SCHEMA, CONF_LANG from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) YANDEX_API_URL = "https://tts.voicetech.yandex.net/generate?" SUPPORT_LANGUAGES = [ 'ru-RU', 'en-US', 'tr-TR', 'uk-UK' ] SUPPORT_CODECS = [ 'mp3', 'wav', 'opus', ] SUPPORT_VOICES = [ 'jane', 'oksana', 'alyss', 'omazh', 'zahar', 'ermil' ] SUPPORTED_EMOTION = [ 'good', 'evil', 'neutral' ] MIN_SPEED = 0.1 MAX_SPEED = 3 CONF_CODEC = 'codec' CONF_VOICE = 'voice' CONF_EMOTION = 'emotion' CONF_SPEED = 'speed' DEFAULT_LANG = 'en-US' DEFAULT_CODEC = 'mp3' DEFAULT_VOICE = 'zahar' DEFAULT_EMOTION = 'neutral' DEFAULT_SPEED = 1 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_API_KEY): cv.string, vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES), vol.Optional(CONF_CODEC, default=DEFAULT_CODEC): vol.In(SUPPORT_CODECS), vol.Optional(CONF_VOICE, default=DEFAULT_VOICE): vol.In(SUPPORT_VOICES), vol.Optional(CONF_EMOTION, default=DEFAULT_EMOTION): vol.In(SUPPORTED_EMOTION), vol.Optional(CONF_SPEED, default=DEFAULT_SPEED): vol.Range(min=MIN_SPEED, max=MAX_SPEED) }) SUPPORTED_OPTIONS = [ CONF_CODEC, CONF_VOICE, CONF_EMOTION, CONF_SPEED, ] @asyncio.coroutine def async_get_engine(hass, config): return YandexSpeechKitProvider(hass, config) class YandexSpeechKitProvider(Provider): def __init__(self, hass, conf): self.hass = hass self._codec = conf.get(CONF_CODEC) self._key = conf.get(CONF_API_KEY) self._speaker = conf.get(CONF_VOICE) self._language = conf.get(CONF_LANG) self._emotion = conf.get(CONF_EMOTION) self._speed = str(conf.get(CONF_SPEED)) self.name = 'YandexTTS' @property def default_language(self): return self._language @property def supported_languages(self): return SUPPORT_LANGUAGES @property def supported_options(self): return SUPPORTED_OPTIONS @asyncio.coroutine
MIT License
hazyresearch/butterfly
butterfly/butterfly_multiply.py
ButterflyOrthoMultTied.forward
python
def forward(ctx, twiddle, input, increasing_stride=True):
    twiddle_cos, twiddle_sin = torch.cos(twiddle), torch.sin(twiddle)
    output = butterfly_ortho_multiply_tied(twiddle_cos, twiddle_sin,
                                           input, increasing_stride)
    ctx.save_for_backward(twiddle_cos, twiddle_sin, output)
    ctx._increasing_stride = increasing_stride
    return output
Parameters:
  twiddle: (nstack, n - 1)
  input: (batch_size, nstack, n)
Returns:
  output: (batch_size, nstack, n)
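A shape-level sketch matching the docstring (sizes are illustrative; requires the compiled C++/CUDA extension):

import torch

twiddle = torch.randn(2, 7, requires_grad=True)  # (nstack=2, n-1=7) rotation angles
x = torch.randn(3, 2, 8)                         # (batch=3, nstack=2, n=8)
out = ButterflyOrthoMultTied.apply(twiddle, x)   # -> (3, 2, 8)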
https://github.com/hazyresearch/butterfly/blob/7217b5d93bc78e1229fed3761bcc70d943f604b7/butterfly/butterfly_multiply.py#L225-L237
import math import torch from torch import nn import torch.nn.functional as F from .complex_utils import complex_mul use_extension = True try: from factor_multiply import butterfly_multiply_intermediate, butterfly_multiply_intermediate_backward from factor_multiply import butterfly_multiply_untied, butterfly_multiply_untied_backward from factor_multiply import butterfly_multiply_untied_forward_backward from factor_multiply import butterfly_ortho_multiply_tied, butterfly_ortho_multiply_tied_backward from factor_multiply import butterfly_ortho_multiply_untied, butterfly_ortho_multiply_untied_backward from factor_multiply import bbt_multiply_untied, bbt_multiply_untied_forward_backward from factor_multiply import bbt_ortho_multiply_untied, bbt_ortho_multiply_untied_backward from factor_multiply import butterfly_factor_multiply, butterfly_factor_multiply_backward from factor_multiply import butterfly_conv2d, butterfly_conv2d_backward, butterfly_conv2d_forward_backward from factor_multiply import bbt_conv2d, bbt_conv2d_forward_backward from factor_multiply import butterfly_multiply_untied_eval import factor_multiply_fast as fmf from factor_multiply_fast import butterfly_multiply_untied_forward_fast from factor_multiply_fast import butterfly_multiply_untied_forward_backward_fast from factor_multiply_fast import butterfly_bbs_multiply_untied_forward_fast from factor_multiply_fast import butterfly_bbs_multiply_untied_forward_backward_fast from factor_multiply_fast import butterfly_odo_multiply_untied_forward_fast from factor_multiply_fast import butterfly_odo_multiply_untied_backward_fast from factor_multiply_fast import butterfly_odo_multiply_untied_forward_backward_fast except: use_extension = False import warnings warnings.warn("C++/CUDA extension isn't installed properly. 
Will use butterfly multiply implemented in Pytorch, which is much slower.") try: from apex import amp amp.register_float_function(fmf, 'butterfly_odo_multiply_untied_forward_fast') amp.register_float_function(fmf, 'butterfly_odo_multiply_untied_forward_backward_fast') except ImportError: raise ImportError("Please install apex from https://www.github.com/nvidia/apex.") def butterfly_mult_torch(twiddle, input, increasing_stride=True, return_intermediates=False): batch_size, nstack, n = input.shape[:3] assert twiddle.shape == (nstack, n - 1, 2, 2) if input.dim() == 2 else (nstack, n - 1, 2, 2, 2) m = int(math.log2(n)) assert n == 1 << m, "size must be a power of 2" if input.dim() == 3: output = input.contiguous() intermediates = [output] for log_stride in range(m) if increasing_stride else range(m)[::-1]: stride = 1 << log_stride t = twiddle[:, (stride - 1):(2 * stride - 1)].permute(0, 2, 3, 1) output_reshape = output.view(batch_size, nstack, n // (2 * stride), 1, 2, stride) output = (t.unsqueeze(1) * output_reshape).sum(dim=4) intermediates.append(output) return output.view(batch_size, nstack, n) if not return_intermediates else torch.stack([intermediate.view(batch_size, nstack, n) for intermediate in intermediates]) else: output = input.contiguous() intermediates = [output] for log_stride in range(m) if increasing_stride else range(m)[::-1]: stride = 1 << log_stride t = twiddle[:, (stride - 1):(2 * stride - 1)].permute(0, 2, 3, 1, 4) output_reshape = output.view(batch_size, nstack, n // (2 * stride), 1, 2, stride, 2) output = complex_mul(t.unsqueeze(1), output_reshape).sum(dim=4) intermediates.append(output) return output.view(batch_size, nstack, n, 2) if not return_intermediates else torch.stack([intermediate.view(batch_size, nstack, n, 2) for intermediate in intermediates]) class ButterflyMult(torch.autograd.Function): @staticmethod def forward(ctx, twiddle, input, increasing_stride=True): output = butterfly_multiply_intermediate(twiddle, input, increasing_stride, False) ctx.save_for_backward(twiddle, input) ctx._increasing_stride = increasing_stride return output @staticmethod def backward(ctx, grad): twiddle, input = ctx.saved_tensors increasing_stride = ctx._increasing_stride output_and_intermediate = butterfly_multiply_intermediate(twiddle, input, increasing_stride, True) d_coefficients, d_input = butterfly_multiply_intermediate_backward(grad, twiddle, output_and_intermediate, increasing_stride) return d_coefficients, d_input, None butterfly_mult = ButterflyMult.apply if use_extension else butterfly_mult_torch def butterfly_mult_untied_torch(twiddle, input, increasing_stride=True, return_intermediates=False): batch_size, nstack, n = input.shape[:3] m = int(math.log2(n)) assert n == 1 << m, "size must be a power of 2" assert twiddle.shape == (nstack, m, n // 2, 2, 2) if input.dim() == 3 else (nstack, m, n // 2, 2, 2, 2) if input.dim() == 3: output = input.contiguous() intermediates = [output] for log_stride in range(m) if increasing_stride else range(m)[::-1]: stride = 1 << log_stride t = twiddle[:, log_stride].view(nstack, n // (2 * stride), stride, 2, 2).permute(0, 1, 3, 4, 2) output_reshape = output.view(batch_size, nstack, n // (2 * stride), 1, 2, stride) output = (t * output_reshape).sum(dim=4) intermediates.append(output) return output.view(batch_size, nstack, n) if not return_intermediates else torch.stack([intermediate.view(batch_size, nstack, n) for intermediate in intermediates]) else: output = input.contiguous() intermediates = [output] for log_stride in range(m) if 
increasing_stride else range(m)[::-1]: stride = 1 << log_stride t = twiddle[:, log_stride].view(nstack, n // (2 * stride), stride, 2, 2, 2).permute(0, 1, 3, 4, 2, 5) output_reshape = output.view(batch_size, nstack, n // (2 * stride), 1, 2, stride, 2) output = complex_mul(t, output_reshape).sum(dim=4) intermediates.append(output) return output.view(batch_size, nstack, n, 2) if not return_intermediates else torch.stack([intermediate.view(batch_size, nstack, n, 2) for intermediate in intermediates]) class ButterflyMultUntied(torch.autograd.Function): @staticmethod def forward(ctx, twiddle, input, increasing_stride=True, is_training=True, fast=False): if not is_training and not input.is_cuda and input.dim() == 3 and input.dtype == torch.float and input.shape[-1] > 8: output = butterfly_multiply_untied_eval(twiddle, input, increasing_stride) else: if not fast: output = butterfly_multiply_untied(twiddle, input, increasing_stride, False) else: output = butterfly_multiply_untied_forward_fast(twiddle, input, increasing_stride) ctx.save_for_backward(twiddle, input) ctx._increasing_stride = increasing_stride ctx._fast = fast return output @staticmethod def backward(ctx, grad): twiddle, input = ctx.saved_tensors increasing_stride = ctx._increasing_stride fast = ctx._fast n = input.shape[2] if input.dim() == 3 and n <= 1024 and input.is_cuda: if not fast: d_coefficients, d_input = butterfly_multiply_untied_forward_backward(twiddle, input, grad, increasing_stride) else: d_coefficients, d_input = butterfly_multiply_untied_forward_backward_fast(twiddle, input, grad, increasing_stride) else: output_and_intermediate = butterfly_multiply_untied(twiddle, input, increasing_stride, True) d_coefficients, d_input = butterfly_multiply_untied_backward(grad, twiddle, output_and_intermediate, increasing_stride) return d_coefficients, d_input, None, None, None butterfly_mult_untied = ButterflyMultUntied.apply if use_extension else butterfly_mult_untied_torch class ButterflyOrthoMultTied(torch.autograd.Function): @staticmethod
Apache License 2.0
ucb-sts/sts
sts/control_flow/fuzzer.py
Fuzzer.check_pending_commands
python
def check_pending_commands(self):
    if self.delay_flow_mods:
        for switch in self.simulation.topology.switches:
            assert(isinstance(switch, FuzzSoftwareSwitch))
            if switch.has_pending_commands() and (self.random.random() < self.params.ofp_cmd_passthrough_rate):
                (cmd, pending_receipt) = switch.get_next_command()
                eventclass = ProcessFlowMod
                b64_packet = base64_encode(cmd)
                self._log_input_event(eventclass(pending_receipt.dpid,
                                                 pending_receipt.controller_id,
                                                 pending_receipt.fingerprint,
                                                 b64_packet=b64_packet))
                switch.process_delayed_command(pending_receipt)
If Fuzzer is configured to delay flow mods, this decides whether each switch is allowed to process a buffered flow mod
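Outside the simulation, the per-switch decision is just a biased coin flip against the configured pass-through rate (a sketch of the gate only; the rate normally comes from the fuzzer_params config):

import random

ofp_cmd_passthrough_rate = 0.5   # assumed example value
if random.random() < ofp_cmd_passthrough_rate:
    pass  # the switch would pop and process its next buffered flow mod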
https://github.com/ucb-sts/sts/blob/82190b7662523e3aaa21998a6a31d0878abe66c7/sts/control_flow/fuzzer.py#L466-L481
from sts.control_flow.interactive import Interactive from sts.topology import BufferedPatchPanel from sts.traffic_generator import TrafficGenerator from sts.replay_event import * from pox.lib.util import TimeoutError from pox.lib.packet.lldp import * from config.invariant_checks import name_to_invariant_check from sts.util.convenience import base64_encode from sts.entities import FuzzSoftwareSwitch, ControllerState from sts.openflow_buffer import OpenFlowBuffer from sts.control_flow.base import ControlFlow, RecordingSyncCallback import os import re import shutil import signal import sys import time import random import logging log = logging.getLogger("control_flow") class Fuzzer(ControlFlow): def __init__(self, simulation_cfg, fuzzer_params="config.fuzzer_params", check_interval=None, traffic_inject_interval=10, random_seed=None, delay=0.1, steps=None, input_logger=None, invariant_check_name="InvariantChecker.check_correspondence", halt_on_violation=False, log_invariant_checks=True, delay_startup=True, print_buffers=True, record_deterministic_values=False, mock_link_discovery=False, never_drop_whitelisted_packets=True, initialization_rounds=0, send_all_to_all=False): ControlFlow.__init__(self, simulation_cfg) self.sync_callback = RecordingSyncCallback(input_logger, record_deterministic_values=record_deterministic_values) self.check_interval = check_interval if self.check_interval is None: print >> sys.stderr, "Fuzzer Warning: Check interval is not specified... not checking invariants" if invariant_check_name not in name_to_invariant_check: raise ValueError('''Unknown invariant check %s.\n''' '''Invariant check name must be defined in config.invariant_checks''', invariant_check_name) self.invariant_check_name = invariant_check_name self.invariant_check = name_to_invariant_check[invariant_check_name] self.log_invariant_checks = log_invariant_checks self.traffic_inject_interval = traffic_inject_interval if random_seed is None: random_seed = random.randint(0, sys.maxint) self.random_seed = random_seed self.random = random.Random(random_seed) self.traffic_generator = TrafficGenerator(self.random) self.delay = delay self.steps = steps self.params = object() self._load_fuzzer_params(fuzzer_params) self._input_logger = input_logger self.halt_on_violation = halt_on_violation self.delay_startup = delay_startup self.print_buffers = print_buffers self.mock_link_discovery = mock_link_discovery self.initialization_rounds = initialization_rounds self._pending_self_packets = self.initialization_rounds != 0 self._pending_all_to_all = send_all_to_all self._all_to_all_iterations = 0 self._all_to_all_interval = 5 self.blocked_controller_pairs = [] self.unblocked_controller_pairs = [] self.logical_time = 0 self.never_drop_whitelisted_packets = never_drop_whitelisted_packets self.delay_flow_mods = False def _log_input_event(self, event, **kws): if self._input_logger is not None: if self._initializing(): event.prunable = False event.round = self.logical_time self._input_logger.log_input_event(event, **kws) def _load_fuzzer_params(self, fuzzer_params_path): if fuzzer_params_path.endswith('.py'): fuzzer_params_path = fuzzer_params_path[:-3].replace("/", ".") try: self.params = __import__(fuzzer_params_path, globals(), locals(), ["*"]) self.params.link_discovery_rate = 0.1 except: raise IOError("Could not find fuzzer params config file: %s" % fuzzer_params_path) def _compute_unblocked_controller_pairs(self): sorted_controllers = sorted(self.simulation.controller_manager.controllers, key=lambda c: c.cid) 
unblocked_pairs = [] for i in xrange(0, len(sorted_controllers)): for j in xrange(i+1, len(sorted_controllers)): c1 = sorted_controllers[i] c2 = sorted_controllers[j] c1.unblock_peer(c2) c2.unblock_peer(c1) unblocked_pairs.append((c1.cid, c2.cid)) return unblocked_pairs def init_results(self, results_dir): if self._input_logger: self._input_logger.open(results_dir) params_file = re.sub(r'\.pyc$', '.py', self.params.__file__) if os.path.exists(params_file): new_params_file = os.path.join(results_dir, os.path.basename(params_file)) if os.path.abspath(params_file) != os.path.abspath(new_params_file): shutil.copy(params_file, new_params_file) orig_config_path = os.path.join(results_dir, "orig_config.py") if os.path.exists(orig_config_path): with open(orig_config_path, "a") as out: out.write('''\nraise RuntimeError("Please add this parameter to Fuzzer: ''' '''fuzzer_params='%s'")''' % new_params_file) def _initializing(self): return self._pending_self_packets or self._pending_all_to_all def simulate(self): self.simulation = self.simulation_cfg.bootstrap(self.sync_callback) assert(isinstance(self.simulation.patch_panel, BufferedPatchPanel)) self.traffic_generator.set_topology(self.simulation.topology) self.unblocked_controller_pairs = self._compute_unblocked_controller_pairs() self.delay_flow_mods = self.params.ofp_cmd_passthrough_rate != 1.0 if self.delay_flow_mods: for switch in self.simulation.topology.switches: assert(isinstance(switch, FuzzSoftwareSwitch)) switch.use_delayed_commands() switch.randomize_flow_mods() return self.loop() def loop(self): if self.steps: end_time = self.logical_time + self.steps else: end_time = sys.maxint self.interrupted = False self.old_interrupt = None def interrupt(sgn, frame): msg.interactive("Interrupting fuzzer, dropping to console (press ^C again to terminate)") signal.signal(signal.SIGINT, self.old_interrupt) self.old_interrupt = None self.interrupted = True raise KeyboardInterrupt() self.old_interrupt = signal.signal(signal.SIGINT, interrupt) try: self._log_input_event(ConnectToControllers()) self.simulation.connect_to_controllers() if self.delay_startup: log.info("Waiting until first OpenFlow message received..") while len(self.simulation.openflow_buffer.pending_receives) == 0: self.simulation.io_master.select(self.delay) while self.logical_time < end_time: self.logical_time += 1 try: if not self._initializing(): self.trigger_events() halt = self.maybe_check_invariant() if halt: self.simulation.set_exit_code(5) break self.maybe_inject_trace_event() else: self.check_pending_messages(pass_through=True) if self.logical_time > self.initialization_rounds: if self._pending_self_packets: self._send_initialization_packets(send_to_self=True) self._pending_self_packets = False elif self._pending_all_to_all and (self.logical_time % self._all_to_all_interval) == 0: self._send_initialization_packets(send_to_self=False) self._all_to_all_iterations += 1 if self._all_to_all_iterations > len(self.simulation.topology.hosts): log.info("Done initializing") self._pending_all_to_all = False self.check_dataplane(pass_through=True) msg.event("Round %d completed." 
% self.logical_time) time.sleep(self.delay) except KeyboardInterrupt as e: if self.interrupted: interactive = Interactive(self.simulation_cfg, self._input_logger) interactive.simulate(self.simulation, bound_objects=( ('fuzzer', self), )) self.old_interrupt = signal.signal(signal.SIGINT, interrupt) else: raise e log.info("Terminating fuzzing after %d rounds" % self.logical_time) if self.print_buffers: self._print_buffers() finally: if self.old_interrupt: signal.signal(signal.SIGINT, self.old_interrupt) if self._input_logger is not None: self._input_logger.close(self, self.simulation_cfg) return self.simulation def _send_initialization_packet(self, host, send_to_self=False): traffic_type = "icmp_ping" if send_to_self else "arp_query" (dp_event, send) = self.traffic_generator.generate(traffic_type, host, send_to_self=send_to_self) self._log_input_event(TrafficInjection(dp_event=dp_event, host_id=host.hid)) send() def _send_initialization_packets(self, send_to_self=False): for host in self.simulation.topology.hosts: self._send_initialization_packet(host, send_to_self=send_to_self) def _print_buffers(self): buffered_events = [] log.info("Pending Receives:") of_buf = self.simulation.openflow_buffer for (dpid, controller_id) in of_buf.conns_with_pending_receives(): for p in of_buf.get_pending_receives(dpid, controller_id): log.info("- %r", p) message = of_buf.get_message_receipt(p) b64_packet = base64_encode(message) event = ControlMessageReceive(p.dpid, p.controller_id, p.fingerprint, b64_packet=b64_packet) buffered_events.append(event) log.info("Pending Sends:") for (dpid, controller_id) in of_buf.conns_with_pending_sends(): for p in of_buf.get_pending_sends(dpid, controller_id): log.info("- %r", p) message = of_buf.get_message_send(p) b64_packet = base64_encode(message) event = ControlMessageSend(p.dpid, p.controller_id, p.fingerprint, b64_packet=b64_packet) buffered_events.append(event) if self._input_logger is not None: self._input_logger.dump_buffered_events(buffered_events) def maybe_check_invariant(self): if (self.check_interval is not None and (self.logical_time % self.check_interval) == 0): def do_invariant_check(): if self.log_invariant_checks: self._log_input_event(CheckInvariants(round=self.logical_time, invariant_check_name=self.invariant_check_name)) violations = self.invariant_check(self.simulation) self.simulation.violation_tracker.track(violations, self.logical_time) persistent_violations = self.simulation.violation_tracker.persistent_violations transient_violations = list(set(violations) - set(persistent_violations)) if violations != []: msg.fail("The following correctness violations have occurred: %s" % str(violations)) else: msg.success("No correctness violations!") if transient_violations != []: self._log_input_event(InvariantViolation(transient_violations)) if persistent_violations != []: msg.fail("Persistent violations detected!: %s" % str(persistent_violations)) self._log_input_event(InvariantViolation(persistent_violations, persistent=True)) if self.halt_on_violation: return True return do_invariant_check() def maybe_inject_trace_event(self): if (self.simulation.dataplane_trace and (self.logical_time % self.traffic_inject_interval) == 0): (dp_event, host) = self.simulation.dataplane_trace.peek() if dp_event is not None: self._log_input_event(TrafficInjection(dp_event=dp_event, host_id=host.hid)) self.simulation.dataplane_trace.inject_trace_event() def trigger_events(self): self.check_dataplane() self.check_tcp_connections() self.check_pending_messages() 
self.check_pending_commands() self.check_switch_crashes() self.check_link_failures() self.fuzz_traffic() self.check_controllers() self.check_migrations() self.check_intracontroller_blocks() def check_dataplane(self, pass_through=False): def drop(dp_event, log_event=True): if log_event: self._log_input_event(DataplaneDrop(dp_event.fingerprint, host_id=dp_event.get_host_id(), dpid=dp_event.get_switch_id())) self.simulation.patch_panel.drop_dp_event(dp_event) def permit(dp_event): self._log_input_event(DataplanePermit(dp_event.fingerprint)) self.simulation.patch_panel.permit_dp_event(dp_event) def in_whitelist(dp_event): return (self.never_drop_whitelisted_packets and OpenFlowBuffer.in_whitelist(dp_event.fingerprint[0])) for dp_event in self.simulation.patch_panel.queued_dataplane_events: if pass_through: permit(dp_event) elif not self.simulation.topology.ok_to_send(dp_event): drop(dp_event, log_event=False) elif (self.random.random() >= self.params.dataplane_drop_rate or in_whitelist(dp_event)): permit(dp_event) else: drop(dp_event) if self.mock_link_discovery and self.random.random() < self.params.link_discovery_rate: link = self.random.choice(self.simulation.topology.network_links) attrs = [link.start_software_switch.dpid, link.start_port.port_no, link.end_software_switch.dpid, link.end_port.port_no] live_controllers = self.simulation.controller_manager.live_controllers if live_controllers != []: c = self.random.choice(list(live_controllers)) self._log_input_event(LinkDiscovery(c.cid, attrs)) c.sync_connection.send_link_notification(attrs) def check_tcp_connections(self): for (switch, connection) in self.simulation.topology.unblocked_controller_connections: if self.random.random() < self.params.controlplane_block_rate: self._log_input_event(ControlChannelBlock(switch.dpid, connection.get_controller_id())) self.simulation.topology.block_connection(connection) for (switch, connection) in self.simulation.topology.blocked_controller_connections: if self.random.random() < self.params.controlplane_unblock_rate: self._log_input_event(ControlChannelUnblock(switch.dpid, controller_id=connection.get_controller_id())) self.simulation.topology.unblock_connection(connection) def check_pending_messages(self, pass_through=False): of_buf = self.simulation.openflow_buffer for (dpid, controller_id) in of_buf.conns_with_pending_receives(): for pending_receipt in of_buf.get_pending_receives(dpid, controller_id): if (not pass_through and self.random.random() > self.params.ofp_message_receipt_rate): break message = of_buf.get_message_receipt(pending_receipt) b64_packet = base64_encode(message) self._log_input_event(ControlMessageReceive(pending_receipt.dpid, pending_receipt.controller_id, pending_receipt.fingerprint, b64_packet=b64_packet)) of_buf.schedule(pending_receipt) for (dpid, controller_id) in of_buf.conns_with_pending_sends(): for pending_send in of_buf.get_pending_sends(dpid, controller_id): if (not pass_through and self.random.random() > self.params.ofp_message_send_rate): break message = of_buf.get_message_send(pending_send) b64_packet = base64_encode(message) self._log_input_event(ControlMessageSend(pending_send.dpid, pending_send.controller_id, pending_send.fingerprint, b64_packet=b64_packet)) of_buf.schedule(pending_send)
Apache License 2.0
blurstudio/cross3d
cross3d/abstract/abstractcontainer.py
AbstractContainer.clearMaterialOverrideFlags
python
def clearMaterialOverrideFlags(self):
    self._materialOverrideFlags = 0
    return True
Clear the material override flags currently set in the override options :return: bool
https://github.com/blurstudio/cross3d/blob/277968d1227de740fc87ef61005c75034420eadf/cross3d/abstract/abstractcontainer.py#L230-L238
import cross3d from cross3d import SceneWrapper, abstractmethod from cross3d.constants import ObjectType class AbstractContainer(SceneWrapper): def __init__(self, scene, nativeGroup): SceneWrapper.__init__(self, scene, nativeGroup) self._materialOverride = None self._materialOverrideFlags = 0 self._materialOverrideAdvancedState = {} self._propSetOverride = None @abstractmethod def _addNativeAtmospherics(self, nativeAtmospherics): return False @abstractmethod def _addNativeFx(self, nativeFx): return False @abstractmethod def _addNativeObjects(self, nativeObjects): return False def _clearNativeMaterialOverride(self): return self._scene._clearNativeMaterialOverride(self._nativeObjects()) def _clearNativePropSetOverride(self): return self._scene._clearNativePropSetOverride(self._nativeObjects()) @abstractmethod def _nativeAtmospherics(self): return [] @abstractmethod def _nativeFxs(self): return [] @abstractmethod def _nativeObjects(self): return [] @abstractmethod def _nativeMaterials(self, baseMaterials=False): return [] @abstractmethod def _nativeMaterialOverride(self): return None @abstractmethod def _setNativeAtmospherics(self, nativeAtmospherics): return False @abstractmethod def _setNativeFxs(self, nativeFxs): return False def _setNativeMaterialOverride(self, nativeMaterial, options= -1, advancedState=None): if (options == -1): options = self.materialOverrideFlags() else: self.setMaterialOverrideFlags(options) if (advancedState == None): advancedState = self.materialOverrideAdvancedState() else: self.setMaterialOverrideAdvancedState(advancedState) return self._scene._setNativeMaterialOverride(self._nativeObjects(), nativeMaterial, options=options, advancedState=advancedState) def _setNativePropSetOverride(self, nativePropSet): if (nativePropSet): return self._scene._setNativePropSetOverride(self._nativeObjects(), nativePropSet) else: return self._scene._clearNativePropSetOverride(self._nativeObjects()) def addAtmospherics(self, atmospherics): return self._addNativeAtmospherics([ atmos.nativePointer() for atmos in atmospherics ]) def addFxs(self, fxs): return self._addNativeFxs([ fx.nativePointer() for fx in fxs ]) def addObjects(self, objects): return self._addNativeObjects([ object.nativePointer() for object in objects ]) def addSelection(self): return self._addNativeObjects(self._scene._nativeSelection()) def atmospherics(self): from cross3d import SceneAtmospheric return [ SceneAtmospheric(self._scene, atmos) for atmos in self._nativeAtmospherics() ] def clearMaterialOverride(self): return self._clearNativeMaterialOverride()
MIT License
odatnurd/overrideaudit
lib/utils.py
SettingsGroup.has
python
def has(self, view):
    for key in self.key_list:
        if not view.settings().has(key):
            return False

    return True
Check if the settings in this group are all set on the provided view.
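A minimal usage sketch of the apply/has/remove trio (not part of the source row; assumes it runs inside Sublime Text, where `view` is a sublime.View, and the two setting keys are made up for illustration):

group = SettingsGroup("override_audit_package", "override_audit_override")

group.apply(view, "Default", "Main.sublime-menu")  # set both keys on the view
assert group.has(view)                             # every key present -> True

group.remove(view)                                 # erase both keys
assert not group.has(view)                         # any key missing -> False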
https://github.com/odatnurd/overrideaudit/blob/2e1601753af1b4ffc57e362bf8423c2db0c6d317/lib/utils.py#L41-L48
import sublime

import threading


class SettingsGroup():
    def __init__(self, *settings):
        self.key_list = list(settings)

    def apply(self, view, *values):
        if len(values) != len(self.key_list):
            raise IndexError("Expected %d settings" % len(self.key_list))

        for setting in zip(self.key_list, values):
            view.settings().set(*setting)

    def remove(self, view):
        for setting in self.key_list:
            view.settings().erase(setting)
MIT License
jollejolles/pirecorder
pirecorder/pirecorder.py
PiRecorder.schedule
python
def schedule(self, jobname = None, timeplan = None, enable = True,
             showjobs = False, delete = None, test = False):

    S = Schedule(jobname, timeplan, enable, showjobs, delete, test,
                 logfolder = self.logfolder, internal=True,
                 configfile = self.configfilerel)
Schedule future recordings

Parameters
----------
jobname : str, default = None
    Name for the scheduled recorder task to create, modify or remove.
timeplan : string, default = None
    Code string representing the time planning for the recorder to run
    with current configuration set. Built on CRON, the time plan should
    consist of the following parts:
    * * * * *
    - - - - -
    | | | | |
    | | | | +----- day of week (0 - 7) (sunday = 0 or 7)
    | | | +---------- month (1 - 12)
    | | +--------------- day of month (1 - 31)
    | +-------------------- hour (0 - 23)
    +------------------------- min (0 - 59)
    Each of the parts supports wildcards (*), ranges (2-5), and lists
    (2,5,6,11). For example, if you want to schedule a recording at
    22:00, every workday of the week, enter the code '0 22 * * 1-5'.
    If uncertain, crontab.guru is a great website for checking your
    CRON code. Note that the minimum time between subsequent scheduled
    recordings is 1 minute. Smaller intervals between recordings are
    possible for images with the imgseq command with the Record method.
enable : bool, default = None
    If the scheduled job should be enabled or not.
showjobs : bool, default = False
    If the differently timed tasks should be shown or not.
delete : [None, "job", "all"], default = None
    If a specific job ('job'), all jobs ('all') or no jobs (None)
    should be cleared from the scheduler.
test : bool, default = False
    Determine if the timeplan is valid and how often it will run the
    record command.
configfile : str, default = "pirecorder.conf"
    The name of the configuration file to be used for the scheduled
    recordings. Make sure the file exists, otherwise the default
    configuration settings will be used.

Note: Make sure Recorder configuration timing settings are within the
timespan between subsequent scheduled recordings based on the provided
timeplan. For example, a video duration of 20 min and a scheduled
recording every 15 min between 13:00-16:00 (*/15 13-16 * * *) will
fail. This will be checked automatically.
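A hedged usage sketch mirroring the docstring's CRON example (the job name is an assumption; PiRecorder itself only runs on a Raspberry Pi):

import pirecorder

rec = pirecorder.PiRecorder(configfile="pirecorder.conf")

# Dry-run: validate the timeplan and report how often it would fire
rec.schedule(jobname="evening", timeplan="0 22 * * 1-5", test=True)

# Create/enable the job: record at 22:00 every workday
rec.schedule(jobname="evening", timeplan="0 22 * * 1-5")

# List the scheduled jobs
rec.schedule(showjobs=True)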
https://github.com/jollejolles/pirecorder/blob/96cc15f86ccaa087ed2b7649ea4ac4d95a11aebe/pirecorder/pirecorder.py#L507-L560
from __future__ import print_function from builtins import input import os import cv2 import sys import yaml import argparse import numpy as np from io import BytesIO from ast import literal_eval from datetime import datetime from socket import gethostname from fractions import Fraction from time import sleep, strftime from localconfig import LocalConfig from pythutils.sysutils import Logger, lineprint, homedir, checkfrac, isrpi from pythutils.fileutils import name from pythutils.mediautils import picamconv from .stream import Stream from .camconfig import Camconfig from .schedule import Schedule from .__version__ import __version__ class PiRecorder: def __init__(self, configfile = "pirecorder.conf", logging = True): if not isrpi(): lineprint("PiRecorder only works on a raspberry pi. Exiting..") return self.system = "auto" self.host = gethostname() self.home = homedir() self.setupdir = self.home + "pirecorder" self.logfolder = self.setupdir+"/logs/" if not os.path.exists(self.logfolder): os.makedirs(self.setupdir) os.makedirs(self.logfolder) lineprint("Setup folder created ("+self.setupdir+")..") if not os.path.exists(self.logfolder): lineprint("Setup folder exists but was not set up properly..") if logging: self.log = Logger(self.logfolder+"/pirecorder.log") self.log.start() print("") lineprint("pirecorder "+__version__+" started!") lineprint("="*47, False) self.brightfile = self.setupdir+"/cusbright.yml" self.configfilerel = configfile self.configfile = self.setupdir+"/"+configfile self.config = LocalConfig(self.configfile, compact_form = True) if not os.path.isfile(self.configfile): lineprint("Config file "+configfile+" not found, new file created..") for section in ["rec","cam","cus","img","vid"]: if section not in list(self.config): self.config.add_section(section) self.settings(recdir="pirecorder/recordings",subdirs=False, label="test",rectype="img",rotation=0,brighttune=0, roi=None,gains=(1.0,2.5),brightness=45,contrast=10, saturation=0,iso=200,sharpness=0,compensation=0, shutterspeed=8000,imgdims=(2592,1944),maxres=None, viddims=(1640,1232),imgfps=1,vidfps=24,imgwait=5.0, imgnr=12,imgtime=60,imgquality=50,vidduration=10, viddelay=10,vidquality=11,automode=True,internal="") lineprint("Config settings stored..") else: lineprint("Config file "+configfile+" loaded..") lineprint("Recording " + self.config.rec.rectype + " in " + self.home + self.config.rec.recdir) self._imgparams() self._shuttertofps() if self.config.rec.rectype == "imgseq": if self.config.cam.shutterspeed/1000000.>=(self.config.img.imgwait/5): lineprint("imgwait is not enough for provided shutterspeed" + ", will be overwritten..") if self.config.rec.recdir == "NAS": if not os.path.ismount(self.config.rec.recdir): self.recdir = self.home lineprint("Recdir not mounted, storing in home directory..") self.recdir = self.home + self.config.rec.recdir if not os.path.exists(self.recdir): os.makedirs(self.recdir) os.chdir(self.recdir) def _setup_cam(self, auto = False, fps = None): import picamera import picamera.array self.cam = picamera.PiCamera() self.cam.rotation = self.config.cus.rotation self.cam.exposure_compensation = self.config.cam.compensation if self.config.rec.rectype in ["img","imgseq"]: self.cam.resolution = literal_eval(self.config.img.imgdims) self.cam.framerate = self.config.img.imgfps if self.config.rec.rectype in ["vid","vidseq"]: self.cam.resolution = picamconv(literal_eval(self.config.vid.viddims)) self.cam.framerate = self.config.vid.vidfps if fps != None: self.cam.framerate = fps if self.config.cus.roi is 
None: self.cam.zoom = (0,0,1,1) self.resize = self.cam.resolution else: self.cam.zoom = literal_eval(self.config.cus.roi) w = int(self.cam.resolution[0]*self.cam.zoom[2]) h = int(self.cam.resolution[1]*self.cam.zoom[3]) if self.config.rec.rectype in ["vid","vidseq"]: self.resize = picamconv((w,h)) else: self.resize = (w,h) self.longexpo = False if self.cam.framerate >= 6 else True self.cam.exposure_mode = "auto" self.cam.awb_mode = "auto" lineprint("Camera warming up..") if auto or self.config.cam.automode: self.cam.shutter_speed = 0 sleep(2) elif self.cam.framerate >= 6: sleep(6) if self.cam.framerate > 1.6 else sleep(10) else: sleep(2) if not auto and self.config.cam.automode == False: self.cam.shutter_speed = self.config.cam.shutterspeed self.cam.exposure_mode = "off" self.cam.awb_mode = "off" self.cam.awb_gains = eval(self.config.cus.gains) sleep(0.1) brightness = self.config.cam.brightness + self.config.cus.brighttune self.cam.brightness = brightness self.cam.contrast = self.config.cam.contrast self.cam.saturation = self.config.cam.saturation self.cam.iso = self.config.cam.iso self.cam.sharpness = self.config.cam.sharpness self.rawCapture = picamera.array.PiRGBArray(self.cam, size=self.cam.resolution) def _imgparams(self, mintime = 0.45): self.config.img.imgwait = max(mintime, self.config.img.imgwait) totimg = int(self.config.img.imgtime / self.config.img.imgwait) self.config.img.imgnr = min(self.config.img.imgnr, totimg) def _shuttertofps(self, minfps = 1, maxfps = 40): fps = round(1./(self.config.cam.shutterspeed/1000000.),2) self.config.img.imgfps = min(max(fps, minfps), maxfps) def _namefile(self): imgtypes = ["img","imgseq"] self.filetype = ".jpg" if self.config.rec.rectype in imgtypes else ".h264" if self.config.rec.rectype == "imgseq": date = strftime("%y%m%d") counter = "im{counter:05d}" if self.config.img.imgnr>999 else "im{counter:03d}" time = "{timestamp:%H%M%S}" self.filename = "_".join([self.config.rec.label,date,self.host,counter,time]) self.filename = self.filename+self.filetype else: date = strftime("%y%m%d") self.filename = "_".join([self.config.rec.label, date, self.host])+"_" if self.config.rec.subdirs: subdir = name("_".join([self.config.rec.label,date,self.host])) os.makedirs(subdir, exist_ok=True) self.filename = subdir+"/"+self.filename def autoconfig(self): self._setup_cam(auto=True) with self.rawCapture as stream: for a in range(5): self.cam.capture(stream, format="bgr", use_video_port=True) image = stream.array stream.seek(0) stream.truncate() self.config.cam.shutterspeed = self.cam.exposure_speed self.config.cus.gains = tuple([round(float(i),2) for i in self.cam.awb_gains]) self.config.save() lineprint("Shutterspeed set to "+str(self.cam.exposure_speed)) lineprint("White balance gains set to "+str(self.config.cus.gains)) stream.close() self.rawCapture.close() self.cam.close() def settings(self, **kwargs): if "recdir" in kwargs: self.config.rec.recdir = kwargs["recdir"] if "subdirs" in kwargs: self.config.rec.subdirs = kwargs["subdirs"] if "label" in kwargs: self.config.rec.label = kwargs["label"] if "rectype" in kwargs: self.config.rec.rectype = kwargs["rectype"] if "maxres" in kwargs: self.config.rec.maxres = kwargs["maxres"] if isinstance(self.config.rec.maxres, tuple): self.config.img.imgdims = self.config.rec.maxres elif self.config.rec.maxres == "v2": self.config.img.imgdims = (3264,2464) elif self.config.rec.maxres == "hq": self.config.img.imgdims = (4056,3040) if "rotation" in kwargs: self.config.cus.rotation = kwargs["rotation"] if "brighttune" in 
kwargs: self.config.cus.brighttune = kwargs["brighttune"] if "roi" in kwargs: self.config.cus.roi = kwargs["roi"] if "gains" in kwargs: self.config.cus.gains = kwargs["gains"] if "automode" in kwargs: self.config.cam.automode = kwargs["automode"] if "brightness" in kwargs: self.config.cam.brightness = kwargs["brightness"] if "contrast" in kwargs: self.config.cam.contrast = kwargs["contrast"] if "saturation" in kwargs: self.config.cam.saturation = kwargs["saturation"] if "iso" in kwargs: self.config.cam.iso = kwargs["iso"] if "sharpness" in kwargs: self.config.cam.sharpness = kwargs["sharpness"] if "compensation" in kwargs: self.config.cam.compensation = kwargs["compensation"] if "shutterspeed" in kwargs: self.config.cam.shutterspeed = kwargs["shutterspeed"] if "imgdims" in kwargs: self.config.img.imgdims = kwargs["imgdims"] if "viddims" in kwargs: self.config.vid.viddims = kwargs["viddims"] if "imgfps" in kwargs: self.config.img.imgfps = kwargs["imgfps"] if "vidfps" in kwargs: self.config.vid.vidfps = kwargs["vidfps"] if "imgwait" in kwargs: self.config.img.imgwait = kwargs["imgwait"] if "imgnr" in kwargs: self.config.img.imgnr = kwargs["imgnr"] if "imgtime" in kwargs: self.config.img.imgtime = kwargs["imgtime"] if "imgquality" in kwargs: self.config.img.imgquality = kwargs["imgquality"] if "vidduration" in kwargs: self.config.vid.vidduration = kwargs["vidduration"] if "viddelay" in kwargs: self.config.vid.viddelay = kwargs["viddelay"] if "vidquality" in kwargs: self.config.vid.vidquality = kwargs["vidquality"] brightchange = False if os.path.exists(self.brightfile): with open(self.brightfile) as f: brighttune = yaml.load(f, Loader=yaml.FullLoader) if brighttune != self.config.cus.brighttune: self.config.cus.brighttune = brighttune brightchange = True if len(kwargs) > 0 or brightchange: self._imgparams() self._shuttertofps() if self.config.rec.rectype == "imgseq": if self.config.cam.shutterspeed/1000000. >= (self.config.img.imgwait/5): lineprint("imgwait is not enough for provided shutterspeed" + ", will be overwritten..") self.config.save() if "internal" not in kwargs: lineprint("Config settings stored and loaded..") def stream(self, fps = None): lineprint("Opening stream for cam positioning and roi extraction..") vidstream = Stream(internal=True, rotation=self.config.cus.rotation, maxres=self.config.rec.maxres) if vidstream.roi: self.settings(roi=vidstream.roi, internal="") lineprint("Roi stored..") else: lineprint("No roi selected..") def camconfig(self, fps=None, vidsize=0.4): lineprint("Opening stream for interactive configuration..") fps = max(self.config.vid.vidfps,1) if fps==None else int(fps) self._setup_cam(fps=fps) configout = Camconfig(self.cam, auto=self.config.cam.automode, vidsize=vidsize) if len(configout)>0: self.settings(**configout)
Apache License 2.0
bitmovin/bitmovin-api-sdk-python
bitmovin_api_sdk/models/marlin_drm.py
MarlinDrm.kid
python
def kid(self):
    return self._kid
Gets the kid of this MarlinDrm.

16 byte key in hex (32 characters) (required)

:return: The kid of this MarlinDrm.
:rtype: string_types
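A small sketch of the 32-hex-character requirement (the key values below are made-up placeholders, not real keys):

from bitmovin_api_sdk.models.marlin_drm import MarlinDrm

drm = MarlinDrm(key="00112233445566778899aabbccddeeff",
                kid="ffeeddccbbaa99887766554433221100")
print(drm.kid)  # -> 'ffeeddccbbaa99887766554433221100'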
https://github.com/bitmovin/bitmovin-api-sdk-python/blob/79dd938804197151af7cbe5501c7ec1d97872c15/bitmovin_api_sdk/models/marlin_drm.py#L92-L101
from enum import Enum from six import string_types, iteritems from bitmovin_api_sdk.common.poscheck import poscheck_model from bitmovin_api_sdk.models.drm import Drm import pprint import six class MarlinDrm(Drm): @poscheck_model def __init__(self, id_=None, name=None, description=None, created_at=None, modified_at=None, custom_data=None, outputs=None, key=None, kid=None): super(MarlinDrm, self).__init__(id_=id_, name=name, description=description, created_at=created_at, modified_at=modified_at, custom_data=custom_data, outputs=outputs) self._key = None self._kid = None self.discriminator = None if key is not None: self.key = key if kid is not None: self.kid = kid @property def openapi_types(self): types = {} if hasattr(super(MarlinDrm, self), 'openapi_types'): types = getattr(super(MarlinDrm, self), 'openapi_types') types.update({ 'key': 'string_types', 'kid': 'string_types' }) return types @property def attribute_map(self): attributes = {} if hasattr(super(MarlinDrm, self), 'attribute_map'): attributes = getattr(super(MarlinDrm, self), 'attribute_map') attributes.update({ 'key': 'key', 'kid': 'kid' }) return attributes @property def key(self): return self._key @key.setter def key(self, key): if key is not None: if not isinstance(key, string_types): raise TypeError("Invalid type for `key`, type has to be `string_types`") self._key = key @property
MIT License
getnikola/plugins
v7/latex/latex/__init__.py
LaTeXContext.get_name
python
def get_name(self):
    return '(unknown:{0})'.format(self.id) if self.name is None else self.name
Return the name associated with the context.
https://github.com/getnikola/plugins/blob/afafcec8a1530ee74dadfbe68ffa190b12a5a622/v7/latex/latex/__init__.py#L64-L66
from __future__ import unicode_literals

import os
import io
import nikola.plugin_categories
import nikola.utils
import re
import json

from . import parser, htmlify

LOGGER = nikola.utils.get_logger('compile_latex', nikola.utils.STDERR_HANDLER)


class LaTeXContext(object):
    id = None

    def __init__(self, id, lang, thm_names, name=None):
        self.id = id
        self.name = name
        self.lang = lang
        self.thm_names = thm_names
        self.__file_deps_fragment = set()
        self.__file_deps_page = set()
        self.__uptodate_deps_fragment = list()
        self.__uptodate_deps_page = list()
        self.__plugin_data = {}
        self.__link_providers = []
MIT License
xknx/xknx
xknx/remote_value/remote_value_switch.py
RemoteValueSwitch.payload_valid
python
def payload_valid(self, payload: DPTArray | DPTBinary | None) -> DPTBinary | None:
    return payload if isinstance(payload, DPTBinary) else None
Test if telegram payload may be parsed.
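A quick sketch of the validation contract (calling the method unbound with self=None, which it never uses): DPTBinary payloads pass through unchanged, anything else is rejected as None.

from xknx.dpt import DPTArray, DPTBinary

result = RemoteValueSwitch.payload_valid(None, DPTBinary(1))
assert isinstance(result, DPTBinary) and result.value == 1   # accepted as-is

assert RemoteValueSwitch.payload_valid(None, DPTArray((0x01,))) is None  # wrong DPT
assert RemoteValueSwitch.payload_valid(None, None) is None               # no payload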
https://github.com/xknx/xknx/blob/87666cc9bd9da64a84305baeff84486097346111/xknx/remote_value/remote_value_switch.py#L45-L47
from __future__ import annotations

from typing import TYPE_CHECKING

from xknx.dpt import DPTArray, DPTBinary
from xknx.exceptions import ConversionError, CouldNotParseTelegram

from .remote_value import AsyncCallbackType, GroupAddressesType, RemoteValue

if TYPE_CHECKING:
    from xknx.xknx import XKNX


class RemoteValueSwitch(RemoteValue[DPTBinary, bool]):

    def __init__(
        self,
        xknx: XKNX,
        group_address: GroupAddressesType | None = None,
        group_address_state: GroupAddressesType | None = None,
        sync_state: bool | int | float | str = True,
        device_name: str | None = None,
        feature_name: str = "State",
        after_update_cb: AsyncCallbackType | None = None,
        invert: bool = False,
    ):
        super().__init__(
            xknx,
            group_address,
            group_address_state,
            sync_state=sync_state,
            device_name=device_name,
            feature_name=feature_name,
            after_update_cb=after_update_cb,
        )
        self.invert = bool(invert)
MIT License
mecha-karen/cake
cake/parsing/expression.py
Expression.append
python
def append(self, expr: typing.Union[str, "Expression"]) -> None:
    if isinstance(expr, Expression):
        # Use the instance's expression string, not the class attribute
        expr = expr.expression

    self.__expression += expr
Add a new expression onto your current expression

.. code-block:: py

    >>> from cake import Expression
    >>> y = Expression("2x + 2")
    >>> y.append(" * 2")
    >>> y
    2x + 2 * 2

Parameters
----------
expr: :class:`~typing.Union[str, Expression]`
    The expression to append
https://github.com/mecha-karen/cake/blob/f7bd11a137616c477afbf4d0121edb60b6aeea9a/cake/parsing/expression.py#L699-L718
import re import typing import string from cake import abc, errors import cake from ..core.markers import Operator, Symbol, PlusOrMinus, FunctionMarker from ..core.types.complex import Complex from ..core.types.irrational import Irrational from ..core.unknown import Unknown from .equation import Equation from ._ast import * from cake.helpers import convert_type from tokenize import ( tokenize, ENDMARKER, NEWLINE, ENCODING, OP, NAME, NUMBER, ERRORTOKEN, ) from io import BytesIO ASCII_CHARS = list(string.ascii_lowercase) BLACKLISTED = list(abc.KEYWORDS.keys()) + list(abc.CONSTANTS.keys()) VALID_SYMBOLS = {"!", "(", ")"} IGNORE = (ENDMARKER, NEWLINE, ENCODING) FIND_UNKNOWNS = re.compile("[a-zA-Z]+", re.IGNORECASE) INVALID_OPS = re.compile("[a-zA-Z]+[0-9]+", re.IGNORECASE) subExecGlobals = {'math': __import__('math'), 'cake': __import__('cake')} class Expression(object): def __new__( cls, expression: typing.Union[str, list], *default_args, **default_kwargs ): multiple = expression.split("\n") if type(expression) == str else expression if multiple == Ellipsis: multiple = "" if len(multiple) > 1: eqs = list() for eq in multiple: eqs.append(Expression(eq, *default_args, **default_kwargs)) return eqs return super(Expression, cls).__new__( Expression, *default_args, **default_kwargs ) def __init__( self, expression: typing.Union[str, typing.BinaryIO], *default_args, **default_kwargs, ) -> None: default_args = list(default_args) if hasattr(expression, "seek"): self.__expression = expression.read().decode( encoding="ASCII", errors="ignore" ) else: if expression == Ellipsis: expression = "" self.__expression = expression.lower() self.args = default_args self.kwargs = default_kwargs self.__mappings = self._sort_values(*default_args, **default_kwargs) def _sort_values(self, *args, **kwargs) -> dict: unknowns = FIND_UNKNOWNS.findall(self.__expression) for value in unknowns.copy(): if value in BLACKLISTED: unknowns.remove(value) as_dict = {i: None for i in unknowns} keys = list(as_dict.keys()) if not keys: return {} current_key = keys[0] current_index = 0 for arg in args: if (current_index + 1) > len(keys): break as_dict[current_key] = arg current_index += 1 for key, value in kwargs.items(): if key in as_dict: as_dict[key] = value as_dict = {k: v for k, v in as_dict.items() if v != None} return as_dict def _sub( self, update_mappings: bool = False, return_tokens: bool = False, *args, **kwargs, ): if update_mappings: self.update_variables(*args, **kwargs) unknown_mapping = self.__mappings else: unknown_mapping = self.update_variables(False, *args, **kwargs) invalid = INVALID_OPS.findall(self.expression) if invalid: found_first = invalid[0] index = 0 while True: if index == len(found_first): break if found_first[index].isdigit(): break index += 1 possible_correct = found_first[:index] possible_correct += " " possible_correct += found_first[index:] raise errors.SubstitutionError( f'String `{found_first}`, followed by integer. 
Perhaps you ment "{possible_correct}"' ) as_file = BytesIO(self.expression.encode(encoding="ASCII", errors="ignore")) as_file.seek(0) tokens = list(tokenize(as_file.readline)) if not tokens: return [] if tokens[0].type == ENCODING: tokens.pop(0) presence = list() OPEN_BRACKETS = 0 ACTUAL_INDEX = 0 TOKEN_INDEX = 0 SKIP = 0 while True: if TOKEN_INDEX > (len(tokens) - 1): break if SKIP: SKIP -= 1 TOKEN_INDEX += 1 continue token = tokens[TOKEN_INDEX] string = (token.string).lower() type_ = token.type if type_ in IGNORE: pass elif type_ == OP: if string == "(": POS_TOKENS = tokens[TOKEN_INDEX:] is_plus = POS_TOKENS[0:5] to_mapping = "".join([i.string for i in is_plus]) if to_mapping in ["(+|-)", "(-|+)"]: SKIP += 4 presence.append(PlusOrMinus()) else: try: comp = Complex(raw=to_mapping) presence.append(comp) SKIP += 4 except ValueError: presence.append(Symbol("(")) OPEN_BRACKETS += 1 elif string == ")": if OPEN_BRACKETS < 1: INCORRECT_BRACK_INDEX = token.start[1] raise errors.SubstitutionError( f"Unexpected `)` at index {INCORRECT_BRACK_INDEX}" ) presence.append(Symbol(")")) OPEN_BRACKETS -= 1 else: string_ = abc.MAP_OPERATORS.get(string, string) try: op = Operator(string_) presence.append(op) except ValueError as e: raise errors.SubstitutionError( f"Unknown Operator: {string}" ) from e elif type_ in [NAME, ERRORTOKEN]: TK_INDEX = (TOKEN_INDEX + 1) TOKENS = list() while True: if TK_INDEX > len(tokens): break tk = tokens[TK_INDEX] if tk.type == NAME: TOKENS.append(tk) SKIP += 1 else: break TK_INDEX += 1 if TOKENS: string = ' '.join(map(lambda _: _.string, TOKENS)) constant = abc.CONSTANTS.get(string) function = abc.KEYWORDS.get(string) symbol_func = abc.SYMBOL_KW.get(string) map_op = abc.MAP_OPERATORS.get(string) if len([i for i in (constant, function, symbol_func, map_op) if i is not None]) > 1: raise errors.SubstitutionError( f"{string} is defined as multiple keywords" ) elif constant: presence.append(Irrational(constant)) elif function: POS_TOKENS = tokens[(TOKEN_INDEX + 1) :] if not POS_TOKENS: raise errors.SubstitutionError( f"{string} Called with no parameters" ) if POS_TOKENS[0].string == "(": WRAPPED_IN_BRACKETS = True else: WRAPPED_IN_BRACKETS = False if not WRAPPED_IN_BRACKETS: _, COL = POS_TOKENS[0].start EQ = self.expression[COL:] EVALUATE = EQ.split(" ")[0] TREE, TOKENS = Expression(EVALUATE)._sub( return_tokens=True, **self._sort_values(*args, **kwargs) ) else: FUNQ_EQ = "" BRACKS = 0 for POSFIX in POS_TOKENS: if POSFIX.string == "(": BRACKS += 1 FUNQ_EQ += " ( " elif POSFIX.string == ")": if BRACKS < 1: OPEN_BRACKETS -= 1 presence.append(Symbol(")")) break BRACKS -= 1 FUNQ_EQ += " ) " else: FUNQ_EQ += f" {POSFIX.string} " if BRACKS > 1: raise errors.SubstitutionError( f"{BRACKS} Unclosed brackets whilst evaluating {function.__qualname__}" ) TREE, TOKENS = Expression(FUNQ_EQ)._sub( return_tokens=True, **self._sort_values(*args, **kwargs) ) if not TREE: raise errors.SubstitutionError( f"{string} Called with no parameters" ) func = FunctionMarker(function, TREE) SKIP += len(TOKENS) presence.append(func) elif symbol_func: LAST_POSFIX = presence[-1] func_name = symbol_func.__qualname__.title() if isinstance(LAST_POSFIX, Operator): raise errors.SubstitutionError( f"{func_name} called on an operator ({LAST_POSFIX.value}), at index {token.start[1]}." 
) if isinstance(LAST_POSFIX, Symbol): if LAST_POSFIX.value != ")": raise errors.SubstitutionError( f"{func_name} called on an open bracket, at index {token.start[1]}" ) OPEN_BRACKS = 0 POS_INDEX = 0 POS_TOKENS = tokens[:TOKEN_INDEX][::-1] for POS_TOKEN in POS_TOKENS: string = POS_TOKEN.string if string == ")": OPEN_BRACKS += 1 elif string == "(": OPEN_BRACKS -= 1 if OPEN_BRACKS < 1: break POS_INDEX += 1 if OPEN_BRACKS: raise errors.SubstitutionError( f'{OPEN_BRACKS} Unclosed brackets whilst evalutating "{symbol_func.__qualname__}"' ) POS_TOKENS = POS_TOKENS[::-1] PS_IND = (len(POS_TOKENS) - 1) - POS_INDEX as_eq = [i.string for i in POS_TOKENS[PS_IND:]] del presence[ ((TOKEN_INDEX - POS_INDEX) - 1) : (TOKEN_INDEX + 1) ] TREE = Expression(" ".join(as_eq))._sub( **self._sort_values(*args, **kwargs) ) func = FunctionMarker(symbol_func, TREE) presence.append(func) else: new_pre = [Symbol("("), LAST_POSFIX, Symbol(")")] func = FunctionMarker(symbol_func, new_pre) presence[-1] = func elif map_op: presence.append(Operator(map_op)) else: if string in unknown_mapping: presence.append(convert_type(unknown_mapping[string])) if not (string in ASCII_CHARS): cd = '(' for st in string: if st in unknown_mapping: cd += str(unknown_mapping[st]) else: if st not in ASCII_CHARS: raise errors.SubstitutionError( f"Unknown Token ({string}) at index {token.start[1]}" ) cd += st cd += ' * ' cd = cd[:-3] + ')' presence.extend(Expression(cd, *self.args, **self.kwargs)._sub( update_mappings, *args, **kwargs)) else: presence.append(Unknown(string)) elif type_ == NUMBER: POS_TOKENS = tokens[TOKEN_INDEX:] CURRENT_NUMBER = convert_type(string) if not POS_TOKENS: presence.append(CURRENT_NUMBER) else: NEXT = POS_TOKENS[1] if NEXT.type == NAME: constant = abc.CONSTANTS.get(NEXT.string) function = abc.KEYWORDS.get(NEXT.string) value = unknown_mapping.get(NEXT.string) unk = Unknown(NEXT.string) if value: value = convert_type(value) else: value = unk if constant: SKIP += 1 presence.append(Irrational(constant)) elif not function: SKIP += 1 presence.extend( [ Symbol("("), CURRENT_NUMBER, Operator("*"), value, Symbol(")"), ] ) else: possible_correct = f"{string} * {NEXT.string}" raise errors.SubstitutionError( f'Invalid use of function "{function.__qualname__}" at index {NEXT.start[1]}. 
Perhaps you ment "{possible_correct}"' ) else: presence.append(CURRENT_NUMBER) else: if string.strip(): raise errors.SubstitutionError( f"Unknown Token ({string}) at index {token.start[1]}" ) ACTUAL_INDEX += len(string) TOKEN_INDEX += 1 if OPEN_BRACKETS > 1: raise errors.SubstitutionError(f"{OPEN_BRACKETS} Unclosed brackets") if return_tokens: return presence, tokens return presence def _glSubCode(self, update_mapping: bool = False, *args, **kwargs): if "dirty" in kwargs: dirty = True vars = kwargs.pop('vars') else: vars = list() dirty = False presence = kwargs.pop('dirty', self._sub(update_mapping, *args, **kwargs)) code = str() VARS = vars pm = 0 for posfix in presence: if isinstance(posfix, Unknown): if posfix.value not in VARS: VARS.append(f"{posfix.value} = Unknown('{posfix.value}')") code += f'{posfix.value}' elif isinstance(posfix, FunctionMarker): func, dirtyTokens = posfix.value evaluated = Expression(...)._glSubCode(*args, **{**kwargs, 'dirty': dirtyTokens, 'vars': VARS}) newVars, evaluated, _ = evaluated VARS.extend(newVars) VARS = list(set(VARS)) code += f"{func.__qualname__}({evaluated})" elif isinstance(posfix, cake.Number): code += f'{posfix.__class__.__name__}({posfix.value})' elif isinstance(posfix, PlusOrMinus): code += '(+|-)' pm += 1 elif isinstance(posfix, (Symbol, Operator)): posfix.validate code += f'{posfix.value}' if not dirty: return "{}\n{}".format('\n'.join(VARS), code), pm return VARS, code, pm def convertToCode(self, update_mapping: bool = False, imports: tuple = tuple(), *args, **kwargs): beginning = GEN_AUTO_CODE_MARKING(*imports) code, _ = self._glSubCode(update_mapping, *args, **kwargs) return f'{beginning}{code}' def substitute(self, update_mapping: bool = False, imports: tuple = tuple(), *args, **kwargs): _, pmCount = self._glSubCode(update_mapping, *args, **kwargs) code = self.convertToCode(update_mapping, imports, *args, **kwargs) combos = cake.getPlusMinusCombos(pmCount) if not combos: return execCode(code) toBeEvaluated = list() for combo in combos: codeCopy = code for symbol in combo: ind = codeCopy.find('(+|-)') cmCopy = list(codeCopy) cmCopy[ind:(ind + 5)] = symbol codeCopy = ''.join(cmCopy) toBeEvaluated.append(codeCopy) results = list() for rCode in toBeEvaluated: results.append(execCode(rCode)) return tuple(results) def solve(self, *args, **kwargs): raise NotImplementedError()
MIT License
openstack/swift
swift/account/backend.py
AccountBroker.merge_items
python
def merge_items(self, item_list, source=None):
    def _really_merge_items(conn):
        max_rowid = -1
        curs = conn.cursor()
        for rec in item_list:
            rec.setdefault('storage_policy_index', 0)
            record = [rec['name'], rec['put_timestamp'],
                      rec['delete_timestamp'], rec['object_count'],
                      rec['bytes_used'], rec['deleted'],
                      rec['storage_policy_index']]
            query = '''
                SELECT name, put_timestamp, delete_timestamp,
                       object_count, bytes_used, deleted,
                       storage_policy_index
                FROM container WHERE name = ?
            '''
            if self.get_db_version(conn) >= 1:
                query += ' AND deleted IN (0, 1)'
            curs_row = curs.execute(query, (rec['name'],))
            curs_row.row_factory = None
            row = curs_row.fetchone()
            if row:
                row = list(row)
                for i in range(5):
                    if record[i] is None and row[i] is not None:
                        record[i] = row[i]
                if Timestamp(row[1]) > Timestamp(record[1]):
                    record[1] = row[1]
                if Timestamp(row[2]) > Timestamp(record[2]):
                    record[2] = row[2]
            if Timestamp(record[2]) > Timestamp(record[1]) and \
                    zero_like(record[3]):
                record[5] = 1
            else:
                record[5] = 0
            curs.execute('''
                DELETE FROM container WHERE name = ?
                AND deleted IN (0, 1)
            ''', (record[0],))
            curs.execute('''
                INSERT INTO container (name, put_timestamp,
                    delete_timestamp, object_count, bytes_used,
                    deleted, storage_policy_index)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            ''', record)
            if source:
                max_rowid = max(max_rowid, rec['ROWID'])
        if source:
            try:
                curs.execute('''
                    INSERT INTO incoming_sync (sync_point, remote_id)
                    VALUES (?, ?)
                ''', (max_rowid, source))
            except sqlite3.IntegrityError:
                curs.execute('''
                    UPDATE incoming_sync
                    SET sync_point=max(?, sync_point)
                    WHERE remote_id=?
                ''', (max_rowid, source))
        conn.commit()

    with self.get() as conn:
        try:
            _really_merge_items(conn)
        except sqlite3.OperationalError as err:
            if 'no such column: storage_policy_index' not in str(err):
                raise
            self._migrate_add_storage_policy_index(conn)
            _really_merge_items(conn)
Merge items into the container table.

:param item_list: list of dictionaries of {'name', 'put_timestamp',
                  'delete_timestamp', 'object_count', 'bytes_used',
                  'deleted', 'storage_policy_index'}
:param source: if defined, update incoming_sync with the source
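A hedged sketch of the record shape merge_items() consumes; the in-memory broker setup follows the pattern used in Swift's own tests and is an assumption here:

from swift.common.utils import Timestamp
from swift.account.backend import AccountBroker

broker = AccountBroker(':memory:', account='AUTH_test')
broker.initialize(Timestamp(1).internal)

broker.merge_items([{
    'name': 'container-1',
    'put_timestamp': Timestamp(2).internal,   # newer put than delete -> live
    'delete_timestamp': Timestamp(0).internal,
    'object_count': 10,
    'bytes_used': 1024,
    'deleted': 0,
    'storage_policy_index': 0,
}])
print(broker.get_info()['container_count'])  # -> 1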
https://github.com/openstack/swift/blob/dbd0960aeebedc0487699d3ca2a4d6f21e7ed524/swift/account/backend.py#L469-L549
from uuid import uuid4 import sqlite3 import six from swift.common.utils import Timestamp, RESERVED_BYTE from swift.common.db import DatabaseBroker, utf8encode, zero_like DATADIR = 'accounts' POLICY_STAT_TRIGGER_SCRIPT = """ CREATE TRIGGER container_insert_ps AFTER INSERT ON container BEGIN INSERT OR IGNORE INTO policy_stat (storage_policy_index, container_count, object_count, bytes_used) VALUES (new.storage_policy_index, 0, 0, 0); UPDATE policy_stat SET container_count = container_count + (1 - new.deleted), object_count = object_count + new.object_count, bytes_used = bytes_used + new.bytes_used WHERE storage_policy_index = new.storage_policy_index; END; CREATE TRIGGER container_delete_ps AFTER DELETE ON container BEGIN UPDATE policy_stat SET container_count = container_count - (1 - old.deleted), object_count = object_count - old.object_count, bytes_used = bytes_used - old.bytes_used WHERE storage_policy_index = old.storage_policy_index; END; """ class AccountBroker(DatabaseBroker): db_type = 'account' db_contains_type = 'container' db_reclaim_timestamp = 'delete_timestamp' def _initialize(self, conn, put_timestamp, **kwargs): if not self.account: raise ValueError( 'Attempting to create a new database with no account set') self.create_container_table(conn) self.create_account_stat_table(conn, put_timestamp) self.create_policy_stat_table(conn) def create_container_table(self, conn): conn.executescript(""" CREATE TABLE container ( ROWID INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT, put_timestamp TEXT, delete_timestamp TEXT, object_count INTEGER, bytes_used INTEGER, deleted INTEGER DEFAULT 0, storage_policy_index INTEGER DEFAULT 0 ); CREATE INDEX ix_container_deleted_name ON container (deleted, name); CREATE TRIGGER container_insert AFTER INSERT ON container BEGIN UPDATE account_stat SET container_count = container_count + (1 - new.deleted), object_count = object_count + new.object_count, bytes_used = bytes_used + new.bytes_used, hash = chexor(hash, new.name, new.put_timestamp || '-' || new.delete_timestamp || '-' || new.object_count || '-' || new.bytes_used); END; CREATE TRIGGER container_update BEFORE UPDATE ON container BEGIN SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT'); END; CREATE TRIGGER container_delete AFTER DELETE ON container BEGIN UPDATE account_stat SET container_count = container_count - (1 - old.deleted), object_count = object_count - old.object_count, bytes_used = bytes_used - old.bytes_used, hash = chexor(hash, old.name, old.put_timestamp || '-' || old.delete_timestamp || '-' || old.object_count || '-' || old.bytes_used); END; """ + POLICY_STAT_TRIGGER_SCRIPT) def create_account_stat_table(self, conn, put_timestamp): conn.executescript(""" CREATE TABLE account_stat ( account TEXT, created_at TEXT, put_timestamp TEXT DEFAULT '0', delete_timestamp TEXT DEFAULT '0', container_count INTEGER, object_count INTEGER DEFAULT 0, bytes_used INTEGER DEFAULT 0, hash TEXT default '00000000000000000000000000000000', id TEXT, status TEXT DEFAULT '', status_changed_at TEXT DEFAULT '0', metadata TEXT DEFAULT '' ); INSERT INTO account_stat (container_count) VALUES (0); """) conn.execute(''' UPDATE account_stat SET account = ?, created_at = ?, id = ?, put_timestamp = ?, status_changed_at = ? 
''', (self.account, Timestamp.now().internal, str(uuid4()), put_timestamp, put_timestamp)) def create_policy_stat_table(self, conn): conn.executescript(""" CREATE TABLE policy_stat ( storage_policy_index INTEGER PRIMARY KEY, container_count INTEGER DEFAULT 0, object_count INTEGER DEFAULT 0, bytes_used INTEGER DEFAULT 0 ); INSERT OR IGNORE INTO policy_stat ( storage_policy_index, container_count, object_count, bytes_used ) SELECT 0, container_count, object_count, bytes_used FROM account_stat WHERE container_count > 0; """) def get_db_version(self, conn): if self._db_version == -1: self._db_version = 0 for row in conn.execute(''' SELECT name FROM sqlite_master WHERE name = 'ix_container_deleted_name' '''): self._db_version = 1 return self._db_version def _commit_puts_load(self, item_list, entry): (name, put_timestamp, delete_timestamp, object_count, bytes_used, deleted) = entry[:6] if len(entry) > 6: storage_policy_index = entry[6] else: storage_policy_index = 0 item_list.append( {'name': name, 'put_timestamp': put_timestamp, 'delete_timestamp': delete_timestamp, 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted, 'storage_policy_index': storage_policy_index}) def empty(self): self._commit_puts_stale_ok() with self.get() as conn: row = conn.execute( 'SELECT container_count from account_stat').fetchone() return zero_like(row[0]) def make_tuple_for_pickle(self, record): return (record['name'], record['put_timestamp'], record['delete_timestamp'], record['object_count'], record['bytes_used'], record['deleted'], record['storage_policy_index']) def put_container(self, name, put_timestamp, delete_timestamp, object_count, bytes_used, storage_policy_index): if Timestamp(delete_timestamp) > Timestamp(put_timestamp) and zero_like(object_count): deleted = 1 else: deleted = 0 record = {'name': name, 'put_timestamp': put_timestamp, 'delete_timestamp': delete_timestamp, 'object_count': object_count, 'bytes_used': bytes_used, 'deleted': deleted, 'storage_policy_index': storage_policy_index} self.put_record(record) def _is_deleted_info(self, status, container_count, delete_timestamp, put_timestamp): return status == 'DELETED' or zero_like(container_count) and ( Timestamp(delete_timestamp) > Timestamp(put_timestamp)) def _is_deleted(self, conn): info = conn.execute(''' SELECT put_timestamp, delete_timestamp, container_count, status FROM account_stat''').fetchone() return self._is_deleted_info(**info) def is_status_deleted(self): with self.get() as conn: row = conn.execute(''' SELECT put_timestamp, delete_timestamp, status FROM account_stat''').fetchone() return row['status'] == "DELETED" or ( row['delete_timestamp'] > row['put_timestamp']) def get_policy_stats(self, do_migrations=False): columns = [ 'storage_policy_index', 'container_count', 'object_count', 'bytes_used', ] def run_query(): return (conn.execute(''' SELECT %s FROM policy_stat ''' % ', '.join(columns)).fetchall()) self._commit_puts_stale_ok() info = [] with self.get() as conn: try: info = run_query() except sqlite3.OperationalError as err: if "no such column: container_count" in str(err): if do_migrations: self._migrate_add_container_count(conn) else: columns.remove('container_count') info = run_query() elif "no such table: policy_stat" in str(err): if do_migrations: self.create_policy_stat_table(conn) info = run_query() else: raise policy_stats = {} for row in info: stats = dict(row) key = stats.pop('storage_policy_index') policy_stats[key] = stats return policy_stats def get_info(self): self._commit_puts_stale_ok() with 
self.get() as conn: return dict(conn.execute(''' SELECT account, created_at, put_timestamp, delete_timestamp, status_changed_at, container_count, object_count, bytes_used, hash, id FROM account_stat ''').fetchone()) def list_containers_iter(self, limit, marker, end_marker, prefix, delimiter, reverse=False, allow_reserved=False): delim_force_gte = False if six.PY2: (marker, end_marker, prefix, delimiter) = utf8encode( marker, end_marker, prefix, delimiter) if reverse: marker, end_marker = end_marker, marker self._commit_puts_stale_ok() if delimiter and not prefix: prefix = '' if prefix: end_prefix = prefix[:-1] + chr(ord(prefix[-1]) + 1) orig_marker = marker with self.get() as conn: results = [] while len(results) < limit: query = """ SELECT name, object_count, bytes_used, put_timestamp, 0 FROM container WHERE """ query_args = [] if end_marker and (not prefix or end_marker < end_prefix): query += ' name < ? AND' query_args.append(end_marker) elif prefix: query += ' name < ? AND' query_args.append(end_prefix) if delim_force_gte: query += ' name >= ? AND' query_args.append(marker) delim_force_gte = False elif marker and (not prefix or marker >= prefix): query += ' name > ? AND' query_args.append(marker) elif prefix: query += ' name >= ? AND' query_args.append(prefix) if not allow_reserved: query += ' name >= ? AND' query_args.append(chr(ord(RESERVED_BYTE) + 1)) if self.get_db_version(conn) < 1: query += ' +deleted = 0' else: query += ' deleted = 0' query += ' ORDER BY name %s LIMIT ?' % ('DESC' if reverse else '') query_args.append(limit - len(results)) curs = conn.execute(query, query_args) curs.row_factory = None if prefix is None or not delimiter: return [r for r in curs] rowcount = 0 for row in curs: rowcount += 1 name = row[0] if reverse: end_marker = name else: marker = name if len(results) >= limit: curs.close() return results end = name.find(delimiter, len(prefix)) if end >= 0: if reverse: end_marker = name[:end + len(delimiter)] else: marker = ''.join([ name[:end], delimiter[:-1], chr(ord(delimiter[-1:]) + 1), ]) delim_force_gte = True dir_name = name[:end + len(delimiter)] if dir_name != orig_marker: results.append([dir_name, 0, 0, '0', 1]) curs.close() break results.append(row) if not rowcount: break return results
Apache License 2.0
mgharbi/ttools
ttools/training.py
ModelInterface.validation_step
python
def validation_step(self, batch, running_val_data):
    LOG.warning("Running a ModelInterface validation step that was not overriden: this is a no-op.")
    return {}
Updates the running validation data with the current batch's results.
The default implementation is a no-op.

Args:
    batch (dict): batch of data provided by a data pipeline.
    running_val_data (dict): current aggregates of the validation loop.

Returns:
    updated_data (dict): new updated value for the running_val_data.
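A minimal sketch of overriding the no-op default to accumulate a running loss (the "loss" entry in `batch` is an illustrative assumption):

class MyInterface(ModelInterface):
    def training_step(self, batch):
        return {}  # satisfy the abstract method for this sketch

    def init_validation(self):
        return {"loss_sum": 0.0, "count": 0}

    def validation_step(self, batch, running_val_data):
        # Assumes the data pipeline supplies a per-batch "loss" entry.
        return {
            "loss_sum": running_val_data["loss_sum"] + float(batch["loss"]),
            "count": running_val_data["count"] + 1,
        }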
https://github.com/mgharbi/ttools/blob/0bc410bf89a7f67f7b55ee6e501fc85636cb8ee8/ttools/training.py#L94-L107
from abc import ABCMeta, abstractmethod import argparse import os import re import signal import torch as th from .utils import get_logger LOG = get_logger(__name__) __all__ = ["ModelInterface", "Trainer", "Checkpointer", "BasicArgumentParser"] class BasicArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super(BasicArgumentParser, self).__init__(*args, **kwargs) self.add_argument("--data", required=True, help="path to the training data.") self.add_argument("--val_data", help="path to the validation data.") self.add_argument("--config", help="path to a config file.") self.add_argument("--checkpoint_dir", required=True, help="Output directory where checkpoints are saved") self.add_argument("--init_from", help="path to a checkpoint from which to try and initialize the weights.") self.add_argument("--lr", type=float, default=1e-4, help="Learning rate for the optimizer") self.add_argument("--bs", type=int, default=4, help="Batch size") self.add_argument("--num_epochs", type=int, help="Number of epochs to train for") self.add_argument("--num_worker_threads", default=4, type=int, help="Number of threads that load data") self.add_argument("--cuda", action="store_true", dest="cuda", help="Force GPU") self.add_argument("--no-cuda", action="store_false", dest="cuda", help="Force CPU") self.add_argument("--server", help="Visdom server url") self.add_argument("--base_url", default="/", help="Visdom base url") self.add_argument("--env", default="main", help="Visdom environment") self.add_argument("--port", default=8097, type=int, help="Visdom server port") self.add_argument('--debug', dest="debug", action="store_true") self.set_defaults(cuda=th.cuda.is_available(), debug=False) class ModelInterface(metaclass=ABCMeta): def __init__(self): pass @abstractmethod def training_step(self, batch): return {} def init_validation(self): LOG.warning("Running a ModelInterface validation initialization that was not overriden: this is a no-op.") data = {} return data
MIT License
exiahuang/salesforcexytools
xlsxwriter/format.py
Format.set_bottom
python
def set_bottom(self, bottom=1):
    self.bottom = bottom
Set the Format bottom property.

Args:
    bottom: Default is 1, border type 1.

Returns:
    Nothing.
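A short usage sketch (the file name is arbitrary; border index 2 is xlsxwriter's medium style):

import xlsxwriter

workbook = xlsxwriter.Workbook("demo.xlsx")
fmt = workbook.add_format()
fmt.set_bottom(2)            # medium bottom border
fmt.set_bottom_color("red")  # color that same edge
workbook.close()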
https://github.com/exiahuang/salesforcexytools/blob/dde689292bc991c1357ec7479a8e291cb537e8ff/xlsxwriter/format.py#L495-L506
from . import xmlwriter class Format(xmlwriter.XMLwriter): def __init__(self, properties={}, xf_indices=None, dxf_indices=None): super(Format, self).__init__() self.xf_format_indices = xf_indices self.dxf_format_indices = dxf_indices self.xf_index = None self.dxf_index = None self.num_format = 0 self.num_format_index = 0 self.font_index = 0 self.has_font = 0 self.has_dxf_font = 0 self.bold = 0 self.underline = 0 self.italic = 0 self.font_name = 'Calibri' self.font_size = 11 self.font_color = 0x0 self.font_strikeout = 0 self.font_outline = 0 self.font_shadow = 0 self.font_script = 0 self.font_family = 2 self.font_charset = 0 self.font_scheme = 'minor' self.font_condense = 0 self.font_extend = 0 self.theme = 0 self.hyperlink = 0 self.hidden = 0 self.locked = 1 self.text_h_align = 0 self.text_wrap = 0 self.text_v_align = 0 self.text_justlast = 0 self.rotation = 0 self.fg_color = 0 self.bg_color = 0 self.pattern = 0 self.has_fill = 0 self.has_dxf_fill = 0 self.fill_index = 0 self.fill_count = 0 self.border_index = 0 self.has_border = 0 self.has_dxf_border = 0 self.border_count = 0 self.bottom = 0 self.bottom_color = 0 self.diag_border = 0 self.diag_color = 0 self.diag_type = 0 self.left = 0 self.left_color = 0 self.right = 0 self.right_color = 0 self.top = 0 self.top_color = 0 self.indent = 0 self.shrink = 0 self.merge_range = 0 self.reading_order = 0 self.just_distrib = 0 self.color_indexed = 0 self.font_only = 0 for key, value in properties.items(): getattr(self, 'set_' + key)(value) def set_font_name(self, font_name): self.font_name = font_name def set_font_size(self, font_size=11): self.font_size = font_size def set_font_color(self, font_color): self.font_color = self._get_color(font_color) def set_bold(self, bold=True): self.bold = bold def set_italic(self, italic=True): self.italic = italic def set_underline(self, underline=1): self.underline = underline def set_font_strikeout(self, font_strikeout=True): self.font_strikeout = font_strikeout def set_font_script(self, font_script=1): self.font_script = font_script def set_font_outline(self, font_outline=True): self.font_outline = font_outline def set_font_shadow(self, font_shadow=True): self.font_shadow = font_shadow def set_num_format(self, num_format): self.num_format = num_format def set_locked(self, locked=True): self.locked = locked def set_hidden(self, hidden=True): self.hidden = hidden def set_align(self, alignment): alignment = alignment.lower() if alignment == 'left': self.set_text_h_align(1) if alignment == 'centre': self.set_text_h_align(2) if alignment == 'center': self.set_text_h_align(2) if alignment == 'right': self.set_text_h_align(3) if alignment == 'fill': self.set_text_h_align(4) if alignment == 'justify': self.set_text_h_align(5) if alignment == 'center_across': self.set_text_h_align(6) if alignment == 'centre_across': self.set_text_h_align(6) if alignment == 'distributed': self.set_text_h_align(7) if alignment == 'justify_distributed': self.set_text_h_align(7) if alignment == 'justify_distributed': self.just_distrib = 1 if alignment == 'top': self.set_text_v_align(1) if alignment == 'vcentre': self.set_text_v_align(2) if alignment == 'vcenter': self.set_text_v_align(2) if alignment == 'bottom': self.set_text_v_align(3) if alignment == 'vjustify': self.set_text_v_align(4) if alignment == 'vdistributed': self.set_text_v_align(5) def set_center_across(self, align_type=None): self.set_text_h_align(6) def set_text_wrap(self, text_wrap=True): self.text_wrap = text_wrap def set_rotation(self, rotation): rotation = 
int(rotation) if rotation == 270: rotation = 255 elif -90 <= rotation <= 90: if rotation < 0: rotation = -rotation + 90 else: raise Exception( "Rotation rotation outside range: -90 <= angle <= 90") self.rotation = rotation def set_indent(self, indent=1): self.indent = indent def set_shrink(self, shrink=True): self.shrink = shrink def set_text_justlast(self, text_justlast=True): self.text_justlast = text_justlast def set_pattern(self, pattern=1): self.pattern = pattern def set_bg_color(self, bg_color): self.bg_color = self._get_color(bg_color) def set_fg_color(self, fg_color): self.fg_color = self._get_color(fg_color) def set_border(self, style=1): self.set_bottom(style) self.set_top(style) self.set_left(style) self.set_right(style) def set_border_color(self, color): self.set_bottom_color(color) self.set_top_color(color) self.set_left_color(color) self.set_right_color(color)
Apache License 2.0
davidtvs/pytorch-lr-finder
torch_lr_finder/lr_finder.py
StateCacher.__del__
python
def __del__(self):
    if self.in_memory:
        return

    for k in self.cached:
        if os.path.exists(self.cached[k]):
            os.remove(self.cached[k])
Check whether there are unused cached files in `cache_dir` when this instance is being destroyed, and remove them.
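A hedged sketch of the disk-backed mode this destructor cleans up after (cache_dir must already exist; "/tmp" is a POSIX assumption):

import torch

cacher = StateCacher(in_memory=False, cache_dir="/tmp")
cacher.store("model", {"w": torch.zeros(2)})   # writes state_model_<id>.pt
state = cacher.retrieve("model")               # loads it back; the file stays on disk

del cacher  # __del__ removes any cached files still present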
https://github.com/davidtvs/pytorch-lr-finder/blob/acc5e7ee7711a460bf3e1cc5c5f05575ba1e1b4b/torch_lr_finder/lr_finder.py#L645-L654
import copy import os import torch import numpy as np from tqdm.autonotebook import tqdm from torch.optim.lr_scheduler import _LRScheduler import matplotlib.pyplot as plt from torch.utils.data import DataLoader from packaging import version PYTORCH_VERSION = version.parse(torch.__version__) try: from apex import amp IS_AMP_AVAILABLE = True except ImportError: IS_AMP_AVAILABLE = False class DataLoaderIter(object): def __init__(self, data_loader): self.data_loader = data_loader self._iterator = iter(data_loader) @property def dataset(self): return self.data_loader.dataset def inputs_labels_from_batch(self, batch_data): if not isinstance(batch_data, list) and not isinstance(batch_data, tuple): raise ValueError( "Your batch type is not supported: {}. Please inherit from " "`TrainDataLoaderIter` or `ValDataLoaderIter` and override the " "`inputs_labels_from_batch` method.".format(type(batch_data)) ) inputs, labels, *_ = batch_data return inputs, labels def __iter__(self): return self def __next__(self): batch = next(self._iterator) return self.inputs_labels_from_batch(batch) class TrainDataLoaderIter(DataLoaderIter): def __init__(self, data_loader, auto_reset=True): super().__init__(data_loader) self.auto_reset = auto_reset def __next__(self): try: batch = next(self._iterator) inputs, labels = self.inputs_labels_from_batch(batch) except StopIteration: if not self.auto_reset: raise self._iterator = iter(self.data_loader) batch = next(self._iterator) inputs, labels = self.inputs_labels_from_batch(batch) return inputs, labels class ValDataLoaderIter(DataLoaderIter): def __init__(self, data_loader): super().__init__(data_loader) self.run_limit = len(self.data_loader) self.run_counter = 0 def __iter__(self): if self.run_counter >= self.run_limit: self._iterator = iter(self.data_loader) self.run_counter = 0 return self def __next__(self): self.run_counter += 1 return super(ValDataLoaderIter, self).__next__() class LRFinder(object): def __init__( self, model, optimizer, criterion, device=None, memory_cache=True, cache_dir=None, ): self.optimizer = optimizer self._check_for_scheduler() self.model = model self.criterion = criterion self.history = {"lr": [], "loss": []} self.best_loss = None self.memory_cache = memory_cache self.cache_dir = cache_dir self.model_device = next(self.model.parameters()).device self.state_cacher = StateCacher(memory_cache, cache_dir=cache_dir) self.state_cacher.store("model", self.model.state_dict()) self.state_cacher.store("optimizer", self.optimizer.state_dict()) if device: self.device = device else: self.device = self.model_device def reset(self): self.model.load_state_dict(self.state_cacher.retrieve("model")) self.optimizer.load_state_dict(self.state_cacher.retrieve("optimizer")) self.model.to(self.model_device) def range_test( self, train_loader, val_loader=None, start_lr=None, end_lr=10, num_iter=100, step_mode="exp", smooth_f=0.05, diverge_th=5, accumulation_steps=1, non_blocking_transfer=True, ): self.history = {"lr": [], "loss": []} self.best_loss = None self.model.to(self.device) self._check_for_scheduler() if start_lr: self._set_learning_rate(start_lr) if step_mode.lower() == "exp": lr_schedule = ExponentialLR(self.optimizer, end_lr, num_iter) elif step_mode.lower() == "linear": lr_schedule = LinearLR(self.optimizer, end_lr, num_iter) else: raise ValueError("expected one of (exp, linear), got {}".format(step_mode)) if smooth_f < 0 or smooth_f >= 1: raise ValueError("smooth_f is outside the range [0, 1[") if isinstance(train_loader, DataLoader): train_iter = 
TrainDataLoaderIter(train_loader) elif isinstance(train_loader, TrainDataLoaderIter): train_iter = train_loader else: raise ValueError( "`train_loader` has unsupported type: {}." "Expected types are `torch.utils.data.DataLoader`" "or child of `TrainDataLoaderIter`.".format(type(train_loader)) ) if val_loader: if isinstance(val_loader, DataLoader): val_iter = ValDataLoaderIter(val_loader) elif isinstance(val_loader, ValDataLoaderIter): val_iter = val_loader else: raise ValueError( "`val_loader` has unsupported type: {}." "Expected types are `torch.utils.data.DataLoader`" "or child of `ValDataLoaderIter`.".format(type(val_loader)) ) for iteration in tqdm(range(num_iter)): loss = self._train_batch( train_iter, accumulation_steps, non_blocking_transfer=non_blocking_transfer, ) if val_loader: loss = self._validate( val_iter, non_blocking_transfer=non_blocking_transfer ) self.history["lr"].append(lr_schedule.get_lr()[0]) lr_schedule.step() if iteration == 0: self.best_loss = loss else: if smooth_f > 0: loss = smooth_f * loss + (1 - smooth_f) * self.history["loss"][-1] if loss < self.best_loss: self.best_loss = loss self.history["loss"].append(loss) if loss > diverge_th * self.best_loss: print("Stopping early, the loss has diverged") break print("Learning rate search finished. See the graph with {finder_name}.plot()") def _set_learning_rate(self, new_lrs): if not isinstance(new_lrs, list): new_lrs = [new_lrs] * len(self.optimizer.param_groups) if len(new_lrs) != len(self.optimizer.param_groups): raise ValueError( "Length of `new_lrs` is not equal to the number of parameter groups " + "in the given optimizer" ) for param_group, new_lr in zip(self.optimizer.param_groups, new_lrs): param_group["lr"] = new_lr def _check_for_scheduler(self): for param_group in self.optimizer.param_groups: if "initial_lr" in param_group: raise RuntimeError("Optimizer already has a scheduler attached to it") def _train_batch(self, train_iter, accumulation_steps, non_blocking_transfer=True): self.model.train() total_loss = None self.optimizer.zero_grad() for i in range(accumulation_steps): inputs, labels = next(train_iter) inputs, labels = self._move_to_device( inputs, labels, non_blocking=non_blocking_transfer ) outputs = self.model(inputs) loss = self.criterion(outputs, labels) loss /= accumulation_steps if IS_AMP_AVAILABLE and hasattr(self.optimizer, "_amp_stash"): delay_unscale = ((i + 1) % accumulation_steps) != 0 with amp.scale_loss( loss, self.optimizer, delay_unscale=delay_unscale ) as scaled_loss: scaled_loss.backward() else: loss.backward() if total_loss is None: total_loss = loss else: total_loss += loss self.optimizer.step() return total_loss.item() def _move_to_device(self, inputs, labels, non_blocking=True): def move(obj, device, non_blocking=True): if hasattr(obj, "to"): return obj.to(device, non_blocking=non_blocking) elif isinstance(obj, tuple): return tuple(move(o, device, non_blocking) for o in obj) elif isinstance(obj, list): return [move(o, device, non_blocking) for o in obj] elif isinstance(obj, dict): return {k: move(o, device, non_blocking) for k, o in obj.items()} else: return obj inputs = move(inputs, self.device, non_blocking=non_blocking) labels = move(labels, self.device, non_blocking=non_blocking) return inputs, labels def _validate(self, val_iter, non_blocking_transfer=True): running_loss = 0 self.model.eval() with torch.no_grad(): for inputs, labels in val_iter: inputs, labels = self._move_to_device( inputs, labels, non_blocking=non_blocking_transfer ) outputs = self.model(inputs) loss = 
self.criterion(outputs, labels) running_loss += loss.item() * len(labels) return running_loss / len(val_iter.dataset) def plot( self, skip_start=10, skip_end=5, log_lr=True, show_lr=None, ax=None, suggest_lr=True, ): if skip_start < 0: raise ValueError("skip_start cannot be negative") if skip_end < 0: raise ValueError("skip_end cannot be negative") if show_lr is not None and not isinstance(show_lr, float): raise ValueError("show_lr must be float") lrs = self.history["lr"] losses = self.history["loss"] if skip_end == 0: lrs = lrs[skip_start:] losses = losses[skip_start:] else: lrs = lrs[skip_start:-skip_end] losses = losses[skip_start:-skip_end] fig = None if ax is None: fig, ax = plt.subplots() ax.plot(lrs, losses) if suggest_lr: print("LR suggestion: steepest gradient") min_grad_idx = None try: min_grad_idx = (np.gradient(np.array(losses))).argmin() except ValueError: print( "Failed to compute the gradients, there might not be enough points." ) if min_grad_idx is not None: print("Suggested LR: {:.2E}".format(lrs[min_grad_idx])) ax.scatter( lrs[min_grad_idx], losses[min_grad_idx], s=75, marker="o", color="red", zorder=3, label="steepest gradient", ) ax.legend() if log_lr: ax.set_xscale("log") ax.set_xlabel("Learning rate") ax.set_ylabel("Loss") if show_lr is not None: ax.axvline(x=show_lr, color="red") if fig is not None: plt.show() if suggest_lr and min_grad_idx is not None: return ax, lrs[min_grad_idx] else: return ax class LinearLR(_LRScheduler): def __init__(self, optimizer, end_lr, num_iter, last_epoch=-1): self.end_lr = end_lr if num_iter <= 1: raise ValueError("`num_iter` must be larger than 1") self.num_iter = num_iter super(LinearLR, self).__init__(optimizer, last_epoch) def get_lr(self): if PYTORCH_VERSION < version.parse("1.1.0"): curr_iter = self.last_epoch + 1 r = curr_iter / (self.num_iter - 1) else: r = self.last_epoch / (self.num_iter - 1) return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs] class ExponentialLR(_LRScheduler): def __init__(self, optimizer, end_lr, num_iter, last_epoch=-1): self.end_lr = end_lr if num_iter <= 1: raise ValueError("`num_iter` must be larger than 1") self.num_iter = num_iter super(ExponentialLR, self).__init__(optimizer, last_epoch) def get_lr(self): if PYTORCH_VERSION < version.parse("1.1.0"): curr_iter = self.last_epoch + 1 r = curr_iter / (self.num_iter - 1) else: r = self.last_epoch / (self.num_iter - 1) return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs] class StateCacher(object): def __init__(self, in_memory, cache_dir=None): self.in_memory = in_memory self.cache_dir = cache_dir if self.cache_dir is None: import tempfile self.cache_dir = tempfile.gettempdir() else: if not os.path.isdir(self.cache_dir): raise ValueError("Given `cache_dir` is not a valid directory.") self.cached = {} def store(self, key, state_dict): if self.in_memory: self.cached.update({key: copy.deepcopy(state_dict)}) else: fn = os.path.join(self.cache_dir, "state_{}_{}.pt".format(key, id(self))) self.cached.update({key: fn}) torch.save(state_dict, fn) def retrieve(self, key): if key not in self.cached: raise KeyError("Target {} was not cached.".format(key)) if self.in_memory: return self.cached.get(key) else: fn = self.cached.get(key) if not os.path.exists(fn): raise RuntimeError( "Failed to load state in {}. File doesn't exist anymore.".format(fn) ) state_dict = torch.load(fn, map_location=lambda storage, location: storage) return state_dict
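A minimal usage sketch of the LRFinder defined above. The toy model, data, and hyperparameter values are illustrative assumptions, not part of the original module:

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset

# tiny synthetic classification problem (hypothetical)
train_loader = DataLoader(
    TensorDataset(torch.randn(64, 10), torch.randint(0, 2, (64,))),
    batch_size=8,
)
model = nn.Linear(10, 2)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=1e-7)  # start well below the useful range

finder = LRFinder(model, optimizer, criterion, device="cpu")
finder.range_test(train_loader, end_lr=10, num_iter=100, step_mode="exp")
result = finder.plot()   # returns (ax, suggested_lr) when a suggestion is found
finder.reset()           # restore the cached model/optimizer state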
MIT License
xmunoz/sodapy
sodapy/socrata.py
Socrata.publish
python
def publish(self, dataset_identifier, content_type="json"):
    base = utils.format_old_api_request(dataid=dataset_identifier)
    resource = "{}/publication.{}".format(base, content_type)
    return self._perform_request("post", resource)
The create() method creates a dataset in a "working copy" state. This method publishes it.
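A hedged sketch of the create/publish flow (the domain, credentials, and the returned "id" field are assumptions based on the Socrata view JSON, not values from this repository):

client = Socrata("data.example.com", "FakeAppToken",
                 username="user@example.com", password="secret")
draft = client.create("My Dataset")   # dataset starts in a "working copy" state
client.publish(draft["id"])           # promote the working copy to published
client.close()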
https://github.com/xmunoz/sodapy/blob/8a815a7ac3f4d955f0ac7f9b525c8755584179b4/sodapy/socrata.py#L356-L364
import csv from io import StringIO, IOBase import json import logging import os import re import requests from sodapy.constants import DATASETS_PATH import sodapy.utils as utils class Socrata: DEFAULT_LIMIT = 1000 def __init__( self, domain, app_token, username=None, password=None, access_token=None, session_adapter=None, timeout=10, ): if not domain: raise Exception("A domain is required.") self.domain = domain self.session = requests.Session() if not app_token: logging.warning( "Requests made without an app_token will be" " subject to strict throttling limits." ) else: self.session.headers.update({"X-App-token": app_token}) utils.authentication_validation(username, password, access_token) if username and password: self.session.auth = (username, password) elif access_token: self.session.headers.update( {"Authorization": "OAuth {}".format(access_token)} ) if session_adapter: self.session.mount(session_adapter["prefix"], session_adapter["adapter"]) self.uri_prefix = session_adapter["prefix"] else: self.uri_prefix = "https://" if not isinstance(timeout, (int, float)): raise TypeError("Timeout must be numeric.") self.timeout = timeout def __enter__(self): return self def __exit__(self, exc_type=None, exc_value=None, traceback=None): self.close() def datasets(self, limit=0, offset=0, order=None, **kwargs): filter_multiple = set( [ "ids", "domains", "categories", "tags", "only", "shared_to", "column_names", ] ) filter_single = set( [ "q", "min_should_match", "attribution", "license", "derived_from", "provenance", "for_user", "visibility", "public", "published", "approval_status", "explicitly_hidden", "derived", ] ) all_filters = filter_multiple.union(filter_single) for key in kwargs: if key not in all_filters: raise TypeError("Unexpected keyword argument %s" % key) params = [("domains", self.domain)] if limit: params.append(("limit", limit)) for key, value in kwargs.items(): if key in filter_multiple: for item in value: params.append((key, item)) elif key in filter_single: params.append((key, value)) if order: params.append(("order", order)) results = self._perform_request( "get", DATASETS_PATH, params=params + [("offset", offset)] ) num_results = results["resultSetSize"] if ( limit >= num_results or limit == len(results["results"]) or num_results == len(results["results"]) ): return results["results"] if limit != 0: raise Exception( "Unexpected number of results returned from endpoint.\ Expected {}, got {}.".format( limit, len(results["results"]) ) ) all_results = results["results"] while len(all_results) != num_results: offset += len(results["results"]) results = self._perform_request( "get", DATASETS_PATH, params=params + [("offset", offset)] ) all_results.extend(results["results"]) return all_results def create(self, name, **kwargs): new_backend = kwargs.pop("new_backend", False) resource = utils.format_old_api_request(content_type="json") if new_backend: resource += "?nbe=true" payload = {"name": name} if "row_identifier" in kwargs: payload["metadata"] = {"rowIdentifier": kwargs.pop("row_identifier", None)} payload.update(kwargs) payload = utils.clear_empty_values(payload) return self._perform_update("post", resource, payload) def set_permission( self, dataset_identifier, permission="private", content_type="json" ): resource = utils.format_old_api_request( dataid=dataset_identifier, content_type=content_type ) params = { "method": "setPermission", "value": "public.read" if permission == "public" else permission, } return self._perform_request("put", resource, params=params) def 
get_metadata(self, dataset_identifier, content_type="json"): resource = utils.format_old_api_request( dataid=dataset_identifier, content_type=content_type ) return self._perform_request("get", resource) def update_metadata(self, dataset_identifier, update_fields, content_type="json"): resource = utils.format_old_api_request( dataid=dataset_identifier, content_type=content_type ) return self._perform_update("put", resource, update_fields) def download_attachments( self, dataset_identifier, content_type="json", download_dir="~/sodapy_downloads" ): metadata = self.get_metadata(dataset_identifier, content_type=content_type) files = [] attachments = metadata["metadata"].get("attachments") if not attachments: logging.info("No attachments were found or downloaded.") return files download_dir = os.path.join( os.path.expanduser(download_dir), dataset_identifier ) if not os.path.exists(download_dir): os.makedirs(download_dir) for attachment in attachments: file_path = os.path.join(download_dir, attachment["filename"]) has_assetid = attachment.get("assetId", False) if has_assetid: base = utils.format_old_api_request(dataid=dataset_identifier) assetid = attachment["assetId"] resource = "{}/files/{}?download=true&filename={}".format( base, assetid, attachment["filename"] ) else: base = "/api/assets" assetid = attachment["blobId"] resource = "{}/{}?download=true".format(base, assetid) uri = "{}{}{}".format(self.uri_prefix, self.domain, resource) utils.download_file(uri, file_path) files.append(file_path) logging.info( "The following files were downloaded:\n\t%s", "\n\t".join(files) ) return files
MIT License
skopos-team/skopos
skopos/memories/memory.py
Memory.lifo_replace
python
def lifo_replace(number_of_episodes):
    return number_of_episodes - 1
Return the index of the latest element added to the list
https://github.com/skopos-team/skopos/blob/d200b83f0aeff514f4759690f1f3041af54ea383/skopos/memories/memory.py#L22-L24
from __future__ import absolute_import

import random

import numpy as np


class Memory(object):
    def __init__(self):
        super(Memory, self).__init__()

    """ Methods to extract the index of the list to be replaced """

    def random_replace(number_of_episodes):
        # randint is inclusive on both ends, so subtract 1 to stay in range
        return random.randint(0, number_of_episodes - 1)

    def fifo_replace(number_of_episodes):
        return 0
Apache License 2.0
vcasecnikovs/yet-another-yolov4-pytorch
lars.py
LARS.step
python
def step(self, epoch=None, closure=None):
    loss = None
    if closure is not None:
        loss = closure()

    if epoch is None:
        epoch = self.epoch
        self.epoch += 1

    for group in self.param_groups:
        weight_decay = group['weight_decay']
        momentum = group['momentum']
        eta = group['eta']
        lr = group['lr']
        max_epoch = group['max_epoch']

        for p in group['params']:
            if p.grad is None:
                continue

            param_state = self.state[p]
            d_p = p.grad.data

            weight_norm = torch.norm(p.data)
            grad_norm = torch.norm(d_p)

            # polynomial (quadratic) decay of the global learning rate
            decay = (1 - float(epoch) / max_epoch) ** 2
            global_lr = lr * decay

            # layer-wise trust ratio (the LARS scaling)
            local_lr = eta * weight_norm / (grad_norm + weight_decay * weight_norm)

            actual_lr = local_lr * global_lr

            if 'momentum_buffer' not in param_state:
                buf = param_state['momentum_buffer'] = torch.zeros_like(p.data)
            else:
                buf = param_state['momentum_buffer']
            # the legacy add_(scalar, tensor) overload is gone from recent
            # PyTorch; folding the step size into the tensor is equivalent
            buf.mul_(momentum).add_(actual_lr * (d_p + weight_decay * p.data))
            p.data.add_(-buf)

    return loss
Performs a single optimization step. Arguments: closure (callable, optional): A closure that reevaluates the model and returns the loss. epoch: current epoch to calculate polynomial LR decay schedule. if None, uses self.epoch and increments it.
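A short sketch of driving the polynomial decay schedule explicitly through the epoch argument (the model and the data loader are assumed placeholders):

import torch.nn as nn
import torch.nn.functional as F

model = nn.Linear(10, 1)                      # hypothetical model
optimizer = LARS(model.parameters(), lr=0.1, max_epoch=200)

for epoch in range(200):
    for inputs, targets in loader:            # `loader` is assumed to exist
        optimizer.zero_grad()
        loss = F.mse_loss(model(inputs), targets)
        loss.backward()
        optimizer.step(epoch=epoch)           # feeds the decay term directly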
https://github.com/vcasecnikovs/yet-another-yolov4-pytorch/blob/47b045014e6d7686fad7a6cbe3d3d3e5373a2b7e/lars.py#L39-L91
import torch
from torch.optim.optimizer import Optimizer, required


class LARS(Optimizer):
    def __init__(self, params, lr=required, momentum=.9,
                 weight_decay=.0005, eta=0.001, max_epoch=200):
        if lr is not required and lr < 0.0:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if momentum < 0.0:
            raise ValueError("Invalid momentum value: {}".format(momentum))
        if weight_decay < 0.0:
            raise ValueError("Invalid weight_decay value: {}"
                             .format(weight_decay))
        if eta < 0.0:
            raise ValueError("Invalid LARS coefficient value: {}".format(eta))
        self.epoch = 0
        defaults = dict(lr=lr, momentum=momentum, weight_decay=weight_decay,
                        eta=eta, max_epoch=max_epoch)
        super(LARS, self).__init__(params, defaults)
MIT License
mishbahr/djangocms-forms
djangocms_forms/forms.py
FormDefinitionAdminForm.clean_form_template
python
def clean_form_template(self):
    form_template = self.cleaned_data.get('form_template', '')
    if form_template:
        try:
            get_template(form_template)
        except TemplateDoesNotExist:
            msg = _('Selected Form Template does not exist.')
            raise forms.ValidationError(msg)
    return form_template
Check if template exists
https://github.com/mishbahr/djangocms-forms/blob/9d7a4ef9769fd5e1526921c084d6da7b8070a2c1/djangocms_forms/forms.py#L75-L84
from __future__ import unicode_literals import re from django import forms from django.contrib.admin.widgets import AdminDateWidget, FilteredSelectMultiple from django.core.mail import EmailMultiAlternatives from django.core.urlresolvers import reverse from django.template import TemplateDoesNotExist from django.template.defaultfilters import slugify from django.template.loader import get_template, render_to_string from django.utils.translation import ugettext_lazy as _ from ipware.ip import get_ip from unidecode import unidecode from .fields import FormBuilderFileField, HoneyPotField, MultipleChoiceAutoCompleteField, ReCaptchaField from .models import Form, FormDefinition, FormField, FormSubmission from .utils import int_to_hashid from .widgets import DateInput, TelephoneInput, TimeInput class FormFieldInlineForm(forms.ModelForm): def clean(self): cleaned_data = super(FormFieldInlineForm, self).clean() requires_choice_values = ['checkbox_multiple', 'select', 'radio'] if (cleaned_data.get('field_type') in requires_choice_values and not cleaned_data.get('choice_values')): error_msg = _('This field is required.') self._errors['choice_values'] = self.error_class([error_msg]) return cleaned_data class Meta: model = FormField fields = '__all__' class FormDefinitionAdminForm(forms.ModelForm): def clean(self): cleaned_data = super(FormDefinitionAdminForm, self).clean() populated_count = 0 storage_fields = ('email_to', 'save_data', ) for field in storage_fields: if cleaned_data.get(field, None): populated_count += 1 if not populated_count: error_msg = _( 'You must choose a storage option for this Form. ' 'You can choose to use multiple storage options if you prefer. ') for field in storage_fields: self._errors[field] = self.error_class([error_msg]) page_redirect = cleaned_data.get('page_redirect') external_redirect = cleaned_data.get('external_redirect') redirect_delay = cleaned_data.get('redirect_delay') if redirect_delay and not any([page_redirect, external_redirect]): self._errors['redirect_delay'] = self.error_class([ _('You must specify either a page or external redirect when ' 'adding a redirect delay.') ]) return cleaned_data
BSD 3-Clause New or Revised License
dankilman/awe
awe/view.py
Element.new
python
def new(self, obj, **kwargs):
    from . import parser
    if CustomElement._is_custom(obj) and not obj._registered:
        self.register(obj)
    if parser.is_parsable(obj):
        context = parser.ParserContext(inputs=kwargs.pop('inputs', None))
        element_configuration = self._parse(obj, context)
        return self._new_children(element_configuration, **kwargs)
    else:
        return self._new_child(obj, **kwargs)
This method can return different results depending on ``obj`` type. If ``obj`` is a class that inherits from Element, a new element of that type will be created. If ``obj`` is a dict or list, it will be parsed and the parser result will be created. If ``obj`` is a string, it will be yaml loaded and that result will be passed to the parser. When result is passed to the parser, an additional ``inputs`` argument can be supplied as a dict from keys to values that are referenced in the DSL using the ``$`` intrinsic function. :param obj: The ``Element`` subclass, a dict/list or a string to be passed to the parser. :param kwargs: Arguments that should be passed to the ``_init`` method of the created element or one of ``props``, ``style``, ``id``, ``inputs`` if valid. :return: The created element.
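A hedged sketch of the class form, using only names visible in this module (`page` stands for the library's root element, e.g. an awe Page; the dict/string forms go through the parser, and their exact DSL schema is not shown in this snippet):

card = page.new_card()            # typed helpers wrap _new_child the same way
text = card.new(Text, text='hi')  # class form: Text inherits from Element
# dict, list, and YAML-string inputs would instead be routed through
# parser.is_parsable() and _parse(), optionally with inputs={...}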
https://github.com/dankilman/awe/blob/56871e5cb1b1a925fbbb9b94704fc100dd76202b/awe/view.py#L263-L287
from collections import deque import pydash import six from typing import List from . import variables from . import element_updater builtin_element_types = {} def builtin(cls): builtin_element_types[cls.__name__] = cls return cls class Element(object): allow_children = True def __init__(self, root, parent, element_id, props, style, stack): self.root = root self.id = element_id or str(id(self)) self.root_id = getattr(root, 'id', self.id) self.element_builder = getattr(root, 'element_builder', ElementBuilder(root)) self.element_type = type(self).__name__ self.parent = parent self.index = len(parent.children) + 1 if isinstance(parent, Element) else 0 self.children = [] self.ref = Ref() self.data = {} self.props = props or {} self.props['key'] = self.id if style: self.props['style'] = style self._prop_children = {} self._init_complete = False self._removed = False self._stack = stack def new_grid(self, columns, **kwargs): return self._new_child(Grid, columns=columns, **kwargs) def new_tabs(self, **kwargs): return self._new_child(Tabs, **kwargs) def new_table(self, headers, page_size=None, **kwargs): return self._new_child(Table, headers=headers, page_size=page_size, **kwargs) def new_button(self, function, text='', icon=None, shape=None, type='default', block=False, **kwargs): return self._new_child( Button, function=function, text=text, icon=icon, shape=shape, type=type, block=block, **kwargs ) def new_input(self, placeholder=None, on_enter=None, **kwargs): return self._new_child(Input, placeholder=placeholder, on_enter=on_enter, **kwargs) def new_card(self, text='', **kwargs): return self._new_child(Card, text=text, **kwargs) def new_text(self, text='', **kwargs): return self._new_child(Text, text=text, **kwargs) def new_divider(self, **kwargs): return self._new_child(Divider, **kwargs) def new_collapse(self, **kwargs): return self._new_child(Collapse, **kwargs) def new_chart(self, data=None, options=None, transform=None, moving_window=None, **kwargs): from .chart import Chart return self._new_child( Chart, data=data, options=options, transform=transform, moving_window=moving_window, **kwargs ) def new_icon(self, type, theme='outlined', spin=False, two_tone_color=None, **kwargs): return self._new_child(Icon, type=type, theme=theme, spin=spin, two_tone_color=two_tone_color, **kwargs) def new_inline(self, text='', **kwargs): return self._new_child(Inline, text=text, **kwargs) def new_link(self, link, **kwargs): props = kwargs.setdefault('props', {}) props.setdefault('href', link) return self._new_child(Raw, tag='a', **kwargs) def new_markdown(self, source, **kwargs): return self._new_child(Markdown, source=source, **kwargs) def new_prop(self, prop, root=None): assert self.parent assert prop not in self.props assert prop not in self._prop_children result = root or self._new_root() self._prop_children[prop] = result.id if self._init_complete: self._dispatch({ 'type': 'newPropChild', 'id': result.id, 'prop': prop, 'elementRootId': self.root_id, 'elementId': self.id }) return result
MIT License
alexandonian/pretorched-x
pretorched/models/utils.py
SizeEstimator.estimate_size
python
def estimate_size(self):
    self.get_parameter_sizes()
    self.get_output_sizes()
    self.calc_param_bits()
    self.calc_forward_backward_bits()
    self.calc_input_bits()
    total = self.param_bits + self.forward_backward_bits + self.input_bits

    total_megabytes = (total / 8) / (1024**2)
    return total_megabytes, total
Estimate model size in memory in megabytes and bits.
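A minimal sketch using the estimator on a small convolutional stack (the model is an illustrative assumption; the default input_size corresponds to a single 1x32x32 sample):

import torch.nn as nn

model = nn.Sequential(nn.Conv2d(1, 8, 3), nn.ReLU(), nn.Conv2d(8, 4, 3))
se = SizeEstimator(model, input_size=(1, 1, 32, 32))
megabytes, bits = se.estimate_size()
print("~{:.2f} MB ({} bits)".format(megabytes, bits))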
https://github.com/alexandonian/pretorched-x/blob/36a57540854142be1b1730fe26aa4eab9b0aacad/pretorched/models/utils.py#L68-L78
import torch import numpy as np class SizeEstimator(object): def __init__(self, model, input_size=(1, 1, 32, 32), bits=32): self.model = model self.input_size = input_size self.bits = 32 return def get_parameter_sizes(self): mods = list(self.model.modules()) sizes = [] for i in range(1, len(mods)): m = mods[i] p = list(m.parameters()) for j in range(len(p)): sizes.append(np.array(p[j].size())) self.param_sizes = sizes return def get_output_sizes(self): input_ = torch.FloatTensor(*self.input_size) mods = list(self.model.modules()) out_sizes = [] for i in range(1, len(mods)): m = mods[i] out = m(input_) out_sizes.append(np.array(out.size())) input_ = out self.out_sizes = out_sizes return def calc_param_bits(self): total_bits = 0 for i in range(len(self.param_sizes)): s = self.param_sizes[i] bits = np.prod(np.array(s)) * self.bits total_bits += bits self.param_bits = total_bits return def calc_forward_backward_bits(self): total_bits = 0 for i in range(len(self.out_sizes)): s = self.out_sizes[i] bits = np.prod(np.array(s)) * self.bits total_bits += bits self.forward_backward_bits = (total_bits * 2) return def calc_input_bits(self): self.input_bits = np.prod(np.array(self.input_size)) * self.bits return
MIT License
mila-iqia/myia
myia/testing/common.py
ai32_of
python
def ai32_of(*shp, value=ANYTHING):
    return arr_of(i32, shp, value)
Generate symbolic array of int32.
https://github.com/mila-iqia/myia/blob/56774a39579b4ec4123f44843ad4ca688acc859b/myia/testing/common.py#L63-L65
import inspect import typing from dataclasses import dataclass, is_dataclass import numpy as np from ovld import ovld from myia import ArithmeticData, xtype from myia.abstract import ( ANYTHING, SHAPE, TYPE, VALUE, AbstractArray, AbstractBottom, AbstractClass, AbstractDict, AbstractExternal, AbstractHandle, AbstractJTagged, AbstractScalar, AbstractTaggedUnion, AbstractTuple, AbstractType, AbstractUnion, AbstractValue, empty, from_value, listof, type_to_abstract, ) from myia.classes import ADT from myia.ir import MultitypeGraph from myia.utils import EnvInstance, HandleInstance, dataclass_fields from myia.xtype import Bool, f16, f32, f64, i16, i32, i64, u64 B = Bool Bot = AbstractBottom() EmptyTuple = typing.Tuple[()] AA = AbstractArray(ANYTHING, {SHAPE: ANYTHING, TYPE: ANYTHING}) AN = AbstractArray(ANYTHING, {SHAPE: ANYTHING, TYPE: xtype.NDArray}) def arr_of(t, shp, value): return AbstractArray( AbstractScalar({VALUE: value, TYPE: t}), {SHAPE: shp, TYPE: xtype.NDArray}, ) def ai64_of(*shp, value=ANYTHING): return arr_of(i64, shp, value)
MIT License
tensorflow/similarity
tensorflow_similarity/samplers/samplers.py
Sampler.__len__
python
def __len__(self) -> int:
    return self.steps_per_epoch
Return the number of batches per epoch
https://github.com/tensorflow/similarity/blob/cf70eab8190b96d6c9a1eca77d2d6ddaf40920ae/tensorflow_similarity/samplers/samplers.py#L125-L127
import abc from typing import Any, Callable, Optional, Tuple from tensorflow.keras.utils import Sequence from tensorflow_similarity.types import FloatTensor, IntTensor Augmenter = Callable[[FloatTensor, IntTensor, int, bool], Tuple[FloatTensor, IntTensor]] Scheduler = Callable[[Any], Any] class Sampler(Sequence, metaclass=abc.ABCMeta): def __init__( self, classes_per_batch: int, examples_per_class_per_batch: int = 2, num_augmentations_per_example: int = 0, steps_per_epoch: int = 1000, augmenter: Optional[Augmenter] = None, warmup: int = 0, ) -> None: self.epoch = 0 self.classes_per_batch = classes_per_batch self.examples_per_class_per_batch = examples_per_class_per_batch self.num_augmentations_per_example = num_augmentations_per_example self.batch_size = classes_per_batch * examples_per_class_per_batch self.aug_size = (self.batch_size * (1 + num_augmentations_per_example)) - self.batch_size self.steps_per_epoch = steps_per_epoch self.augmenter = augmenter self.warmup = warmup self.is_warmup = True if warmup else False print(f"\nThe initial batch size is {self.batch_size + self.aug_size} " f"({self.classes_per_batch} classes * " f"{self.examples_per_class_per_batch} examples per class) with " f"{self.num_augmentations_per_example} augmenters") @abc.abstractmethod def _get_examples(self, batch_id: int, num_classes: int, examples_per_class: int) -> Tuple[FloatTensor, IntTensor]:
Apache License 2.0
tmancal74/quantarhei
quantarhei/spectroscopy/pathwayanalyzer.py
get_TwoDSpectrum_from_pathways
python
def get_TwoDSpectrum_from_pathways(pathways, t1axis, t3axis):
    from .mocktwodcalculator import MockTwoDSpectrumCalculator

    t2axis = TimeAxis(0.0, 1, 1.0)

    mcalc = MockTwoDSpectrumCalculator(t1axis, t2axis, t3axis)
    mcalc.bootstrap(rwa=convert(12000.0, "1/cm", "int"), pathways=pathways)
    twod = mcalc.calculate()
    return twod
Returns a 2D spectrum calculated based on submitted Liouville pathways
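A hedged sketch of calling it (the pathway list would come from an earlier quantarhei response calculation; the axis parameters are illustrative):

t1axis = TimeAxis(0.0, 100, 10.0)   # (start, number of steps, step)
t3axis = TimeAxis(0.0, 100, 10.0)
twod = get_TwoDSpectrum_from_pathways(pathways, t1axis, t3axis)
twod.plot()   # assumption: the returned TwoDSpectrum exposes plot()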
https://github.com/tmancal74/quantarhei/blob/54a40cc55cdedf86bf04a5d705227fe69461d408/quantarhei/spectroscopy/pathwayanalyzer.py#L663-L673
import os import numpy from ..core.managers import UnitsManaged, Manager from ..core.wrappers import deprecated from ..core.units import cm2int from ..core.units import convert from ..core.parcel import load_parcel from .. import REAL, COMPLEX from ..core.time import TimeAxis from ..core.dfunction import DFunction class LiouvillePathwayAnalyzer(UnitsManaged): def __init__(self, pathways=None): self.pathways = pathways def set_pathways(self, pathways): self.pathways = pathways def get_pathways(self): return self.pathways def get_number_of_pathways(self): return(len(self.pathways)) @deprecated def max_pref(self, pathways): if pathways is None: pthways = self.pathways else: pthways = pathways pmax = 0.0 k = 0 rec = -1 for pway in pthways: if pway.pref > pmax: rec = k pmax = pway.pref k += 1 return (pmax, rec) def max_amplitude(self): return max_amplitude(self.pathways) @deprecated def select_pref_GT(self, val, pathways=None, replace=True, verbose=False): if pathways is None: pthways = self.pathways else: pthways = pathways selected = [] for pway in pthways: if numpy.abs(pway.pref) > val: selected.append(pway) if verbose: print("Selected", len(selected), "pathways") if (pathways is None) and replace: self.pathways = selected return selected def select_amplitude_GT(self, val, replace=True, verbose=False): selected = select_amplitude_GT(val, self.pathways, verbose=verbose) if replace: self.pathways = selected else: return selected def select_frequency_window(self, window, replace=True, verbose=False): selected = select_frequency_window(window, self.pathways, verbose) if replace: self.pathways = selected else: return selected def select_omega2(self, interval, replace=True, verbose=False): selected = select_omega2(interval, self.pathways, verbose) if replace: self.pathways = selected else: return selected @deprecated def order_by_pref(self, pthways): lst = sorted(pthways, key=lambda pway: abs(pway.pref), reverse=True) return lst def order_by_amplitude(self, replace=True): orderred = order_by_amplitude(self.pathways) if replace: self.pathways = orderred else: return orderred def select_sign(self, sign, replace=True): selected = select_sign(self.pathways, sign) if replace: self.pathways = selected else: return selected def select_type(self, ptype="REPH", replace=True): selected = select_type(self.pathways, ptype) if replace: self.pathways = selected else: return selected return selected def max_amplitude(pathways): pmax = 0.0 k = 0 rec = -1 for pway in pathways: if pway.pref > pmax: rec = k pmax = pway.pref k += 1 return (pmax, rec) def select_amplitude_GT(val, pathways, verbose=False): pthways = pathways selected = [] for pway in pthways: if numpy.abs(pway.pref) > val: selected.append(pway) if verbose: print("Selected", len(selected), "pathways") return selected def select_frequency_window(window, pathways, verbose=False): pthways = pathways m = Manager() om1_low = m.convert_energy_2_internal_u(window[0]) om1_upp = m.convert_energy_2_internal_u(window[1]) om3_low = m.convert_energy_2_internal_u(window[2]) om3_upp = m.convert_energy_2_internal_u(window[3]) selected = [] for pway in pthways: ne = len(pway.frequency) om1 = numpy.abs(pway.get_interval_frequency(0)) om3 = numpy.abs(pway.get_interval_frequency(ne-2)) if (((om1 >= om1_low) and (om1 <= om1_upp)) and ((om3 >= om3_low) and (om3 <= om3_upp))): selected.append(pway) if verbose: print("Selected", len(selected), "pathways") return selected def select_omega2(interval, pathways, secular=True, tolerance=10.0*cm2int, verbose=False): pthways = 
pathways m = Manager() om2_low = m.convert_energy_2_internal_u(interval[0]) om2_upp = m.convert_energy_2_internal_u(interval[1]) selected = [] for pway in pthways: ne = len(pway.frequency) om2 = pway.get_interval_frequency(ne-3) if ne > 4: om2_2 = pway.get_interval_frequency(ne-4) if secular: if numpy.abs(om2_2) <= tolerance: if (om2 >= om2_low) and (om2 <= om2_upp): selected.append(pway) elif numpy.abs(om2 - om2_2) <= tolerance: if (om2 >= om2_low) and (om2 <= om2_upp): selected.append(pway) else: if (om2 >= om2_low) and (om2 <= om2_upp): selected.append(pway) else: if (om2 >= om2_low) and (om2 <= om2_upp): selected.append(pway) if verbose: print("Selected", len(selected), "pathways") return selected def order_by_amplitude(pthways): lst = sorted(pthways, key=lambda pway: abs(pway.pref), reverse=True) return lst def select_sign(pathways, sign): selected = [] pos = False if sign > 0.0: pos = True for pway in pathways: if pos: if pway.sign > 0.0: selected.append(pway) else: if pway.sign < 0.0: selected.append(pway) return selected def select_type(pathways, stype): di = dict(REPH="R", NONR="NR") selected = [] for pw in pathways: if pw.pathway_type == di[stype]: selected.append(pw) return selected def select_by_states(pathways, states): for pw in pathways: ch = -1 for k in range(pw.states.shape[0]): if not ((pw.states[k, 0] == states[k+1][0]) & (pw.states[k, 1] == states[k+1][1])): break ch += 1 if ch == k: return pw def look_for_pathways(name="pathways", ext="qrp", check=False, directory="."): import glob import os.path path = os.path.join(directory,name+"_*."+ext) files = glob.glob(path) t2s = [] for fl in files: t2 = float(fl.split("_")[1].split("."+ext)[0]) t2s.append(t2) t2s = numpy.array(t2s) t2s = numpy.sort(t2s) if check: pass return t2s def load_pathways_by_t2(t2, name="pathways", ext="qrp", directory=".", tag_type=REAL): t2_str = str(t2) fname = name+"_"+t2_str+"."+ext path = os.path.join(directory, fname) try: pw = load_parcel(path) except: print("Error while loading") return [] return pw def save_pathways_by_t2(t2, name="pathways", ext="qrp", directory=".", tag_type=REAL): pass def get_evolution_from_saved_pathways(states, name="pathways", ext="qrp", directory=".", tag_type=REAL, repl=0.0): t2s = look_for_pathways(name=name, ext=ext, directory=directory) t2s = numpy.sort(t2s) evol = _get_evol(t2s, states, name, ext, directory, repl=repl) dt = t2s[1] - t2s[0] length = len(t2s) taxis = TimeAxis(t2s[0], length, dt) ii = 0 for tt in t2s: if tt != taxis.data[ii]: raise Exception("The set of available times"+ " does not correspond to a continuous time axis") ii += 1 return DFunction(x=taxis, y=evol) def get_prefactors_from_saved_pathways(states, name="pathways", ext="qrp", directory=".", tag_type=REAL, repl=0.0): t2s = look_for_pathways(name=name, ext=ext, directory=directory) t2s = numpy.sort(t2s) evol = _get_pref(t2s, states, name, ext, directory, repl=repl) dt = t2s[1] - t2s[0] length = len(t2s) taxis = TimeAxis(t2s[0], length, dt) ii = 0 for tt in t2s: if tt != taxis.data[ii]: raise Exception("The set of available times"+ " does not correspond to a continuous time axis") ii += 1 return DFunction(x=taxis, y=evol) def get_TwoDSpectrum_from_saved_pathways(t2, t1axis, t3axis, name="pathways", ext="qrp", directory=".", tag_type=REAL): pwt2 = load_pathways_by_t2(t2, name=name, ext=ext, directory=directory, tag_type=tag_type) twod = get_TwoDSpectrum_from_pathways(pwt2, t1axis, t3axis) return twod
MIT License
mdenolle/noisepy
test/data_check/combine_miniseed_stream.py
check_sample
python
def check_sample(stream):
    if len(stream) == 0:
        return stream
    freq = max(tr.stats.sampling_rate for tr in stream)
    # iterate over a copy: removing traces from the stream while iterating
    # over it directly would skip elements
    for tr in list(stream):
        if tr.stats.sampling_rate != freq:
            stream.remove(tr)
    return stream
Keep only the traces recorded at the highest sampling rate found in the stream; traces sampled at lower rates are removed. :type stream: `~obspy.core.stream.Stream` object. :param stream: Stream containing one or more day-long traces :return: Stream containing only the traces at the maximum sampling rate
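A hedged sketch of the filtering behaviour on a synthetic two-trace stream (the data values are placeholders):

import numpy as np
from obspy import Stream, Trace

tr_hi = Trace(data=np.zeros(1000))
tr_hi.stats.sampling_rate = 100.0
tr_lo = Trace(data=np.zeros(500))
tr_lo.stats.sampling_rate = 50.0

st = check_sample(Stream(traces=[tr_hi, tr_lo]))
print([tr.stats.sampling_rate for tr in st])   # [100.0]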
https://github.com/mdenolle/noisepy/blob/69f0e151b04740aa9a1648ef3b6ef53664398683/test/data_check/combine_miniseed_stream.py#L246-L267
import os import sys import glob import obspy import datetime import numpy as np import scipy.signal import noise_module from numba import jit import matplotlib.pyplot as plt from obspy.signal.filter import bandpass,lowpass from obspy.signal.util import _npts2nfft sys.path.insert(1,'../../src') import noise_module def preprocess_raw(st,downsamp_freq,clean_time=True,pre_filt=None,resp=False,respdir=None): if len(st) > 100 or portion_gaps(st) > 0.5: print('Too many traces or gaps in Stream: Continue!') st=[] return st st = check_sample(st) if len(st) == 0: print('No traces in Stream: Continue!') return st for ii in range(len(st)): st[ii].data = np.float32(st[ii].data) st[ii].data = scipy.signal.detrend(st[ii].data,type='constant') st[ii].data = scipy.signal.detrend(st[ii].data,type='linear') st.merge(method=1,fill_value=0) sps = st[0].stats.sampling_rate if abs(downsamp_freq-sps) > 1E-4: st[0].data = bandpass(st[0].data,0.005,0.4*downsamp_freq,df=sps,corners=4,zerophase=True) st.interpolate(downsamp_freq,method='weighted_average_slopes') delta = st[0].stats.delta fric = st[0].stats.starttime.microsecond%(delta*1E6) if fric>1E-4: st[0].data = segment_interpolate(np.float32(st[0].data),float(fric/delta*1E6)) st[0].stats.starttime-=(fric*1E-6) station = st[0].stats.station if resp is not False: if resp != 'inv': if (respdir is None) or (not os.path.isdir(respdir)): raise ValueError('response file folder not found! abort!') if resp == 'inv': if not st[0].stats.response: raise ValueError('no response found in the inventory! abort!') else: print('removing response using inv') st.remove_response(output="VEL",pre_filt=pre_filt,water_level=60) elif resp == 'spectrum': print('remove response using spectrum') specfile = glob.glob(os.path.join(respdir,'*'+station+'*')) if len(specfile)==0: raise ValueError('no response sepctrum found for %s' % station) st = resp_spectrum(st[0],specfile[0],downsamp_freq) elif resp == 'RESP_files': print('using RESP files') seedresp = glob.glob(os.path.join(respdir,'RESP.'+station+'*')) if len(seedresp)==0: raise ValueError('no RESP files found for %s' % station) st.simulate(paz_remove=None,pre_filt=pre_filt,seedresp=seedresp) elif resp == 'polozeros': print('using polos and zeros') paz_sts = glob.glob(os.path.join(respdir,'*'+station+'*')) if len(paz_sts)==0: raise ValueError('no polozeros found for %s' % station) st.simulate(paz_remove=paz_sts,pre_filt=pre_filt) else: raise ValueError('no such option of resp in preprocess_raw! 
please double check!') if clean_time: st = clean_daily_segments(st) return st def portion_gaps(stream): pgaps=0 npts = (stream[-1].stats.endtime-stream[0].stats.starttime)*stream[0].stats.sampling_rate if len(stream)==0: return pgaps else: for ii in range(len(stream)-1): pgaps += (stream[ii+1].stats.starttime-stream[ii].stats.endtime)*stream[ii].stats.sampling_rate return pgaps/npts @jit('float32[:](float32[:],float32)') def segment_interpolate(sig1,nfric): npts = len(sig1) sig2 = np.zeros(npts,dtype=np.float32) for ii in range(npts): if ii==0 or ii==npts: sig2[ii]=sig1[ii] else: sig2[ii]=(1-nfric)*sig1[ii+1]+nfric*sig1[ii] return sig2 def resp_spectrum(source,resp_file,downsamp_freq): respz = np.load(resp_file) nrespz= respz[1][:] spec_freq = max(respz[0]) nfft = _npts2nfft(source.stats.npts) sps = source.stats.sample_rate if spec_freq < 0.5*sps: raise ValueError('spectrum file has peak freq smaller than the data, abort!') else: indx = np.where(respz[0]<=0.5*sps) nfreq = np.linspace(0,0.5*sps,nfft) nrespz= np.interp(nfreq,respz[0][indx],respz[1][indx]) source_spect = np.fft.rfft(source.data,n=nfft) source_spect *= nrespz source.data = np.fft.irfft(source_spect)[0:source.stats.npts] return source def clean_daily_segments(tr): stream_time = tr[0].stats.starttime time0 = obspy.UTCDateTime(stream_time.year,stream_time.month,stream_time.day,0,0,0) time1 = obspy.UTCDateTime(stream_time.year,stream_time.month,stream_time.day,12,0,0) time2 = time1+datetime.timedelta(hours=12) if stream_time <= time1: starttime=time0 else: starttime=time2 ndays = round((tr[0].stats.endtime-starttime)/(time2-time0)) if ndays==0: tr=[] return tr else: ntr = obspy.Stream() ttr = tr[0].copy() for ii in range(ndays): tr[0] = ttr.copy() endtime = starttime+datetime.timedelta(days=1) tr[0].trim(starttime=starttime,endtime=endtime,pad=True,fill_value=0) ntr.append(tr[0]) starttime = endtime return ntr
MIT License
the-zebulan/codewars
katas/kyu_6/string_searching_with_wildcard.py
find
python
def find(needle, haystack):
    # re.escape() stopped escaping '_' in Python 3.7, so replace the bare
    # underscore; the raw string keeps the \S wildcard intact
    compiled = re.compile(re.escape(needle).replace("_", r"\S"))
    searched = re.search(compiled, haystack)
    return searched.start() if searched else -1
Solution from 'knight07' on CodeWars
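The underscore acts as a single-character wildcard, for example:

print(find("c_ts", "my cats like hats"))   # 3  ("c_ts" matches "cats")
print(find("h_ts", "my cats like hats"))   # 13 ("h_ts" matches "hats")
print(find("dog", "my cats like hats"))    # -1 (no match)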
https://github.com/the-zebulan/codewars/blob/1eafd1247d60955a5dfb63e4882e8ce86019f43a/katas/kyu_6/string_searching_with_wildcard.py#L15-L19
import re
MIT License
unixsurfer/haproxystats
haproxystats/utils.py
load_file_content
python
def load_file_content(filename):
    commented = re.compile(r'\s*?#')
    try:
        with open(filename, 'r') as _file:
            _content = [line.strip() for line in _file.read().splitlines()
                        if not commented.match(line)]
    except OSError as exc:
        log.error('failed to read %s:%s', filename, exc)
        return []
    else:
        return _content
Build a list from the content of a file. Arguments: filename (str): An absolute path to a file Returns: A list of the file's non-comment lines
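A small sketch of the comment-skipping behaviour (the file path is a placeholder):

with open("/tmp/servers.txt", "w") as f:
    f.write("# frontends\nfrontend1\nfrontend2\n")
print(load_file_content("/tmp/servers.txt"))   # ['frontend1', 'frontend2']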
https://github.com/unixsurfer/haproxystats/blob/3ef4b3cacada9b6ed52dcc7726d8dad81a821ed1/haproxystats/utils.py#L95-L114
import os import stat from collections import defaultdict, deque from functools import wraps import io import socket import shutil import logging import time import configparser import glob import re from urllib.parse import urlparse import pyinotify import pandas from haproxystats.metrics import (MetricNamesPercentage, FRONTEND_METRICS, BACKEND_METRICS, BACKEND_AVG_METRICS, BACKEND_AVG_TIME_METRICS, SERVER_METRICS, SERVER_AVG_METRICS, SERVER_AVG_TIME_METRICS) log = logging.getLogger('root') FILE_SUFFIX_INFO = '_info' FILE_SUFFIX_STAT = '_stat' CMD_SUFFIX_MAP = {'info': FILE_SUFFIX_INFO, 'stat': FILE_SUFFIX_STAT} OPTIONS_TYPE = { 'paths': { 'base-dir': 'get', }, 'pull': { 'loglevel': 'get', 'retries': 'getint', 'timeout': 'getfloat', 'interval': 'getfloat', 'pull-timeout': 'getfloat', 'pull-interval': 'getint', 'buffer-limit': 'getint', 'dst-dir': 'get', 'tmp-dst-dir': 'get', 'workers': 'getint', 'queue-size': 'getint', }, 'process': { 'workers': 'getint', 'src-dir': 'get', 'aggr-server-metrics': 'getboolean', 'per-process-metrics': 'getboolean', 'calculate-percentages': 'getboolean', 'liveness-check-interval': 'getfloat', }, 'graphite': { 'server': 'get', 'port': 'getint', 'retries': 'getint', 'interval': 'getfloat', 'connect-timeout': 'getfloat', 'write-timeout': 'getfloat', 'delay': 'getfloat', 'backoff': 'getfloat', 'namespace': 'get', 'prefix-hostname': 'getboolean', 'fqdn': 'getboolean', 'queue-size': 'getint', }, 'local-store': { 'dir': 'get', }, } VALID_TCP_SOCKETS = [ 'tcp', 'unix', ] class BrokenConnection(Exception): def __init__(self, raised): self.raised = raised super().__init__()
Apache License 2.0
jimmysong/lepton
helper.py
decode_bech32
python
def decode_bech32(s):
    hrp, raw_data = s.encode('ascii').split(b'1')
    data = [BECH32_ALPHABET.index(c) for c in raw_data]
    if not bech32_verify_checksum(hrp, data):
        raise ValueError('bad address: {}'.format(s))
    version = data[0]
    # repack the 5-bit groups (minus the 6-symbol checksum) into bytes
    number = 0
    for digit in data[1:-6]:
        number = (number << 5) + digit
    num_bytes = (len(data) - 7) * 5 // 8
    bits_to_ignore = (len(data) - 7) * 5 % 8
    number >>= bits_to_ignore
    witness = number.to_bytes(num_bytes, 'big')
    if version == 0:
        version_byte = b'\x00'
    else:
        version_byte = encode_varint(version + 0x50)
    if num_bytes < 2 or num_bytes > 40:
        raise ValueError('bytes out of range: {}'.format(num_bytes))
    length_byte = encode_varint(num_bytes)
    return version_byte + length_byte + bytes(witness)
Convert a bech32 address to a witness program
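The canonical BIP173 P2WPKH test vector illustrates the output layout (version byte, push-length byte, 20-byte witness program):

addr = "bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4"
print(decode_bech32(addr).hex())
# 0014751e76e8199196d454941c45d1b3a323f1433bd6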
https://github.com/jimmysong/lepton/blob/064bf1c3bcfed5f5799f8dfd83f85ed30349f6a5/helper.py#L173-L194
from csiphash import siphash24 from io import BytesIO from unittest import SkipTest, TestCase, TestSuite, TextTestRunner import hashlib SIGHASH_ALL = 1 SIGHASH_NONE = 2 SIGHASH_SINGLE = 3 BASE58_ALPHABET = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' BECH32_ALPHABET = b'qpzry9x8gf2tvdw0s3jn54khce6mua7l' GEN = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3] TWO_WEEKS = 60 * 60 * 24 * 14 MAX_TARGET = 0xffff * 256**(0x1d - 3) def bytes_to_str(b, encoding='ascii'): return b.decode(encoding) def str_to_bytes(s, encoding='ascii'): return s.encode(encoding) def little_endian_to_int(b): return int.from_bytes(b, 'little') def int_to_little_endian(n, length): return n.to_bytes(length, 'little') def hash160(s): return hashlib.new('ripemd160', hashlib.sha256(s).digest()).digest() def sha256(s): return hashlib.sha256(s).digest() def hash256(s): return hashlib.sha256(hashlib.sha256(s).digest()).digest() def encode_base58(s): count = 0 for c in s: if c == 0: count += 1 else: break prefix = b'1' * count num = int.from_bytes(s, 'big') result = bytearray() while num > 0: num, mod = divmod(num, 58) result.insert(0, BASE58_ALPHABET[mod]) return prefix + bytes(result) def encode_base58_checksum(raw): checksum = hash256(raw)[:4] base58 = encode_base58(raw + checksum) return base58.decode('ascii') def raw_decode_base58(s, num_bytes): if type(s) == str: b = s.encode('ascii') else: b = s num = 0 for c in b: num *= 58 num += BASE58_ALPHABET.index(c) combined = num.to_bytes(num_bytes, 'big') checksum = combined[-4:] if hash256(combined[:-4])[:4] != checksum: raise ValueError('bad checksum {} != {}'.format( hash256(combined[:-4])[:4].hex(), checksum.hex())) return combined[:-4] def decode_base58(s): raw = raw_decode_base58(s, 25) return raw[1:] def bech32_polymod(values): chk = 1 for v in values: b = (chk >> 25) chk = (chk & 0x1ffffff) << 5 ^ v for i in range(5): chk ^= GEN[i] if ((b >> i) & 1) else 0 return chk def bech32_hrp_expand(s): return [x >> 5 for x in s] + [0] + [x & 31 for x in s] def bech32_verify_checksum(hrp, data): return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1 def bech32_create_checksum(hrp, data): values = bech32_hrp_expand(hrp) + data polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1 return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] def group_32(s): result = [] unused_bits = 0 current = 0 for c in s: unused_bits += 8 current = current * 256 + c while unused_bits > 5: unused_bits -= 5 result.append(current >> unused_bits) mask = (1 << unused_bits) - 1 current &= mask result.append(current << (5 - unused_bits)) return result def encode_bech32(nums): return bytes([BECH32_ALPHABET[n] for n in nums]) def encode_bech32_checksum(s, testnet=False): if testnet: prefix = b'tb' else: prefix = b'bc' version = s[0] if version > 0: version -= 0x50 length = s[1] data = [version] + group_32(s[2:2 + length]) checksum = bech32_create_checksum(prefix, data) bech32 = encode_bech32(data + checksum) result = prefix + b'1' + bech32 return result.decode('ascii')
MIT License
googleapis/python-bigquery-connection
google/cloud/bigquery_connection_v1/services/connection_service/client.py
ConnectionServiceClient.from_service_account_info
python
def from_service_account_info(cls, info: dict, *args, **kwargs):
    credentials = service_account.Credentials.from_service_account_info(info)
    kwargs["credentials"] = credentials
    return cls(*args, **kwargs)
Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: ConnectionServiceClient: The constructed client.
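A hedged sketch of the documented call pattern (the key-file path is a placeholder):

import json

with open("service_account.json") as f:   # downloaded service-account key
    sa_info = json.load(f)
client = ConnectionServiceClient.from_service_account_info(sa_info)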
https://github.com/googleapis/python-bigquery-connection/blob/a13f505f46937db367d9dde2c89c06f9636b2fd4/google/cloud/bigquery_connection_v1/services/connection_service/client.py#L118-L132
from collections import OrderedDict from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.transport import mtls from google.auth.transport.grpc import SslCredentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.cloud.bigquery_connection_v1.services.connection_service import pagers from google.cloud.bigquery_connection_v1.types import connection from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import field_mask_pb2 from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ConnectionServiceGrpcTransport from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport class ConnectionServiceClientMeta(type): _transport_registry = ( OrderedDict() ) _transport_registry["grpc"] = ConnectionServiceGrpcTransport _transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport def get_transport_class( cls, label: str = None, ) -> Type[ConnectionServiceTransport]: if label: return cls._transport_registry[label] return next(iter(cls._transport_registry.values())) class ConnectionServiceClient(metaclass=ConnectionServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint mtls_endpoint_re = re.compile( r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" ) m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint if sandbox: return api_endpoint.replace( "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = "bigqueryconnection.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( DEFAULT_ENDPOINT ) @classmethod
Apache License 2.0
openstack/manila
manila/api/views/share_replicas.py
ReplicationViewBuilder.summary_list
python
def summary_list(self, request, replicas):
    return self._list_view(self.summary, request, replicas)
Summary view of a list of replicas.
https://github.com/openstack/manila/blob/34d209484366cd921e052d37c5f9daef5e97af20/manila/api/views/share_replicas.py#L29-L31
from manila.api import common


class ReplicationViewBuilder(common.ViewBuilder):
    _collection_name = 'share_replicas'
    _collection_links = 'share_replica_links'

    _detail_version_modifiers = [
        "add_cast_rules_to_readonly_field",
    ]
Apache License 2.0
tebeka/pythonwise
bq_flow.py
view
python
def view(deps):
    tmp = NamedTemporaryFile()
    tmp.write('''
    digraph G {
        graph[rankdir=LR];
        node[shape=box];
        edge[color=blue];
    '''.encode('utf-8'))
    for src, dest in deps:
        line = '"{}" -> "{}";'.format(src, dest)
        tmp.write(line.encode('utf-8'))
    tmp.write('}\n'.encode('utf-8'))
    tmp.flush()
    tmp.seek(0, SEEK_SET)

    png = '{}.png'.format(tmp.name)
    check_call(['dot', '-Tpng', '-o', png], stdin=tmp)

    cmd = {
        'win32': 'start',
        'darwin': 'open',
    }.get(platform, 'xdg-open')
    check_call([cmd, png])
Generate an image of the dependency graph and show it. Requires the "dot" utility (Graphviz).
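A small sketch (assumes Graphviz's `dot` is on PATH; the table names are illustrative):

deps = [("raw_events", "sessions"), ("sessions", "daily_summary")]
view(deps)   # writes a temporary PNG and opens it with the platform viewer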
https://github.com/tebeka/pythonwise/blob/3bc3735160a1761be83751126f99180ed353b2e4/bq_flow.py#L44-L69
from collections import defaultdict
from glob import iglob
from os import path, SEEK_SET
from subprocess import check_call
from sys import platform
from tempfile import NamedTemporaryFile
import re


def file2table(fname):
    return path.basename(fname)[:-4]


def iter_deps(root):
    for sql_file in iglob('{}/*.sql'.format(root)):
        dest = file2table(sql_file)
        with open(sql_file) as fp:
            for line in fp:
                for src in re.findall('`([^`]+)`', line):
                    if '.' not in src:
                        msg = 'table without dataset - "{}"'.format(src)
                        raise ValueError(msg)
                    src = src[src.rfind('.')+1:]
                    yield src, dest
BSD 3-Clause New or Revised License
kotori-y/scopy
scopy/ScoDruglikeness/molproperty_Lib.py
PC_properties.GetProperties
python
def GetProperties(self,
                  items=['MW', 'Vol', 'Dense', 'fChar', 'nBond', 'nAtom', 'nHD', 'nHA', 'nHB',
                         'nHet', 'nStereo', 'nHev', 'nRot', 'nRig', 'nRing', 'Flex',
                         'logP', 'logD', 'pKa', 'logSw', 'ab', 'MR', 'TPSA', 'AP', 'HetRatio',
                         'Fsp3', 'MaxRing', 'QEDmean', 'QEDmax', 'QEDnone', 'SAscore', 'NPscore',
                         'nSingle', 'nDouble', 'nTriple', 'nC', 'nB', 'nF', 'nCl', 'nBr', 'nI',
                         'nP', 'nS', 'nO', 'nN'],
                  showSMILES=False):
    # copy so the items.insert() below cannot mutate the shared default list
    items = list(items)
    # property name -> the bound call that computes it (evaluated via eval)
    funcl = {'MW': 'self.CalculateMolWeight()', 'Vol': 'self.CalculateMolVolume()',
             'Dense': 'self.CalculateMolDensity()', 'fChar': 'self.CalculateMolFCharge()',
             'nBond': 'self.CalculateNumBonds()', 'nAtom': 'self.CalculateNumAtoms()',
             'nHet': 'self.CalculateNumHetero()', 'nRot': 'self.CalculateNumRotatableBonds()',
             'nRig': 'self.CalculateNumRigidBonds()', 'nRing': 'self.CalculateNumRing()',
             'nHev': 'self.CalculateNumHeavyAtom()', 'logP': 'self.CalculateLogP()',
             'logD': 'self.CalculateLogD()', 'pKa': 'self.CalculatepKa()',
             'ab': 'self.CheckAcid()', 'MR': 'self.CalculateMolMR()',
             'nHD': 'self.CalculateNumHDonors()', 'nHA': 'self.CalculateNumHAcceptors()',
             'nHB': 'self.CalculateNumHyBond()', 'AP': 'self.CalculateAromaticProportion()',
             'logSw': 'self.CalculateLogSw()', 'Fsp3': 'self.CalculateFsp3()',
             'TPSA': 'self.CalculateTPSA()', 'MaxRing': 'self.CalculateMaxSizeSystemRing()',
             'nStereo': 'self.CalculateNumStereocenters()', 'Flex': 'self.CalculateFlexibility()',
             'HetRatio': 'self.CalculateHetCarbonRatio()', 'QEDmean': 'self.CalculateQEDmean()',
             'QEDmax': 'self.CalculateQEDmax()', 'QEDnone': 'self.CalculateQEDnone()',
             'SAscore': 'self.CalculateSAscore()', 'NPscore': 'self.CalculateNPscore()',
             'nSingle': 'self.CalculateNumSinBond()', 'nDouble': 'self.CalculateNumDouBond()',
             'nTriple': 'self.CalculateNumTriBond()', 'nC': 'self.CalculateNumCarbon()',
             'nB': 'self.CalculateNumBoron()', 'nF': 'self.CalculateNumFluorin()',
             'nCl': 'self.CalculateNumChlorin()', 'nBr': 'self.CalculateNumBromine()',
             'nI': 'self.CalculateNumIodine()', 'nP': 'self.CalculateNumPhosphor()',
             'nS': 'self.CalculateNumSulfur()', 'nO': 'self.CalculateNumOxygen()',
             'nN': 'self.CalculateNumNitrogen()'}

    vals = []
    for item in items:
        val = eval(funcl[item])
        vals.append(val)

    if showSMILES:
        pool = Pool(self.n_jobs)
        smis = pool.map_async(_GetSmi, self.mols).get()
        pool.close()
        pool.join()
        items.insert(0, 'SMILES')
        vals.insert(0, smis)

    return dict(zip(items, vals))
Get all physicochemical (PC) properties in scopy
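A hedged usage sketch computing a handful of properties (the SMILES strings are illustrative; on platforms that spawn processes, wrap this in an `if __name__ == "__main__":` guard because of multiprocessing):

from rdkit import Chem

mols = [Chem.MolFromSmiles("CCO"), Chem.MolFromSmiles("c1ccccc1")]
props = PC_properties(mols, n_jobs=1)
res = props.GetProperties(items=["MW", "logP", "nHD", "nHA"], showSMILES=True)
print(res["MW"])   # one value per molecule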
https://github.com/kotori-y/scopy/blob/b15e51a13507b5283888da90548bd50f7df5c50c/scopy/ScoDruglikeness/molproperty_Lib.py#L920-L994
import os import csv from multiprocessing import Pool from rdkit import Chem from . import molproperty from .. import ScoConfig from ..ScoRepresent.fingerprints import CalculateGhoseCrippen def _GetSmi(mol): return Chem.MolToSmiles(mol) class PC_properties(object): def __init__(self, mols, n_jobs=1): self.mols = mols if type(mols) is not Chem.rdchem.Mol else [mols] self.n_jobs = n_jobs if n_jobs>=1 else None def CalculateMolWeight(self): pool = Pool(self.n_jobs) MW = pool.map_async(molproperty.CalculateMolWeight, self.mols).get() pool.close() pool.join() return MW def CalculateNumBonds(self): pool = Pool(self.n_jobs) nBond = pool.map_async(molproperty.CalculateNumBonds, self.mols).get() pool.close() pool.join() return nBond def CalculateNumAtoms(self): pool = Pool(self.n_jobs) nAtom = pool.map_async(molproperty.CalculateNumAtoms, self.mols).get() pool.close() pool.join() return nAtom def CalculateNumHetero(self): pool = Pool(self.n_jobs) nHet = pool.map_async(molproperty.CalculateNumHetero, self.mols).get() pool.close() pool.join() return nHet def CalculateNumRotatableBonds(self): pool = Pool(self.n_jobs) nRot = pool.map_async(molproperty.CalculateNumRotatableBonds, self.mols).get() pool.close() pool.join() return nRot def CalculateNumRigidBonds(self): pool = Pool(self.n_jobs) nRig = pool.map_async(molproperty.CalculateNumRigidBonds, self.mols).get() pool.close() pool.join() return nRig def CalculateFlexibility(self): pool = Pool(self.n_jobs) Flex = pool.map_async(molproperty.CalculateFlexibility, self.mols).get() pool.close() pool.join() return Flex def CalculateNumRing(self): pool = Pool(self.n_jobs) nRing = pool.map_async(molproperty.CalculateNumRing, self.mols).get() pool.close() pool.join() return nRing def CalculateNumHeavyAtom(self): pool = Pool(self.n_jobs) nHev = pool.map_async(molproperty.CalculateNumHeavyAtom, self.mols).get() pool.close() pool.join() return nHev def CalculateLogD(self): intercept = 0.5748907159915493 fps = CalculateGhoseCrippen(self.mols,self.n_jobs) with open(os.path.join(ScoConfig.CrippenDir, 'Crippen.txt')) as f_obj: lines = csv.reader(f_obj,delimiter='\t') next(lines) contri = [x[-1] for x in lines] contri = [float(x) for x in contri] f_obj.close() logD = (fps*contri).sum(axis=1) + intercept return list(logD) def CalculateLogP(self): pool = Pool(self.n_jobs) logp = pool.map_async(molproperty.CalculateLogP, self.mols).get() pool.close() pool.join() return logp def CheckAcid(self): pool = Pool(self.n_jobs) ab = pool.map_async(molproperty.CheckAcid, self.mols).get() pool.close() pool.join() return ab def CalculatepKa(self): import warnings warnings.filterwarnings('ignore') from math import log10 logDl = self.CalculateLogD() logPl = self.CalculateLogP() statusl = self.CheckAcid() res = [] for status,logP, logD in zip(statusl,logPl,logDl): try: if status == 'acid': pKa = 7.4 - log10(10**(logP-logD)-1) else: pKa = log10(10**(logP-logD)-1) - 7.4 res.append(pKa) except: res.append('N/A') return res def CalculateMolMR(self): pool = Pool(self.n_jobs) mr = pool.map_async(molproperty.CalculateMolMR, self.mols).get() pool.close() pool.join() return mr def CalculateNumHDonors(self): pool = Pool(self.n_jobs) nHD = pool.map_async(molproperty.CalculateNumHDonors, self.mols).get() pool.close() pool.join() return nHD def CalculateNumHAcceptors(self): pool = Pool(self.n_jobs) nHA = pool.map_async(molproperty.CalculateNumHAcceptors, self.mols).get() pool.close() pool.join() return nHA def CalculateNumHyBond(self): pool = Pool(self.n_jobs) nHB = 
pool.map_async(molproperty.CalculateNumHyBond, self.mols).get() pool.close() pool.join() return nHB def CalculateAromaticProportion(self): pool = Pool(self.n_jobs) aroma = pool.map_async(molproperty.CalculateAromaticProportion, self.mols).get() pool.close() pool.join() return aroma def CalculateLogSw(self): pool = Pool(self.n_jobs) logSw = pool.map_async(molproperty.CalculateLogSw, self.mols).get() pool.close() pool.join() return logSw def CalculateFsp3(self): pool = Pool(self.n_jobs) fsp3 = pool.map_async(molproperty.CalculateFsp3, self.mols).get() pool.close() pool.join() return fsp3 def CalculateTPSA(self): pool = Pool(self.n_jobs) tpsa = pool.map_async(molproperty.CalculateTPSA, self.mols).get() pool.close() pool.join() return tpsa def CalculateQEDmean(self): pool = Pool(self.n_jobs) qed_mean = pool.map_async(molproperty.CalculateQEDmean, self.mols).get() pool.close() pool.join() return qed_mean def CalculateQEDmax(self): pool = Pool(self.n_jobs) qed_max = pool.map_async(molproperty.CalculateQEDmax, self.mols).get() pool.close() pool.join() return qed_max def CalculateQEDnone(self): pool = Pool(self.n_jobs) qed_none = pool.map_async(molproperty.CalculateQEDnone, self.mols).get() pool.close() pool.join() return qed_none def CalculateMaxSizeSystemRing(self): pool = Pool(self.n_jobs) maxring = pool.map_async(molproperty.CalculateMaxSizeSystemRing, self.mols).get() pool.close() pool.join() return maxring def CalculateNumStereocenters(self): nStereo = map(molproperty.CalculateNumStereocenters, self.mols) return list(nStereo) def CalculateNumCarbon(self): pool = Pool(self.n_jobs) nC = pool.map_async(molproperty.CalculateNumCarbon, self.mols).get() pool.close() pool.join() return nC def CalculateNumBoron(self): pool = Pool(self.n_jobs) nB = pool.map_async(molproperty.CalculateNumBoron, self.mols).get() pool.close() pool.join() return nB def CalculateNumFluorin(self): pool = Pool(self.n_jobs) nF = pool.map_async(molproperty.CalculateNumFluorin, self.mols).get() pool.close() pool.join() return nF def CalculateNumChlorin(self): pool = Pool(self.n_jobs) nCl = pool.map_async(molproperty.CalculateNumChlorin, self.mols).get() pool.close() pool.join() return nCl def CalculateNumBromine(self): pool = Pool(self.n_jobs) nBr = pool.map_async(molproperty.CalculateNumBromine, self.mols).get() pool.close() pool.join() return nBr def CalculateNumIodine(self): pool = Pool(self.n_jobs) nI = pool.map_async(molproperty.CalculateNumIodine, self.mols).get() pool.close() pool.join() return nI def CalculateNumPhosphor(self): pool = Pool(self.n_jobs) nP = pool.map_async(molproperty.CalculateNumPhosphor, self.mols).get() pool.close() pool.join() return nP def CalculateNumSulfur(self): pool = Pool(self.n_jobs) nS = pool.map_async(molproperty.CalculateNumSulfur, self.mols).get() pool.close() pool.join() return nS def CalculateNumOxygen(self): pool = Pool(self.n_jobs) nO = pool.map_async(molproperty.CalculateNumOxygen, self.mols).get() pool.close() pool.join() return nO def CalculateNumNitrogen(self): pool = Pool(self.n_jobs) nN = pool.map_async(molproperty.CalculateNumNitrogen, self.mols).get() pool.close() pool.join() return nN def CalculateNumChargedGroups(self): pass def CalculateHetCarbonRatio(self): pool = Pool(self.n_jobs) HetRatio = pool.map_async(molproperty.CalculateHetCarbonRatio, self.mols).get() pool.close() pool.join() return HetRatio def CalculateSAscore(self): pool = Pool(self.n_jobs) SA = pool.map_async(molproperty.CalculateSAscore, self.mols).get() pool.close() pool.join() return SA def 
CalculateNPscore(self): pool = Pool(self.n_jobs) NP = pool.map_async(molproperty.CalculateNPscore, self.mols).get() pool.close() pool.join() return NP def CalculateMolVolume(self): pool = Pool(self.n_jobs) mv = pool.map_async(molproperty.CalculateMolVolume, self.mols).get() pool.close() pool.join() return mv def CalculateMolDensity(self): pool = Pool(self.n_jobs) md = pool.map_async(molproperty.CalculateMolDensity, self.mols).get() pool.close() pool.join() return md def CalculateMolFCharge(self): pool = Pool(self.n_jobs) fChar = pool.map_async(molproperty.CalculateMolFCharge, self.mols).get() pool.close() pool.join() return fChar def CalculateNumSinBond(self): pool = Pool(self.n_jobs) nSingle = pool.map_async(molproperty.CalculateNumSinBond, self.mols).get() pool.close() pool.join() return nSingle def CalculateNumDouBond(self): pool = Pool(self.n_jobs) nDouble = pool.map_async(molproperty.CalculateNumDouBond, self.mols).get() pool.close() pool.join() return nDouble def CalculateNumTriBond(self): pool = Pool(self.n_jobs) nTriple = pool.map_async(molproperty.CalculateNumTriBond, self.mols).get() pool.close() pool.join() return nTriple
MIT License
saketkc/pysradb
pysradb/geoweb.py
GEOweb.download
python
def download(self, links, root_url, gse, verbose=False, out_dir=None):
    if out_dir is None:
        out_dir = os.path.join(os.getcwd(), "pysradb_downloads")
    out_dir = os.path.join(out_dir, gse)
    os.makedirs(out_dir, exist_ok=True)

    print("\nThe following files will be downloaded: \n")
    for link in links:
        print(link)
    print(os.linesep)

    tar_list = [i for i in links if ".tar" in i]
    if "filelist.txt" in links:
        tar_file = tar_list[0]
        if verbose:
            print(f"\nThe tar file {tar_file} contains the following files:\n")
            file_list_contents = requests.get(
                root_url + "filelist.txt"
            ).content.decode("utf-8")
            print(file_list_contents)

    for link in links:
        prefix = ""
        if link == "filelist.txt":
            prefix = gse + "_"
        geo_path = os.path.join(out_dir, prefix + link)
        # Note: str.lstrip strips *characters*, not a prefix, so this
        # relies on links never starting with letters from "https:/".
        download_file(
            root_url.lstrip("https://") + link, geo_path, show_progress=True
        )
Download GEO files.

Parameters
----------
links: list
    List of valid downloadable links for a GEO ID
root_url: string
    URL of the root directory for a GEO ID
gse: string
    GEO ID
verbose: bool
    Print the file list
out_dir: string
    Directory location for the download
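A minimal usage sketch; the GEO ID, output directory, and network availability are illustrative assumptions, and GEOweb / get_download_links come from the context below:

from pysradb.geoweb import GEOweb

geo = GEOweb()
# get_download_links returns (links, root_url) for the supplied GEO ID
links, root_url = geo.get_download_links("GSE41637")  # hypothetical GEO ID
geo.download(links, root_url, "GSE41637", verbose=True, out_dir="/tmp/geo")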
https://github.com/saketkc/pysradb/blob/bce1726813a104ff83eb1221679bf93074252af6/pysradb/geoweb.py#L57-L105
import gzip
import os
import re
import requests
import sys

from lxml import html

from .download import download_file
from .geodb import GEOdb
from .utils import _get_url
from .utils import copyfileobj
from .utils import get_gzip_uncompressed_size

PY3 = True
if sys.version_info[0] < 3:
    PY3 = False


class GEOweb(GEOdb):
    def __init__(self):
        pass

    def get_download_links(self, gse):
        prefix = gse[:-3]
        url = f"https://ftp.ncbi.nlm.nih.gov/geo/series/{prefix}nnn/{gse}/suppl/"
        link_objects = html.fromstring(requests.get(url).content).xpath("//a")
        links = [i.attrib["href"] for i in link_objects]
        if "/" in links:
            raise KeyError(f"The provided GEO ID {gse} does not exist.")
        links = [i for i in links if "geo/series/" not in i]
        links = [i for i in links]
        return links, url
BSD 3-Clause New or Revised License
jameskmurphy/nes
nes/rom.py
ROM.load
python
def load(self, filename):
    with open(filename, "rb") as f:
        nesheader = f.read(16)
        self.decode_header(nesheader)
        if self.has_trainer:
            self.trainer_data = f.read(512)
        self.prg_rom_data = f.read(self.prg_rom_bytes)
        self.chr_rom_data = f.read(self.chr_rom_bytes)
        self.misc_rom_data = f.read()
        if len(self.misc_rom_data) > 0:
            print("WARNING: MISC ROM DATA IS NOT EMPTY")
Load a ROM in the standard .nes file format
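For orientation, a minimal sketch of how the 16-byte header read above is laid out in the standard iNES format (this mirrors the published format, not this repository's decode_header implementation; the header bytes are synthetic):

header = b"NES\x1a" + bytes([2, 1, 0b00000100]) + bytes(9)  # synthetic header
assert header[:4] == b"NES\x1a"             # magic number
prg_rom_bytes = header[4] * 16384           # PRG ROM size, in 16 KiB units
chr_rom_bytes = header[5] * 8192            # CHR ROM size, in 8 KiB units
has_trainer = bool(header[6] & 0b00000100)  # trainer flag, bit 2 of flags byte 6
print(prg_rom_bytes, chr_rom_bytes, has_trainer)  # 32768 8192 True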
https://github.com/jameskmurphy/nes/blob/d2fb20be164a766dbb6ad17f4cccb9518455fee0/nes/rom.py#L57-L70
import pyximport; pyximport.install() from .cycore.carts import NESCart0, NESCart1, NESCart2, NESCart4 from nes.pycore.carts import NESCart0 as pyNESCart0 from .pycore.bitwise import upper_nibble, lower_nibble, bit_low, bit_high class ROM: MIRROR_BIT = 0 PERSISTENT_BIT = 1 TRAINER_BIT = 2 MIRROR_IGNORE_BIT = 3 NES2_FORMAT_MASK = 0b00001100 MIRROR_HORIZONTAL = [0, 0, 1, 1] MIRROR_VERTICAL = [0, 1, 0, 1] MIRROR_FOUR_SCREEN = [0, 1, 2, 3] def __init__(self, filename, verbose=True, py_compatibility_mode=False): self.py_compatibility_mode = py_compatibility_mode self.prg_rom_bytes = None self.chr_rom_bytes = None self.mirror_pattern = None self.mirror_ignore = None self.has_persistent = None self.has_trainer = None self.mapper_id = None self.submapper_id = None self.nes2 = None self.prg_ram_bytes = None self.prg_nvram_bytes = None self.chr_ram_bytes = None self.chr_nvram_bytes = None self.trainer_data = None self.prg_rom_data = None self.chr_rom_data = None self.misc_rom_data = None self.verbose = verbose if filename is not None: self.load(filename)
MIT License
fabi1cazenave/kalamine
kalamine/template.py
web_keymap
python
def web_keymap(layout):
    keymap = {}
    for key_name in LAYER_KEYS:
        if key_name.startswith('-'):
            continue
        chars = []
        for i in [0, 1, 4, 5]:
            if key_name in layout.layers[i]:
                chars.append(layout.layers[i][key_name])
        if len(chars):
            keymap[KEY_CODES['web'][key_name]] = chars
    return keymap
Web layout, main part.
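To illustrate the shape of the returned mapping (the key codes and characters below are hypothetical, not taken from a real layout):

# keymap maps a web KeyboardEvent.code to the characters on layers
# 0 (base), 1 (shift), 4 (altgr), 5 (altgr+shift), in that order:
keymap = {
    "KeyQ": ["q", "Q", "@", "\u03a9"],  # base, shift, altgr, altgr+shift
    "Digit1": ["1", "!"],               # keys may define fewer layers
}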
https://github.com/fabi1cazenave/kalamine/blob/a74e0e5b241e8a3621fd6629a405587589f814db/kalamine/template.py#L386-L400
from .utils import load_data, LAYER_KEYS, ODK_ID KEY_CODES = load_data('key_codes.yaml') XKB_KEY_SYM = load_data('key_sym.yaml') def hex_ord(char): return hex(ord(char))[2:].zfill(4) def xml_proof(char): if char not in '<&"\u0020\u00a0\u202f>': return char else: return '&#x{0};'.format(hex_ord(char)) def xml_proof_id(symbol): return symbol[2:-1] if symbol.startswith('&#x') else symbol def xkb_keymap(layout, eight_levels): showDescription = True maxLength = 16 output = [] for keyName in LAYER_KEYS: if keyName.startswith('-'): if len(output): output.append('') output.append('//' + keyName[1:]) continue symbols = [] description = ' //' for layer in layout.layers: if keyName in layer: symbol = layer[keyName] desc = symbol if symbol in layout.dead_keys: dk = layout.dead_keys[symbol] desc = dk['alt_self'] if dk['char'] == ODK_ID: symbol = 'ISO_Level3_Latch' else: symbol = 'dead_' + dk['name'] elif symbol in XKB_KEY_SYM and len(XKB_KEY_SYM[symbol]) <= maxLength: symbol = XKB_KEY_SYM[symbol] else: symbol = 'U' + hex_ord(symbol).upper() else: desc = ' ' symbol = 'VoidSymbol' description += ' ' + desc symbols.append(symbol.ljust(maxLength)) s = 'key <{}> {{[ {}, {}, {}, {}]}};' if layout.has_altgr and layout.has_1dk: if eight_levels: s = 'key <{}> {{[ {}, {}, {}, {}, {}, {}, {}, {}]}};' symbols.append('VoidSymbol'.ljust(maxLength)) symbols.append('VoidSymbol'.ljust(maxLength)) else: s = 'key <{}> {{[ {}, {}, {}, {}],[ {}, {}]}};' elif layout.has_altgr: del symbols[3] del symbols[2] line = s.format(* [keyName.upper()] + symbols) if showDescription: line += description.rstrip() if line.endswith('\\'): line += ' ' output.append(line) return output def klc_keymap(layout): supportedSymbols = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' output = [] for keyName in LAYER_KEYS: if keyName.startswith('-'): continue symbols = [] description = '//' alpha = False for i in [0, 1, 4, 5]: layer = layout.layers[i] if keyName in layer: symbol = layer[keyName] desc = symbol if symbol in layout.dead_keys: desc = layout.dead_keys[symbol]['alt_space'] symbol = hex_ord(desc) + '@' else: if i == 0: alpha = symbol.upper() != symbol if symbol not in supportedSymbols: symbol = hex_ord(symbol) symbols.append(symbol) else: desc = ' ' symbols.append('-1') description += ' ' + desc if (layout.has_altgr): output.append('\t'.join([ KEY_CODES['klc'][keyName], '1' if alpha else '0', symbols[0], symbols[1], '-1', symbols[2], symbols[3], description.strip() ])) else: output.append('\t'.join([ KEY_CODES['klc'][keyName], '1' if alpha else '0', symbols[0], symbols[1], '-1', description.strip() ])) return output def klc_deadkeys(layout): output = [] def appendLine(base, alt): s = '{0}\t{1}\t// {2} -> {3}' output.append(s.format(hex_ord(base), hex_ord(alt), base, alt)) for k in layout.dk_index: dk = layout.dead_keys[k] output.append('// DEADKEY: ' + dk['name'].upper() + ' //{{{') output.append('DEADKEY\t' + hex_ord(dk['alt_space'])) if k == ODK_ID: output.extend(klc_1dk(layout)) else: for i in range(len(dk['base'])): appendLine(dk['base'][i], dk['alt'][i]) appendLine('\u00a0', dk['alt_space']) appendLine('\u0020', dk['alt_space']) output.append('//}}}') output.append('') return output[:-1] def klc_dk_index(layout): output = [] for k in layout.dk_index: dk = layout.dead_keys[k] output.append('{0}\t"{1}"'.format(hex_ord(dk['alt_space']), dk['name'].upper())) return output def klc_1dk(layout): output = [] for i in [0, 1]: baseLayer = layout.layers[i] extLayer = layout.layers[i + 2] for keyName in LAYER_KEYS: if 
keyName.startswith('- Space') or keyName == 'spce': continue if keyName.startswith('-'): if len(output): output.append('') output.append('//' + keyName[1:]) continue elif keyName in extLayer: base = baseLayer[keyName] if base in layout.dead_keys: base = layout.dead_keys[base]['alt_space'] ext = extLayer[keyName] if (ext in layout.dead_keys): ext = layout.dead_keys[ext]['alt_space'] odk = hex_ord(ext) + '@' else: odk = hex_ord(ext) output.append('\t'.join([ hex_ord(base), odk, '// ' + base + ' -> ' + ext ])) return output def osx_keymap(layout): str = [] for index in range(5): layer = layout.layers[[0, 1, 0, 4, 5][index]] caps = index == 2 def has_dead_keys(letter): if letter in '\u0020\u00a0\u202f': return True for k in layout.dead_keys: if letter in layout.dead_keys[k]['base']: return True return False output = [] for key_name in LAYER_KEYS: if key_name in ['ae13', 'ab11']: continue if key_name.startswith('-'): if len(output): output.append('') output.append('<!--' + key_name[1:] + ' -->') continue symbol = '&#x0010;' final_key = True if key_name in layer: key = layer[key_name] if key in layout.dead_keys: symbol = 'dead_' + layout.dead_keys[key]['name'] final_key = False else: symbol = xml_proof(key.upper() if caps else key) final_key = not has_dead_keys(key.upper()) char = 'code="{0}"'.format(KEY_CODES['osx'][key_name]).ljust(10) if final_key: action = 'output="{0}"'.format(symbol) else: action = 'action="{0}"'.format(xml_proof_id(symbol)) output.append('<key {0} {1} />'.format(char, action)) str.append(output) return str def osx_actions(layout): output = [] def when(state, action): s = 'state="{0}"'.format(state).ljust(18) if action in layout.dead_keys: a = 'next="{0}"'.format(layout.dead_keys[action]['name']) elif action.startswith('dead_'): a = 'next="{0}"'.format(action[5:]) else: a = 'output="{0}"'.format(xml_proof(action)) return ' <when {0} {1} />'.format(s, a) def append_actions(symbol, actions): output.append('<action id="{0}">'.format(xml_proof_id(symbol))) output.append(when('none', symbol)) for (state, out) in actions: output.append(when(state, out)) output.append('</action>') for key in layout.dead_keys: symbol = layout.dead_keys[key]['name'] output.append('<action id="dead_{0}">'.format(symbol)) output.append(' <when state="none" next="{0}" />'.format(symbol)) output.append('</action>') continue for key_name in LAYER_KEYS: if key_name.startswith('-'): output.append('') output.append('<!--' + key_name[1:] + ' -->') continue for i in [0, 1]: if key_name not in layout.layers[i]: continue key = layout.layers[i][key_name] if i and key == layout.layers[0][key_name]: continue if key in layout.dead_keys: continue actions = [] for k in layout.dk_index: dk = layout.dead_keys[k] if key in dk['base']: idx = dk['base'].index(key) actions.append((dk['name'], dk['alt'][idx])) if len(actions): append_actions(xml_proof(key), actions) actions = [] for k in layout.dk_index: dk = layout.dead_keys[k] actions.append((dk['name'], dk['alt_space'])) append_actions('&#x0020;', actions) append_actions('&#x00a0;', actions) append_actions('&#x202f;', actions) return output def osx_terminators(layout): output = [] for k in layout.dk_index: dk = layout.dead_keys[k] s = 'state="{0}"'.format(dk['name']).ljust(18) o = 'output="{0}"'.format(xml_proof(dk['alt_self'])) output.append('<when {0} {1} />'.format(s, o)) return output
MIT License
redhat-cip/restfuzz
restfuzz/os_api_ref_importer.py
OsApiRefFile.__init__
python
def __init__(self, fobj):
    if isinstance(fobj, str):
        fobj = open(fobj)
    self.filename = fobj.name
    self.fobj = fobj
    self.methods = []
    method = {}
    last_title = None
    last_line = None
    parameters = None
    parameter_block = []
    while True:
        line = self.fobj.readline()
        if not line:
            self.fobj.close()
            break
        if line == "\n":
            continue
        line = line[:-1]
        if re.match(r'^==*$', line) or re.match(r'^--*$', line):
            # RST section underline: the previous line was a title
            last_title = last_line
        elif line.startswith(".. rest_method:"):
            # New method directive: flush the previous method, if any
            if method:
                if parameter_block:
                    method["parameters"].append(parameter_block)
                self.methods.append(method)
            method = {'name': last_title,
                      'url': line[16:].strip(),
                      'parameters': []}
        elif line.startswith(".. rest_parameters::"):
            # Load (and cache) the referenced parameters file
            param_file = line.split()[-1]
            if param_file not in self.parameters_db:
                self.parameters_db[param_file] = yaml.load(open(
                    os.path.join(os.path.dirname(self.filename),
                                 param_file)))
            parameters = self.parameters_db[param_file]
        elif parameters is not None:
            if not re.match(r"^ *- ", line):
                # End of the parameter list block
                method["parameters"].append(parameter_block)
                parameters = None
                parameter_block = []
            else:
                name, param_name = line.split(': ')
                name = re.match(r'\s+-\s*(.*)', name).groups()[0].strip()
                param_name = param_name.strip()
                param = parameters[param_name]
                parameter_block.append((name, param))
        last_line = line
    if method:
        if parameter_block:
            method["parameters"].append(parameter_block)
        self.methods.append(method)
Load os-api-ref documentation files
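A sketch of the os-api-ref RST fragment this parser consumes (the method name, URL, and parameter name are illustrative):

    List servers
    ============

    .. rest_method:: GET /v2.1/servers

    .. rest_parameters:: parameters.yaml

       - name: server_name

Parsing this yields one entry in self.methods of the form {'name': 'List servers', 'url': 'GET /v2.1/servers', 'parameters': [[('name', <the 'server_name' entry from parameters.yaml>)]]}.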
https://github.com/redhat-cip/restfuzz/blob/3ba7b8d5b6b25be40fa69f2570b0e0af29b48e5e/restfuzz/os_api_ref_importer.py#L24-L86
import os
import sys
import re
import yaml


class OsApiRefFile:
    parameters_db = {}
Apache License 2.0
slackapi/python-slack-sdk
slack_sdk/web/deprecation.py
show_2020_01_deprecation
python
def show_2020_01_deprecation(method_name: str):
    skip_deprecation = os.environ.get("SLACKCLIENT_SKIP_DEPRECATION")
    if skip_deprecation:
        return
    if not method_name:
        return
    matched_prefixes = [
        prefix
        for prefix in deprecated_method_prefixes_2020_01
        if method_name.startswith(prefix)
    ]
    if len(matched_prefixes) > 0:
        message = (
            f"{method_name} is deprecated. Please use the Conversations API instead. "
            "For more info, go to "
            "https://api.slack.com/changelog/2020-01-deprecating-antecedents-to-the-conversations-api"
        )
        warnings.warn(message)
Prints a warning if the given method is deprecated
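A quick usage sketch; setting the environment variable checked inside the function suppresses the warning:

show_2020_01_deprecation("channels.list")       # matches "channels.", warns
show_2020_01_deprecation("conversations.list")  # no prefix match, stays silent

import os
os.environ["SLACKCLIENT_SKIP_DEPRECATION"] = "1"
show_2020_01_deprecation("channels.list")       # now skipped entirely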
https://github.com/slackapi/python-slack-sdk/blob/6ae9a3ebd847857b5dd2dc89b6af75bff313ee3d/slack_sdk/web/deprecation.py#L14-L36
import os
import warnings

deprecated_method_prefixes_2020_01 = [
    "channels.",
    "groups.",
    "im.",
    "mpim.",
    "admin.conversations.whitelist.",
]
MIT License
richrd/suplemon
suplemon/modules/battery.py
Battery.readf
python
def readf(self, path):
    f = open(path)
    data = f.read()
    f.close()
    return data
Read and return file contents at path.
https://github.com/richrd/suplemon/blob/8bb67d6758e5bc5ca200fdce7a0fb6635abb66f4/suplemon/modules/battery.py#L106-L111
import os import time import subprocess from suplemon import helpers from suplemon.suplemon_module import Module class Battery(Module): def init(self): self.last_value = -1 self.checked = time.time() self.interval = 60 def value(self): if self.last_value == -1: state = self.battery_status() elif time.time()-self.checked > self.interval: state = self.battery_status() else: return self.last_value self.last_value = state return state def value_str(self): val = self.value() if val: if self.app.config["app"]["use_unicode_symbols"]: return "\u26A1{0}%".format(str(val)) else: return "BAT {0}%".format(str(val)) return "" def get_status(self): return self.value_str() def battery_status(self): value = None methods = [ self.battery_status_read, self.battery_status_acpi, self.battery_status_upower ] for m in methods: value = m() if value is not None: break return value def battery_status_read(self): try: path_info = self.readf("/proc/acpi/battery/BAT0/info") path_state = self.readf("/proc/acpi/battery/BAT0/state") except: return None try: max_cap = float(helpers.get_string_between("last full capacity:", "mWh", path_info)) cur_cap = float(helpers.get_string_between("remaining capacity:", "mWh", path_state)) return int(cur_cap / max_cap * 100) except: return None def battery_status_acpi(self): try: fnull = open(os.devnull, "w") raw_str = subprocess.check_output(["acpi"], stderr=fnull) fnull.close() except: return None raw_str = raw_str.decode("utf-8") part = helpers.get_string_between(",", "%", raw_str) if part: try: return int(part) except: return None return None def battery_status_upower(self): path = "/org/freedesktop/UPower/devices/battery_BAT0" try: raw_str = subprocess.check_output(["upower", "-i", path]) except: return None raw_str = raw_str.decode("utf-8") raw_str = raw_str.splitlines()[0] part = helpers.get_string_between("percentage:", "%", raw_str) if part: try: return int(part) except: return None return None
MIT License
napalm-automation/napalm-yang
napalm_yang/models/openconfig/interfaces/interface/routed_vlan/ipv4/unnumbered/__init__.py
unnumbered._set_interface_ref
python
def _set_interface_ref(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=interface_ref.interface_ref,
            is_container="container",
            yang_name="interface-ref",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/interfaces/ip",
            defining_module="openconfig-if-ip",
            yang_type="container",
            is_config=True,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """interface_ref must be of a type compatible with container""",
                "defined-type": "container",
                "generated-type": """YANGDynClass(base=interface_ref.interface_ref, is_container='container', yang_name="interface-ref", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
            }
        )
    self.__interface_ref = t
    if hasattr(self, "_set"):
        self._set()
Setter method for interface_ref, mapped from YANG variable
/interfaces/interface/routed_vlan/ipv4/unnumbered/interface_ref (container).

If this variable is read-only (config: false) in the source YANG file, then
_set_interface_ref is considered as a private method. Backends looking to
populate this variable should do so via calling thisObj._set_interface_ref()
directly.

YANG Description: Reference to an interface or subinterface
https://github.com/napalm-automation/napalm-yang/blob/9148e015b086ebe311c07deb92e168ea36fd7771/napalm_yang/models/openconfig/interfaces/interface/routed_vlan/ipv4/unnumbered/__init__.py#L261-L300
from operator import attrgetter from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType from pyangbind.lib.yangtypes import RestrictedClassType from pyangbind.lib.yangtypes import TypedListType from pyangbind.lib.yangtypes import YANGBool from pyangbind.lib.yangtypes import YANGListType from pyangbind.lib.yangtypes import YANGDynClass from pyangbind.lib.yangtypes import ReferenceType from pyangbind.lib.base import PybindBase from collections import OrderedDict from decimal import Decimal from bitarray import bitarray import six if six.PY3: import builtins as __builtin__ long = int elif six.PY2: import __builtin__ from . import config from . import state from . import interface_ref class unnumbered(PybindBase): __slots__ = ( "_path_helper", "_extmethods", "__config", "__state", "__interface_ref" ) _yang_name = "unnumbered" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) self.__interface_ref = YANGDynClass( base=interface_ref.interface_ref, is_container="container", yang_name="interface-ref", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return ["interfaces", "interface", "routed-vlan", "ipv4", "unnumbered"] def _get_config(self): return self.__config def _set_config(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """config must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, 
namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""", } ) self.__config = t if hasattr(self, "_set"): self._set() def _unset_config(self): self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) def _get_state(self): return self.__state def _set_state(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """state must be of a type compatible with container""", "defined-type": "container", "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""", } ) self.__state = t if hasattr(self, "_set"): self._set() def _unset_state(self): self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) def _get_interface_ref(self): return self.__interface_ref
Apache License 2.0
brikwerk/nxbt
nxbt/bluez.py
BlueZ.get_discovered_devices
python
def get_discovered_devices(self):
    bluez_objects = dbus.Interface(
        self.bus.get_object(SERVICE_NAME, "/"),
        "org.freedesktop.DBus.ObjectManager")
    devices = {}
    objects = bluez_objects.GetManagedObjects()
    for path, interfaces in list(objects.items()):
        if DEVICE_INTERFACE in interfaces:
            devices[str(path)] = interfaces[DEVICE_INTERFACE]
    return devices
Gets a dict of all discovered (or previously discovered and connected)
devices. The key is the device's dbus object path and the values are
the device's properties.

The following is a non-exhaustive list of the properties a device
dictionary can contain:

- "Address": The Bluetooth address
- "Alias": The friendly name of the device
- "Paired": Whether the device is paired
- "Connected": Whether the device is presently connected
- "UUIDs": The services a device provides

:return: A dictionary of all discovered devices
:rtype: dictionary
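A usage sketch against the BlueZ wrapper from the context below (the adapter path and property names assume a typical BlueZ setup):

bz = BlueZ(adapter_path="/org/bluez/hci0")
for path, props in bz.get_discovered_devices().items():
    # props is a dbus dictionary, so plain dict access works
    print(path, props.get("Address"), props.get("Alias"),
          bool(props.get("Connected")))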
https://github.com/brikwerk/nxbt/blob/5a7a8c0ff5ee1f075bef8924051de462179a9dd5/nxbt/bluez.py#L714-L741
import subprocess import re import os import time import logging from shutil import which import random from pathlib import Path import dbus SERVICE_NAME = "org.bluez" BLUEZ_OBJECT_PATH = "/org/bluez" ADAPTER_INTERFACE = SERVICE_NAME + ".Adapter1" PROFILEMANAGER_INTERFACE = SERVICE_NAME + ".ProfileManager1" DEVICE_INTERFACE = SERVICE_NAME + ".Device1" def find_object_path(bus, service_name, interface_name, object_name=None): manager = dbus.Interface( bus.get_object(service_name, "/"), "org.freedesktop.DBus.ObjectManager") for path, ifaces in manager.GetManagedObjects().items(): managed_interface = ifaces.get(interface_name) if managed_interface is None: continue elif (not object_name or object_name == managed_interface["Address"] or path.endswith(object_name)): obj = bus.get_object(service_name, path) return dbus.Interface(obj, interface_name).object_path return None def find_objects(bus, service_name, interface_name): manager = dbus.Interface( bus.get_object(service_name, "/"), "org.freedesktop.DBus.ObjectManager") paths = [] for path, ifaces in manager.GetManagedObjects().items(): managed_interface = ifaces.get(interface_name) if managed_interface is None: continue else: obj = bus.get_object(service_name, path) path = str(dbus.Interface(obj, interface_name).object_path) paths.append(path) return paths def toggle_clean_bluez(toggle): service_path = "/lib/systemd/system/bluetooth.service" override_dir = Path("/run/systemd/system/bluetooth.service.d") override_path = override_dir / "nxbt.conf" if toggle: if override_path.is_file(): return with open(service_path) as f: for line in f: if line.startswith("ExecStart="): exec_start = line.strip() + " --compat --noplugin=*" break else: raise Exception("systemd service file doesn't have a ExecStart line") override = f"[Service]\nExecStart=\n{exec_start}" override_dir.mkdir(parents=True, exist_ok=True) with override_path.open("w") as f: f.write(override) else: try: os.remove(override_path) except FileNotFoundError: return _run_command(["systemctl", "daemon-reload"]) _run_command(["systemctl", "restart", "bluetooth"]) time.sleep(0.5) def clean_sdp_records(): if which("sdptool") is None: raise Exception("sdptool is not available on this system." + "If you can, please install this tool, as " + "it is required for proper functionality.") _run_command(["chmod", "777", "/var/run/sdp"]) result = _run_command(['sdptool', 'browse', 'local']).stdout.decode('utf-8') if result is None or len(result.split('\n\n')) < 1: return exceptions = ["PnP Information"] service_rec_handles = [] for rec in result.split('\n\n'): exception_found = False for exception in exceptions: if exception in rec: exception_found = True break if exception_found: continue for line in rec.split('\n'): if "Service RecHandle" in line: service_rec_handles.append(line.split(" ")[2]) if len(service_rec_handles) > 0: for record_handle in service_rec_handles: _run_command(['sdptool', 'del', record_handle]) def _run_command(command): result = subprocess.run( command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) cmd_err = result.stderr.decode("utf-8").replace("\n", "") if cmd_err != "": raise Exception(cmd_err) return result def get_random_controller_mac(): def seg(): random_number = random.randint(0,255) hex_number = str(hex(random_number)) hex_number = hex_number[2:].upper() return str(hex_number) return f"7C:BB:8A:{seg()}:{seg()}:{seg()}" def replace_mac_addresses(adapter_paths, addresses): if which("hcitool") is None: raise Exception("hcitool is not available on this system." 
+ "If you can, please install this tool, as " + "it is required for proper functionality.") if which("hciconfig") is None: raise Exception("hciconfig is not available on this system." + "If you can, please install this tool, as " + "it is required for proper functionality.") if addresses: assert len(addresses) == len(adapter_paths) for i in range(len(adapter_paths)): adapter_id = adapter_paths[i].split('/')[-1] mac = addresses[i].split(':') cmds = ['hcitool', '-i', adapter_id, 'cmd', '0x3f', '0x001', f'0x{mac[5]}',f'0x{mac[4]}',f'0x{mac[3]}',f'0x{mac[2]}', f'0x{mac[1]}',f'0x{mac[0]}'] _run_command(cmds) _run_command(['hciconfig', adapter_id, 'reset']) def find_devices_by_alias(alias, return_path=False, created_bus=None): if created_bus is not None: bus = created_bus else: bus = dbus.SystemBus() devices = find_objects( bus, SERVICE_NAME, DEVICE_INTERFACE) addresses = [] matching_paths = [] for path in devices: device_props = dbus.Interface( bus.get_object(SERVICE_NAME, path), "org.freedesktop.DBus.Properties") device_alias = device_props.Get( DEVICE_INTERFACE, "Alias").upper() device_addr = device_props.Get( DEVICE_INTERFACE, "Address").upper() if device_alias.upper() == alias.upper(): addresses.append(device_addr) matching_paths.append(path) if created_bus is None: bus.close() if return_path: return addresses, matching_paths else: return addresses def disconnect_devices_by_alias(alias, created_bus=None): if created_bus is not None: bus = created_bus else: bus = dbus.SystemBus() devices = find_objects( bus, SERVICE_NAME, DEVICE_INTERFACE) addresses = [] matching_paths = [] for path in devices: device_props = dbus.Interface( bus.get_object(SERVICE_NAME, path), "org.freedesktop.DBus.Properties") device_alias = device_props.Get( DEVICE_INTERFACE, "Alias").upper() if device_alias.upper() == alias.upper(): device = dbus.Interface( bus.get_object(SERVICE_NAME, path), DEVICE_INTERFACE) try: device.Disconnect() except Exception as e: print(e) if created_bus is None: bus.close() class BlueZ(): def __init__(self, adapter_path="/org/bluez/hci0"): self.logger = logging.getLogger('nxbt') self.bus = dbus.SystemBus() self.device_path = adapter_path if self.device_path is None: self.device_path = find_object_path( self.bus, SERVICE_NAME, ADAPTER_INTERFACE) if self.device_path is None: raise Exception("Unable to find a bluetooth adapter") self.logger.debug(f"Using adapter under object path: {self.device_path}") self.device = dbus.Interface( self.bus.get_object( SERVICE_NAME, self.device_path), "org.freedesktop.DBus.Properties") self.device_id = self.device_path.split("/")[-1] self.profile_manager = dbus.Interface(self.bus.get_object( SERVICE_NAME, BLUEZ_OBJECT_PATH), PROFILEMANAGER_INTERFACE) self.adapter = dbus.Interface( self.bus.get_object( SERVICE_NAME, self.device_path), ADAPTER_INTERFACE) @property def address(self): return self.device.Get(ADAPTER_INTERFACE, "Address").upper() def set_address(self, mac): if which("hcitool") is None: raise Exception("hcitool is not available on this system." + "If you can, please install this tool, as " + "it is required for proper functionality.") mac = mac.split(":") cmds = ['hcitool', '-i', self.device_id, 'cmd', '0x3f', '0x001', f'0x{mac[5]}',f'0x{mac[4]}',f'0x{mac[3]}',f'0x{mac[2]}', f'0x{mac[1]}',f'0x{mac[0]}'] _run_command(cmds) _run_command(['hciconfig', self.device_id, 'reset']) def set_class(self, device_class): if which("hciconfig") is None: raise Exception("hciconfig is not available on this system." 
+ "If you can, please install this tool, as " + "it is required for proper functionality.") _run_command(['hciconfig', self.device_id, 'class', device_class]) def reset_adapter(self): if which("hciconfig") is None: raise Exception("hciconfig is not available on this system." + "If you can, please install this tool, as " + "it is required for proper functionality.") _run_command(['hciconfig', self.device_id, 'reset']) @property def name(self): return self.device.Get(ADAPTER_INTERFACE, "Name") @property def alias(self): return self.device.Get(ADAPTER_INTERFACE, "Alias") def set_alias(self, value): self.device.Set(ADAPTER_INTERFACE, "Alias", value) @property def pairable(self): return bool(self.device.Get(ADAPTER_INTERFACE, "Pairable")) def set_pairable(self, value): dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Pairable", dbus_value) @property def pairable_timeout(self): return self.device.Get(ADAPTER_INTERFACE, "PairableTimeout") def set_pairable_timeout(self, value): dbus_value = dbus.UInt32(value) self.device.Set(ADAPTER_INTERFACE, "PairableTimeout", dbus_value) @property def discoverable(self): return bool(self.device.Get(ADAPTER_INTERFACE, "Discoverable")) def set_discoverable(self, value): dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Discoverable", dbus_value) @property def discoverable_timeout(self): return self.device.Get(ADAPTER_INTERFACE, "DiscoverableTimeout") def set_discoverable_timeout(self, value): dbus_value = dbus.UInt32(value) self.device.Set( ADAPTER_INTERFACE, "DiscoverableTimeout", dbus_value) @property def device_class(self): result = subprocess.run( ["hciconfig", self.device_id, "class"], stdout=subprocess.PIPE) device_class = result.stdout.decode("utf-8").split("Class: ")[1][0:8] return device_class def set_device_class(self, device_class): if os.geteuid() != 0: raise PermissionError("The device class must be set as root") if len(device_class) != 8: raise ValueError("Device class must be length 8") result = subprocess.run( ["hciconfig", self.device_id, "class", device_class], stderr=subprocess.PIPE) cmd_err = result.stderr.decode("utf-8").replace("\n", "") if cmd_err != "": raise Exception(cmd_err) @property def powered(self): return bool(self.device.Get(ADAPTER_INTERFACE, "Powered")) def set_powered(self, value): dbus_value = dbus.Boolean(value) self.device.Set(ADAPTER_INTERFACE, "Powered", dbus_value) def register_profile(self, profile_path, uuid, opts): return self.profile_manager.RegisterProfile(profile_path, uuid, opts) def unregister_profile(self, profile): self.profile_manager.UnregisterProfile(profile) def reset(self): result = subprocess.run( ["systemctl", "restart", "bluetooth"], stderr=subprocess.PIPE) cmd_err = result.stderr.decode("utf-8").replace("\n", "") if cmd_err != "": raise Exception(cmd_err) self.device = dbus.Interface( self.bus.get_object( SERVICE_NAME, self.device_path), "org.freedesktop.DBus.Properties") self.profile_manager = dbus.Interface( self.bus.get_object( SERVICE_NAME, BLUEZ_OBJECT_PATH), PROFILEMANAGER_INTERFACE)
MIT License
jiacheng-xu/cached-lstm
Model/lstm_standard.py
sgd
python
def sgd(lr, tparams, grads, x, mask, y, cost):
    # New set of shared variables that will hold the gradients
    # for a mini-batch.
    gshared = [theano.shared(p.get_value() * 0., name='%s_grad' % k)
               for k, p in tparams.iteritems()]
    gsup = [(gs, g) for gs, g in zip(gshared, grads)]

    # Computes the cost and stores the gradients, without updating
    # the weights yet.
    f_grad_shared = theano.function([x, mask, y], cost, updates=gsup,
                                    name='sgd_f_grad_shared')

    pup = [(p, p - lr * g) for p, g in zip(tparams.values(), gshared)]

    # Updates the weights from the previously stored gradients.
    f_update = theano.function([lr], [], updates=pup,
                               name='sgd_f_update')

    return f_grad_shared, f_update
Stochastic Gradient Descent

:note: A more complicated version of sgd than needed. This is done
    like that for adadelta and rmsprop.
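The two returned functions are meant to be called in tandem; a minimal sketch of the calling pattern (the minibatch iterable of prepared (x, mask, y) arrays and the learning rate are assumptions, not part of this function):

def train_epoch(f_grad_shared, f_update, minibatches, lrate):
    """Drive the pair of functions returned by sgd() for one epoch."""
    costs = []
    for x_b, mask_b, y_b in minibatches:
        costs.append(f_grad_shared(x_b, mask_b, y_b))  # compute cost, stash grads
        f_update(lrate)                                # apply one SGD step
    return costs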
https://github.com/jiacheng-xu/cached-lstm/blob/8e684d5558e18216d6ec8186f8fd46fa1e15ccfd/Model/lstm_standard.py#L210-L235
from collections import OrderedDict import cPickle as pkl import sys import time import numpy import theano from theano import config import theano.tensor as tensor from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams import imdb datasets = {'imdb': (imdb.load_data, imdb.prepare_data)} SEED = 123 numpy.random.seed(SEED) def numpy_floatX(data): return numpy.asarray(data, dtype=config.floatX) def get_minibatches_idx(n, minibatch_size, shuffle=False): idx_list = numpy.arange(n, dtype="int32") if shuffle: numpy.random.shuffle(idx_list) minibatches = [] minibatch_start = 0 for i in range(n // minibatch_size): minibatches.append(idx_list[minibatch_start: minibatch_start + minibatch_size]) minibatch_start += minibatch_size if (minibatch_start != n): minibatches.append(idx_list[minibatch_start:]) return zip(range(len(minibatches)), minibatches) def get_dataset(name): return datasets[name][0], datasets[name][1] def zipp(params, tparams): for kk, vv in params.iteritems(): tparams[kk].set_value(vv) def unzip(zipped): new_params = OrderedDict() for kk, vv in zipped.iteritems(): new_params[kk] = vv.get_value() return new_params def dropout_layer(state_before, use_noise, trng): proj = tensor.switch(use_noise, (state_before * trng.binomial(state_before.shape, p=0.5, n=1, dtype=state_before.dtype)), state_before * 0.5) return proj def _p(pp, name): return '%s_%s' % (pp, name) def init_params(options): params = OrderedDict() randn = numpy.random.rand(options['n_words'], options['dim_proj']) params['Wemb'] = (0.01 * randn).astype(config.floatX) params = get_layer(options['encoder'])[0](options, params, prefix=options['encoder']) params['U'] = 0.01 * numpy.random.randn(options['dim_proj'], options['ydim']).astype(config.floatX) params['b'] = numpy.zeros((options['ydim'],)).astype(config.floatX) return params def load_params(path, params): pp = numpy.load(path) for kk, vv in params.iteritems(): if kk not in pp: raise Warning('%s is not in the archive' % kk) params[kk] = pp[kk] return params def init_tparams(params): tparams = OrderedDict() for kk, pp in params.iteritems(): tparams[kk] = theano.shared(params[kk], name=kk) return tparams def get_layer(name): fns = layers[name] return fns def ortho_weight(ndim): W = numpy.random.randn(ndim, ndim) u, s, v = numpy.linalg.svd(W) return u.astype(config.floatX) def param_init_lstm(options, params, prefix='lstm'): W = numpy.concatenate([ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj'])], axis=1) params[_p(prefix, 'W')] = W U = numpy.concatenate([ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj']), ortho_weight(options['dim_proj'])], axis=1) params[_p(prefix, 'U')] = U b = numpy.zeros((4 * options['dim_proj'],)) params[_p(prefix, 'b')] = b.astype(config.floatX) return params def lstm_layer(tparams, state_below, options, prefix='lstm', mask=None): nsteps = state_below.shape[0] if state_below.ndim == 3: n_samples = state_below.shape[1] else: n_samples = 1 assert mask is not None def _slice(_x, n, dim): if _x.ndim == 3: return _x[:, :, n * dim:(n + 1) * dim] return _x[:, n * dim:(n + 1) * dim] def _step(m_, x_, h_, c_): preact = tensor.dot(h_, tparams[_p(prefix, 'U')]) preact += x_ i = tensor.nnet.sigmoid(_slice(preact, 0, options['dim_proj'])) f = tensor.nnet.sigmoid(_slice(preact, 1, options['dim_proj'])) o = tensor.nnet.sigmoid(_slice(preact, 2, options['dim_proj'])) c = tensor.tanh(_slice(preact, 3, 
options['dim_proj'])) c = f * c_ + i * c c = m_[:, None] * c + (1. - m_)[:, None] * c_ h = o * tensor.tanh(c) h = m_[:, None] * h + (1. - m_)[:, None] * h_ return h, c state_below = (tensor.dot(state_below, tparams[_p(prefix, 'W')]) + tparams[_p(prefix, 'b')]) dim_proj = options['dim_proj'] rval, updates = theano.scan(_step, sequences=[mask, state_below], outputs_info=[tensor.alloc(numpy_floatX(0.), n_samples, dim_proj), tensor.alloc(numpy_floatX(0.), n_samples, dim_proj)], name=_p(prefix, '_layers'), n_steps=nsteps) return rval[0] layers = {'lstm': (param_init_lstm, lstm_layer)}
Apache License 2.0
tensorspeech/tensorflowasr
tensorflow_asr/featurizers/text_featurizers.py
CharFeaturizer.iextract
python
def iextract(
    self,
    indices: tf.Tensor,
) -> tf.Tensor:
    indices = self.normalize_indices(indices)
    tokens = tf.gather_nd(self.tokens, tf.expand_dims(indices, axis=-1))
    with tf.device("/CPU:0"):
        tokens = tf.strings.reduce_join(tokens, axis=-1)
    return tokens
Convert list of indices to string

Args:
    indices: tf.Tensor with dim [B, None]

Returns:
    transcripts: tf.Tensor of dtype tf.string with dim [B]
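The gather/reduce_join trick is easy to see in isolation; a standalone sketch with a toy token table (not the class's actual vocabulary handling):

import tensorflow as tf

tokens = tf.constant(["", "a", "b", "c"])        # index 0 is the blank token
indices = tf.constant([[1, 2, 3], [3, 0, 1]])    # shape [B, None]
chars = tf.gather_nd(tokens, tf.expand_dims(indices, axis=-1))
print(tf.strings.reduce_join(chars, axis=-1).numpy())  # [b'abc', b'ca']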
https://github.com/tensorspeech/tensorflowasr/blob/283d1f2df3a4da021721c222a51c1e4bbc709238/tensorflow_asr/featurizers/text_featurizers.py#L206-L222
import abc import codecs import os import unicodedata from multiprocessing import cpu_count import numpy as np import sentencepiece as sp import tensorflow as tf import tensorflow_datasets as tds from ..configs.config import DecoderConfig from ..utils import file_util ENGLISH_CHARACTERS = [ " ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "'", ] class TextFeaturizer(metaclass=abc.ABCMeta): def __init__( self, decoder_config: dict, ): self.scorer = None self.decoder_config = DecoderConfig(decoder_config) self.blank = None self.tokens2indices = {} self.tokens = [] self.num_classes = None self.max_length = 0 @property def shape(self) -> list: return [self.max_length if self.max_length > 0 else None] @property def prepand_shape(self) -> list: return [self.max_length + 1 if self.max_length > 0 else None] def update_length( self, length: int, ): self.max_length = max(self.max_length, length) def reset_length(self): self.max_length = 0 def preprocess_text(self, text): text = unicodedata.normalize("NFC", text.lower()) return text.strip("\n") def add_scorer( self, scorer: any = None, ): self.scorer = scorer def normalize_indices( self, indices: tf.Tensor, ) -> tf.Tensor: with tf.name_scope("normalize_indices"): minus_one = -1 * tf.ones_like(indices, dtype=tf.int32) blank_like = self.blank * tf.ones_like(indices, dtype=tf.int32) return tf.where(indices == minus_one, blank_like, indices) def prepand_blank( self, text: tf.Tensor, ) -> tf.Tensor: return tf.concat([[self.blank], text], axis=0) @abc.abstractclassmethod def extract(self, text): raise NotImplementedError() @abc.abstractclassmethod def iextract(self, indices): raise NotImplementedError() @abc.abstractclassmethod def indices2upoints(self, indices): raise NotImplementedError() class CharFeaturizer(TextFeaturizer): def __init__( self, decoder_config: dict, ): super(CharFeaturizer, self).__init__(decoder_config) self.__init_vocabulary() def __init_vocabulary(self): lines = [] if self.decoder_config.vocabulary is not None: with codecs.open(self.decoder_config.vocabulary, "r", "utf-8") as fin: lines.extend(fin.readlines()) else: lines = ENGLISH_CHARACTERS self.blank = 0 if self.decoder_config.blank_at_zero else None self.tokens2indices = {} self.tokens = [] index = 1 if self.blank == 0 else 0 for line in lines: line = self.preprocess_text(line) if line.startswith("#") or not line: continue self.tokens2indices[line[0]] = index self.tokens.append(line[0]) index += 1 if self.blank is None: self.blank = len(self.tokens) self.non_blank_tokens = self.tokens.copy() self.tokens.insert(self.blank, "") self.num_classes = len(self.tokens) self.tokens = tf.convert_to_tensor(self.tokens, dtype=tf.string) self.upoints = tf.strings.unicode_decode(self.tokens, "UTF-8").to_tensor(shape=[None, 1]) def extract( self, text: str, ) -> tf.Tensor: text = self.preprocess_text(text) text = list(text.strip()) indices = [self.tokens2indices[token] for token in text] return tf.convert_to_tensor(indices, dtype=tf.int32)
Apache License 2.0
necaris/python3-openid
openid/yadis/xri.py
iriToURI
python
def iriToURI(iri):
    if isinstance(iri, bytes):
        iri = str(iri, encoding="utf-8")
    return iri.encode('ascii', errors='oid_percent_escape').decode()
Transform an IRI to a URI by escaping unicode.
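A sketch of the expected behaviour: non-ASCII code points are UTF-8 encoded and percent-escaped by the oid_percent_escape error handler registered in openid.codecutil, which is what the RFC 3987 IRI-to-URI mapping prescribes (the expected outputs below are inferred from that, not test output):

print(iriToURI("https://example.com/caf\u00e9"))
# expected: https://example.com/caf%C3%A9
print(iriToURI(b"https://example.com/plain"))  # bytes input is decoded first
# expected: https://example.com/plain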
https://github.com/necaris/python3-openid/blob/5c7f8f8fa4d2a0124516046ab2f84130eb8c10cb/openid/yadis/xri.py#L59-L64
import re from functools import reduce from openid import codecutil XRI_AUTHORITIES = ['!', '=', '@', '+', '$', '('] def identifierScheme(identifier): if identifier.startswith('xri://') or (identifier and identifier[0] in XRI_AUTHORITIES): return "XRI" else: return "URI" def toIRINormal(xri): if not xri.startswith('xri://'): xri = 'xri://' + xri return escapeForIRI(xri) _xref_re = re.compile(r'\((.*?)\)') def _escape_xref(xref_match): xref = xref_match.group() xref = xref.replace('/', '%2F') xref = xref.replace('?', '%3F') xref = xref.replace('#', '%23') return xref def escapeForIRI(xri): xri = xri.replace('%', '%25') xri = _xref_re.sub(_escape_xref, xri) return xri def toURINormal(xri): return iriToURI(toIRINormal(xri))
Apache License 2.0
googlearchive/simian
src/simian/auth/base.py
Auth1.LoadSelfKey
python
def LoadSelfKey(self, keystr):
    key = self._LoadKey(keystr)
    self._key = key
Load a key and keep it as this instance's key.

Args:
    keystr: str, bytes of key in PEM format
https://github.com/googlearchive/simian/blob/fb9c43946ff7ba29be417068d6447cfc0adfe9ef/src/simian/auth/base.py#L681-L688
import warnings warnings.filterwarnings( 'ignore', '.* sha module .*', DeprecationWarning, '.*', 0) import array import base64 import datetime import logging import os import struct from simian.auth import tlslite_bridge from simian.auth import x509 MSG_SEP = ' ' AGE_TOKEN_SECONDS = 6 * 60 * 60 AGE_CN_SECONDS = 5 * 60 AGE_DEFAULT_SECONDS = 6 * 60 * 60 AGE_APPLESUS_TOKEN_SECONDS = 21 * 24 * 60 * 60 MIN_VALUE_CN = 2**100 LEVEL_APPLESUS = -5 LEVEL_BASE = 0 LEVEL_ADMIN = 5 class Error(Exception): class NotAuthenticated(Error): def __init__(self, reason='Unknown'): self.reason = reason super(NotAuthenticated, self).__init__() class AuthSessionError(Error): class KeyNotLoaded(Error): class CertificateParseError(Error): class CertificateError(Error): class MessageError(Error): class SessionDataError(Error): class CryptoError(Error): class State(object): NONE = 'NONE' INPUT = 'INPUT' OUTPUT = 'OUTPUT' class AuthState(object): UNKNOWN = 'UNKNOWN' OK = 'OK' FAIL = 'FAIL' class AuthSessionBase(object): def _Create(self, sid): raise NotImplementedError def _Get(self, sid): raise NotImplementedError def _Put(self, session): raise NotImplementedError def Set(self, sid, data=None, **kwargs): session = self._Create(sid) session.data = data if kwargs: for k in kwargs: if k not in ['sid', 'mtime']: setattr(session, k, kwargs[k]) self._Put(session) def Get(self, sid): session = self._Get(sid) if session: if not self.ExpireOne(session): if session.data is not None: return session.data else: return session def DeleteById(self, sid): raise NotImplementedError def _Now(self): now = datetime.datetime.utcnow() return now def _Mtime(self, session): return session.mtime def Delete(self, session): raise NotImplementedError def All(self, min_age_seconds=None): raise NotImplementedError def ExpireOne(self, session, now=None): if self.IsExpired(session, now=now): self.Delete(session) return True else: return False def IsExpired(self, session, now=None): if now is None: now = self._Now() age = datetime.timedelta(seconds=AGE_DEFAULT_SECONDS) session_mtime = self._Mtime(session) if session_mtime: d = now - session_mtime else: d = age + age if d > age: return True else: return False @classmethod def DefineSessionType(cls, name, prefix): sane_name = name[0].upper() + name[1:].lower() setattr( cls, 'SESSION_TYPE_PREFIX_%s' % name.upper(), '%s_' % prefix) setattr( cls, 'Set%s' % sane_name, lambda self, k, data=None, **kwargs: self.Set( '%s_%s' % (prefix, k), data, **kwargs)) setattr( cls, 'Get%s' % sane_name, lambda self, k: self.Get('%s_%s' % (prefix, k))) setattr( cls, 'Del%s' % sane_name, lambda self, k: self.DeleteById('%s_%s' % (prefix, k))) class AuthSessionData(object): def __init__(self, **kwargs): if kwargs: self.__dict__ = dict(kwargs) def __contains__(self, item): return item in self.__dict__ def __eq__(self, other): if type(other) == dict: return self.__dict__ == other for k in self.__dict__: if not hasattr(other, k) or self.__dict__[k] != getattr(other, k): return False return True def __ne__(self, other): return not self.__eq__(other) class AuthSessionDict(AuthSessionBase): def __init__(self): self._sessions = {} def _Create(self, sid): return AuthSessionData(sid=sid, mtime=self._Now(), data=None) def _Get(self, sid): return self._sessions.get(sid, None) def _Put(self, session): self._sessions[session.sid] = session def DeleteById(self, sid): try: del self._sessions[sid] except KeyError: pass def Delete(self, session): self.DeleteById(session.sid) def All(self, unused_min_age_seconds=None): for session in 
self._sessions: yield self._sessions[session] class Auth1ServerSession(AuthSessionDict): AuthSessionBase.DefineSessionType('cn', 'cn') AuthSessionBase.DefineSessionType('token', 't') class Auth1ClientSession(AuthSessionDict): class AuthBase(object): def __init__(self): self._output = None self._error_output = [] self._session_class = self.GetSessionClass() self._session = self._session_class() self._default_state = self.DefaultState() self.ResetState() def GetSessionClass(self): return AuthSessionBase def DefaultState(self): return State.NONE def ResetState(self): self._state = self._default_state self._auth_state = AuthState.UNKNOWN def AuthFail(self): self.ResetState() self._auth_state = AuthState.FAIL def _AddOutput(self, output): if self._output is not None: if type(output) is dict: self._output.update(output) else: self._output += output else: self._output = output self._state = State.OUTPUT def _AddError(self, errstr): if self._error_output is None: self._error_output = [errstr] else: self._error_output.append(errstr) def ErrorOutput(self): err_output = self._error_output self._error_output = [] return err_output def State(self): return self._state def Input(self, *unused_args, **unused_kwargs): if self._state == State.INPUT: return raise ValueError('not waiting for input') def Output(self): if self._state == State.OUTPUT: output = self._output self._output = None self._state = self._default_state return output def AuthState(self): auth_state = self._auth_state if self._auth_state in [AuthState.OK, AuthState.FAIL]: self.ResetState() return auth_state def AuthStateOK(self): auth_state = self.AuthState() return auth_state == AuthState.OK def _SplitMessage(self, m, expect_len): a = m.split(MSG_SEP) if len(a) != expect_len: raise MessageError('wrong number of message items %d %s', len(a), a) return a def _AssembleMessage(self, *args): return MSG_SEP.join(args) class Auth1(AuthBase): TOKEN = 'Auth1Token' def __init__(self, *args, **kwargs): super(Auth1, self).__init__(*args, **kwargs) self._key = None self._cert = None self._ca_pem = '' self._server_cert_pem = '' self._required_issuer = None def GetSessionClass(self): return Auth1ServerSession def DefaultState(self): return State.INPUT def Nonce(self): s = os.urandom(16) i = struct.unpack('QQ', s) i = (i[0] << 64) + i[1] return i def NonceBase64(self): return base64.urlsafe_b64encode(str(self.Nonce())) def GetCurrentEpochTimeUTC(self): return int(datetime.datetime.utcnow().strftime('%s')) def _AuthToken(self): if self._auth_state == AuthState.OK: return self.NonceBase64() def _LoadCert(self, certstr): try: cert = x509.LoadCertificateFromPEM(certstr) except x509.Error, e: raise ValueError(str(e)) return cert def _LoadKey(self, keystr): try: key = tlslite_bridge.parsePEMKey(keystr) except (SyntaxError, AttributeError), e: raise ValueError('invalid PEM key format: %s' % str(e)) return key def Sign(self, datastr): if not self._key: raise KeyNotLoaded data_bytes = array.array('B') data_bytes.fromstring(datastr) sig_bytes = self._key.hashAndSign(data_bytes) if isinstance(sig_bytes, bytearray): return str(sig_bytes) else: return sig_bytes.tostring()
Apache License 2.0
hanyas/trajopt
trajopt/envs/quanser/common.py
Base.seed
python
def seed(self, seed=None):
    self._np_random, seed = seeding.np_random(seed)
    return [seed]
Set the random seed.

:param seed: random seed
:type seed: int
:return: list
https://github.com/hanyas/trajopt/blob/1cad9010be45851ec12fe4156ae73d9261304cb9/trajopt/envs/quanser/common.py#L211-L220
import socket import struct import autograd.numpy as np from scipy import signal import gym from gym import spaces from gym.utils import seeding class QSocket: def __init__(self, ip, x_len, u_len): self._x_fmt = '>' + x_len * 'd' self._u_fmt = '>' + u_len * 'd' self._buf_size = x_len * 8 self._port = 9095 self._ip = ip self._soc = None def snd_rcv(self, u): self._soc.send(struct.pack(self._u_fmt, *u)) data = self._soc.recv(self._buf_size) return np.array(struct.unpack(self._x_fmt, data), dtype=np.float32) def open(self): if self._soc is None: self._soc = socket.socket() self._soc.connect((self._ip, self._port)) def close(self): if self._soc is not None: self._soc.close() self._soc = None def is_open(self): open = True if self._soc is None: open = False return open class SymmetricBoxSpace: def __init__(self, bound: np.ndarray, labels: tuple): self.bound_lo = -bound self.bound_up = bound self.labels = labels self.dim = len(labels) def project(self, ele: np.ndarray): return np.clip(ele, self.bound_lo, self.bound_up) class VelocityFilter: def __init__(self, x_len, num=(50, 0), den=(1, 50), dt=0.002, x_init=None): derivative_filter = signal.cont2discrete((num, den), dt) self.b = derivative_filter[0].ravel().astype(np.float32) self.a = derivative_filter[1].astype(np.float32) if x_init is None: self.z = np.zeros((max(len(self.a), len(self.b)) - 1, x_len), dtype=np.float32) else: self.set_initial_state(x_init) def set_initial_state(self, x_init): assert isinstance(x_init, np.ndarray) zi = signal.lfilter_zi(self.b, self.a) self.z = np.outer(zi, x_init) def __call__(self, x): xd, self.z = signal.lfilter(self.b, self.a, x[None, :], 0, self.z) return xd.ravel() class LabeledBox(spaces.Box): def __init__(self, labels, **kwargs): super(LabeledBox, self).__init__(**kwargs) assert len(labels) == self.high.size self.labels = labels class GentlyTerminating(gym.Wrapper): def step(self, action): observation, reward, done, info = self.env.step(action) if done: self.env.step(np.zeros(self.env.action_space.shape)) return observation, reward, done, info def reset(self): return self.env.reset() class Timing: def __init__(self, fs, fs_ctrl): fs_ctrl_min = 50.0 assert fs_ctrl >= fs_ctrl_min, "control frequency must be at least {}".format(fs_ctrl_min) self.n_sim_per_ctrl = int(fs / fs_ctrl) assert fs == fs_ctrl * self.n_sim_per_ctrl, "sampling frequency must be a multiple of the control frequency" self.dt = 1.0 / fs self.dt_ctrl = 1.0 / fs_ctrl self.render_rate = int(fs_ctrl) class Base(gym.Env): def __init__(self, fs, fs_ctrl): super(Base, self).__init__() self._state = None self._vel_filt = None self.timing = Timing(fs, fs_ctrl) self.sensor_space = None self.state_space = None self.observation_space = None self.action_space = None self.reward_range = None self.reward_range = None self.done = False self._np_random = None self.seed() def _zero_sim_step(self): return self._sim_step([0.0]) def _sim_step(self, u): raise NotImplementedError def _ctrl_step(self, u): x = self._state u_cmd = None for _ in range(self.timing.n_sim_per_ctrl): x, u_cmd = self._sim_step(u) return x, u_cmd def _rwd(self, x, u): raise NotImplementedError
MIT License
wummel/dosage
dosagelib/util.py
backtick
python
def backtick(cmd, encoding='utf-8'):
    data = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
    return data.decode(encoding)
Return decoded output from command.
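For example (assuming a POSIX system where uname is available):

print(backtick(["uname", "-a"]))          # decoded stdout of the command
sysname = backtick(["uname", "-s"]).strip()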
https://github.com/wummel/dosage/blob/a0109c3a46219f280e6e5e77183674e40da0f304/dosagelib/util.py#L115-L118
from __future__ import division, print_function try: from urllib.parse import quote as url_quote, unquote as url_unquote except ImportError: from urllib import quote as url_quote, unquote as url_unquote try: from urllib.parse import urlparse, urlunparse, urlsplit except ImportError: from urlparse import urlparse, urlunparse, urlsplit try: from urllib import robotparser except ImportError: import robotparser import requests import sys import os import cgi import re import codecs import traceback import time import subprocess try: from HTMLParser import HTMLParser except ImportError: from html.parser import HTMLParser from .decorators import memoized from .output import out from .configuration import UserAgent, AppName, App, SupportUrl MaxContentBytes = 1024 * 1024 * 2 MaxImageBytes = 1024 * 1024 * 20 MaxRetries = 3 RetryPauseSeconds = 5 ConnectionTimeoutSecs = 60 UrlEncoding = "utf-8" if hasattr(requests, 'adapters'): requests.adapters.DEFAULT_RETRIES = MaxRetries def get_system_uid(): try: if os.name == 'nt': return get_nt_system_uid() if sys.platform == 'darwin': return get_osx_system_uid() except Exception: return get_mac_uid() else: return get_mac_uid() def get_nt_system_uid(): try: import _winreg as winreg except ImportError: import winreg lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) try: key = winreg.OpenKey(lm, r"Software\Microsoft\Cryptography") try: return winreg.QueryValueEx(key, "MachineGuid")[0] finally: key.Close() finally: lm.Close() def get_osx_system_uid(): res = backtick(["system_profile"]).splitlines() for line in res: if "r (system)" in line: return line.split(':', 1)[1].strip() raise ValueError("Could not find system number in %r" % res) def get_mac_uid(): import uuid return "%d" % uuid.getnode()
MIT License
cupy/cupy
cupy/cublas.py
dgmm
python
def dgmm(side, a, x, out=None, incx=1):
    assert a.ndim == 2
    assert 0 <= x.ndim <= 2
    assert a.dtype == x.dtype
    dtype = a.dtype.char
    if dtype == 'f':
        func = cublas.sdgmm
    elif dtype == 'd':
        func = cublas.ddgmm
    elif dtype == 'F':
        func = cublas.cdgmm
    elif dtype == 'D':
        func = cublas.zdgmm
    else:
        raise TypeError('invalid dtype')
    if side == 'L' or side == cublas.CUBLAS_SIDE_LEFT:
        side = cublas.CUBLAS_SIDE_LEFT
    elif side == 'R' or side == cublas.CUBLAS_SIDE_RIGHT:
        side = cublas.CUBLAS_SIDE_RIGHT
    else:
        raise ValueError('invalid side (actual: {})'.format(side))
    m, n = a.shape
    if side == cublas.CUBLAS_SIDE_LEFT:
        assert x.size >= (m - 1) * abs(incx) + 1
    else:
        assert x.size >= (n - 1) * abs(incx) + 1
    if out is None:
        if a._c_contiguous:
            order = 'C'
        else:
            order = 'F'
        out = cupy.empty((m, n), dtype=dtype, order=order)
    else:
        assert out.ndim == 2
        assert out.shape == a.shape
        assert out.dtype == a.dtype
    handle = device.get_cublas_handle()
    if out._c_contiguous:
        if not a._c_contiguous:
            a = a.copy(order='C')
        func(handle, 1 - side, n, m, a.data.ptr, n,
             x.data.ptr, incx, out.data.ptr, n)
    else:
        if not a._f_contiguous:
            a = a.copy(order='F')
        c = out
        if not out._f_contiguous:
            c = out.copy(order='F')
        func(handle, side, m, n, a.data.ptr, m,
             x.data.ptr, incx, c.data.ptr, m)
        if not out._f_contiguous:
            out[...] = c
    return out
Computes diag(x) @ a or a @ diag(x).

Computes diag(x) @ a if side is 'L', a @ diag(x) if side is 'R'.
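A minimal sketch of the left-side case (requires a CUDA device; the values are invented):

import cupy
from cupy import cublas  # the module this function lives in

a = cupy.arange(12, dtype=cupy.float64).reshape(3, 4)
x = cupy.array([1.0, 2.0, 3.0])             # one scale factor per row of a
y = cublas.dgmm('L', a, x)                  # diag(x) @ a
assert cupy.allclose(y, cupy.diag(x) @ a)   # reference check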
https://github.com/cupy/cupy/blob/a466b03ef0afd7c1ce1615e3f48da64ae38c1320/cupy/cublas.py#L808-L865
import numpy from numpy import linalg import warnings import cupy from cupy_backends.cuda.libs import cublas from cupy.cuda import device from cupy.linalg import _util _batched_gesv_limit = 256 def get_batched_gesv_limit(): global _batched_gesv_limit return _batched_gesv_limit def set_batched_gesv_limit(limit): global _batched_gesv_limit _batched_gesv_limit = limit def batched_gesv(a, b): _util._assert_cupy_array(a, b) _util._assert_stacked_2d(a) _util._assert_stacked_square(a) if not ((a.ndim == b.ndim or a.ndim == b.ndim + 1) and a.shape[:-1] == b.shape[:a.ndim - 1]): raise ValueError( 'a must have (..., M, M) shape and b must have (..., M) ' 'or (..., M, K)') dtype, out_dtype = _util.linalg_common_type(a, b) if dtype == 'f': t = 's' elif dtype == 'd': t = 'd' elif dtype == 'F': t = 'c' elif dtype == 'D': t = 'z' else: raise TypeError('invalid dtype') getrf = getattr(cublas, t + 'getrfBatched') getrs = getattr(cublas, t + 'getrsBatched') bs = numpy.prod(a.shape[:-2]) if a.ndim > 2 else 1 n = a.shape[-1] nrhs = b.shape[-1] if a.ndim == b.ndim else 1 b_shape = b.shape a_data_ptr = a.data.ptr b_data_ptr = b.data.ptr a = cupy.ascontiguousarray(a.reshape(bs, n, n).transpose(0, 2, 1), dtype=dtype) b = cupy.ascontiguousarray(b.reshape(bs, n, nrhs).transpose(0, 2, 1), dtype=dtype) if a.data.ptr == a_data_ptr: a = a.copy() if b.data.ptr == b_data_ptr: b = b.copy() if n > get_batched_gesv_limit(): warnings.warn('The matrix size ({}) exceeds the set limit ({})'. format(n, get_batched_gesv_limit())) handle = device.get_cublas_handle() lda = n a_step = lda * n * a.itemsize a_array = cupy.arange(a.data.ptr, a.data.ptr + a_step * bs, a_step, dtype=cupy.uintp) ldb = n b_step = ldb * nrhs * b.itemsize b_array = cupy.arange(b.data.ptr, b.data.ptr + b_step * bs, b_step, dtype=cupy.uintp) pivot = cupy.empty((bs, n), dtype=numpy.int32) dinfo = cupy.empty((bs,), dtype=numpy.int32) info = numpy.empty((1,), dtype=numpy.int32) getrf(handle, n, a_array.data.ptr, lda, pivot.data.ptr, dinfo.data.ptr, bs) _util._check_cublas_info_array_if_synchronization_allowed(getrf, dinfo) getrs(handle, cublas.CUBLAS_OP_N, n, nrhs, a_array.data.ptr, lda, pivot.data.ptr, b_array.data.ptr, ldb, info.ctypes.data, bs) if info[0] != 0: msg = 'Error reported by {} in cuBLAS. '.format(getrs.__name__) if info[0] < 0: msg += 'The {}-th parameter had an illegal value.'.format(-info[0]) raise linalg.LinAlgError(msg) return b.transpose(0, 2, 1).reshape(b_shape).astype(out_dtype, copy=False) def iamax(x, out=None): return _iamaxmin(x, out, 'amax') def iamin(x, out=None): return _iamaxmin(x, out, 'amin') def _iamaxmin(x, out, name): if x.ndim != 1: raise ValueError('x must be a 1D array (actual: {})'.format(x.ndim)) dtype = x.dtype.char if dtype == 'f': t = 's' elif dtype == 'd': t = 'd' elif dtype == 'F': t = 'c' elif dtype == 'D': t = 'z' else: raise TypeError('invalid dtype') func = getattr(cublas, 'i' + t + name) handle = device.get_cublas_handle() result_dtype = 'i' result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] 
= result return out def asum(x, out=None): if x.ndim != 1: raise ValueError('x must be a 1D array (actual: {})'.format(x.ndim)) dtype = x.dtype.char if dtype == 'f': func = cublas.sasum elif dtype == 'd': func = cublas.dasum elif dtype == 'F': func = cublas.scasum elif dtype == 'D': func = cublas.dzasum else: raise TypeError('invalid dtype') handle = device.get_cublas_handle() result_dtype = dtype.lower() result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] = result return out def axpy(a, x, y): _check_two_vectors(x, y) dtype = x.dtype.char if dtype == 'f': func = cublas.saxpy elif dtype == 'd': func = cublas.daxpy elif dtype == 'F': func = cublas.caxpy elif dtype == 'D': func = cublas.zaxpy else: raise TypeError('invalid dtype') handle = device.get_cublas_handle() a, a_ptr, orig_mode = _setup_scalar_ptr(handle, a, dtype) try: func(handle, x.size, a_ptr, x.data.ptr, 1, y.data.ptr, 1) finally: cublas.setPointerMode(handle, orig_mode) def dot(x, y, out=None): dtype = x.dtype.char if dtype == 'f': func = cublas.sdot elif dtype == 'd': func = cublas.ddot elif dtype in 'FD': raise TypeError('Use dotu() or dotc() for complex dtype') else: raise TypeError('invalid dtype') _check_two_vectors(x, y) handle = device.get_cublas_handle() result_dtype = dtype result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, y.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] = result return out def dotu(x, y, out=None): dtype = x.dtype.char if dtype in 'fd': return dot(x, y, out=out) elif dtype == 'F': func = cublas.cdotu elif dtype == 'D': func = cublas.zdotu else: raise TypeError('invalid dtype') _check_two_vectors(x, y) handle = device.get_cublas_handle() result_dtype = dtype result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, y.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] = result return out def dotc(x, y, out=None): dtype = x.dtype.char if dtype in 'fd': return dot(x, y, out=out) elif dtype == 'F': func = cublas.cdotc elif dtype == 'D': func = cublas.zdotc else: raise TypeError('invalid dtype') _check_two_vectors(x, y) handle = device.get_cublas_handle() result_dtype = dtype result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, y.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] = result return out def nrm2(x, out=None): if x.ndim != 1: raise ValueError('x must be a 1D array (actual: {})'.format(x.ndim)) dtype = x.dtype.char if dtype == 'f': func = cublas.snrm2 elif dtype == 'd': func = cublas.dnrm2 elif dtype == 'F': func = cublas.scnrm2 elif dtype == 'D': func = cublas.dznrm2 else: raise TypeError('invalid dtype') handle = device.get_cublas_handle() result_dtype = dtype.lower() result_ptr, result, orig_mode = _setup_result_ptr( handle, out, result_dtype) try: func(handle, x.size, x.data.ptr, 1, result_ptr) finally: cublas.setPointerMode(handle, orig_mode) if out is None: out = result elif out.dtype != result_dtype: out[...] 
= result return out def scal(a, x): if x.ndim != 1: raise ValueError('x must be a 1D array (actual: {})'.format(x.ndim)) dtype = x.dtype.char if dtype == 'f': func = cublas.sscal elif dtype == 'd': func = cublas.dscal elif dtype == 'F': func = cublas.cscal elif dtype == 'D': func = cublas.zscal else: raise TypeError('invalid dtype') handle = device.get_cublas_handle() a, a_ptr, orig_mode = _setup_scalar_ptr(handle, a, dtype) try: func(handle, x.size, a_ptr, x.data.ptr, 1) finally: cublas.setPointerMode(handle, orig_mode) def _check_two_vectors(x, y): if x.ndim != 1: raise ValueError('x must be a 1D array (actual: {})'.format(x.ndim)) if y.ndim != 1: raise ValueError('y must be a 1D array (actual: {})'.format(y.ndim)) if x.size != y.size: raise ValueError('x and y must be the same size (actual: {} and {})' ''.format(x.size, y.size)) if x.dtype != y.dtype: raise TypeError('x and y must be the same dtype (actual: {} and {})' ''.format(x.dtype, y.dtype)) def _setup_result_ptr(handle, out, dtype): mode = cublas.getPointerMode(handle) if out is None or isinstance(out, cupy.ndarray): if out is None or out.dtype != dtype: result = cupy.empty([], dtype=dtype) else: result = out result_ptr = result.data.ptr cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_DEVICE) elif isinstance(out, numpy.ndarray): if out.dtype != dtype: result = numpy.empty([], dtype=dtype) else: result = out result_ptr = result.ctypes.data cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_HOST) else: raise TypeError('out must be either cupy or numpy ndarray') return result_ptr, result, mode def _setup_scalar_ptr(handle, a, dtype): a, a_ptr = _get_scalar_ptr(a, dtype) mode = cublas.getPointerMode(handle) if isinstance(a, cupy.ndarray): cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_DEVICE) else: cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_HOST) return a, a_ptr, mode def _get_scalar_ptr(a, dtype): if isinstance(a, cupy.ndarray): if a.dtype != dtype: a = cupy.array(a, dtype=dtype) a_ptr = a.data.ptr else: if not (isinstance(a, numpy.ndarray) and a.dtype == dtype): a = numpy.array(a, dtype=dtype) a_ptr = a.ctypes.data return a, a_ptr def gemv(transa, alpha, a, x, beta, y): dtype = a.dtype.char if dtype == 'f': func = cublas.sgemv elif dtype == 'd': func = cublas.dgemv elif dtype == 'F': func = cublas.cgemv elif dtype == 'D': func = cublas.zgemv else: raise TypeError('invalid dtype') assert a.ndim == 2 assert x.ndim == y.ndim == 1 assert a.dtype == x.dtype == y.dtype m, n = a.shape transa = _trans_to_cublas_op(transa) if transa == cublas.CUBLAS_OP_N: xlen, ylen = n, m else: xlen, ylen = m, n assert x.shape[0] == xlen assert y.shape[0] == ylen alpha, alpha_ptr = _get_scalar_ptr(alpha, a.dtype) beta, beta_ptr = _get_scalar_ptr(beta, a.dtype) handle = device.get_cublas_handle() orig_mode = cublas.getPointerMode(handle) if isinstance(alpha, cupy.ndarray) or isinstance(beta, cupy.ndarray): if not isinstance(alpha, cupy.ndarray): alpha = cupy.array(alpha) alpha_ptr = alpha.data.ptr if not isinstance(beta, cupy.ndarray): beta = cupy.array(beta) beta_ptr = beta.data.ptr cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_DEVICE) else: cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_HOST) try: if a._f_contiguous: func(handle, transa, m, n, alpha_ptr, a.data.ptr, m, x.data.ptr, 1, beta_ptr, y.data.ptr, 1) elif a._c_contiguous and transa != cublas.CUBLAS_OP_C: if transa == cublas.CUBLAS_OP_N: transa = cublas.CUBLAS_OP_T else: transa = cublas.CUBLAS_OP_N func(handle, transa, n, m, alpha_ptr, 
a.data.ptr, n, x.data.ptr, 1, beta_ptr, y.data.ptr, 1) else: a = a.copy(order='F') func(handle, transa, m, n, alpha_ptr, a.data.ptr, m, x.data.ptr, 1, beta_ptr, y.data.ptr, 1) finally: cublas.setPointerMode(handle, orig_mode) def ger(alpha, x, y, a): dtype = a.dtype.char if dtype == 'f': func = cublas.sger elif dtype == 'd': func = cublas.dger elif dtype in 'FD': raise TypeError('Use geru or gerc for complex dtypes') else: raise TypeError('invalid dtype') assert a.ndim == 2 assert x.ndim == y.ndim == 1 assert a.dtype == x.dtype == y.dtype m, n = a.shape assert x.shape[0] == m assert y.shape[0] == n handle = device.get_cublas_handle() alpha, alpha_ptr, orig_mode = _setup_scalar_ptr(handle, alpha, dtype) x_ptr, y_ptr = x.data.ptr, y.data.ptr try: if a._f_contiguous: func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, a.data.ptr, m) elif a._c_contiguous: func(handle, n, m, alpha_ptr, y_ptr, 1, x_ptr, 1, a.data.ptr, n) else: aa = a.copy(order='F') func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, aa.data.ptr, m) a[...] = aa finally: cublas.setPointerMode(handle, orig_mode) def geru(alpha, x, y, a): dtype = a.dtype.char if dtype in 'fd': return ger(alpha, x, y, a) elif dtype == 'F': func = cublas.cgeru elif dtype == 'D': func = cublas.zgeru else: raise TypeError('invalid dtype') assert a.ndim == 2 assert x.ndim == y.ndim == 1 assert a.dtype == x.dtype == y.dtype m, n = a.shape assert x.shape[0] == m assert y.shape[0] == n handle = device.get_cublas_handle() alpha, alpha_ptr, orig_mode = _setup_scalar_ptr(handle, alpha, dtype) x_ptr, y_ptr = x.data.ptr, y.data.ptr try: if a._f_contiguous: func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, a.data.ptr, m) elif a._c_contiguous: func(handle, n, m, alpha_ptr, y_ptr, 1, x_ptr, 1, a.data.ptr, n) else: aa = a.copy(order='F') func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, aa.data.ptr, m) a[...] = aa finally: cublas.setPointerMode(handle, orig_mode) def gerc(alpha, x, y, a): dtype = a.dtype.char if dtype in 'fd': return ger(alpha, x, y, a) elif dtype == 'F': func = cublas.cgerc elif dtype == 'D': func = cublas.zgerc else: raise TypeError('invalid dtype') assert a.ndim == 2 assert x.ndim == y.ndim == 1 assert a.dtype == x.dtype == y.dtype m, n = a.shape assert x.shape[0] == m assert y.shape[0] == n handle = device.get_cublas_handle() alpha, alpha_ptr, orig_mode = _setup_scalar_ptr(handle, alpha, dtype) x_ptr, y_ptr = x.data.ptr, y.data.ptr try: if a._f_contiguous: func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, a.data.ptr, m) else: aa = a.copy(order='F') func(handle, m, n, alpha_ptr, x_ptr, 1, y_ptr, 1, aa.data.ptr, m) a[...] 
= aa finally: cublas.setPointerMode(handle, orig_mode) def _trans_to_cublas_op(trans): if trans == 'N' or trans == cublas.CUBLAS_OP_N: trans = cublas.CUBLAS_OP_N elif trans == 'T' or trans == cublas.CUBLAS_OP_T: trans = cublas.CUBLAS_OP_T elif trans == 'H' or trans == cublas.CUBLAS_OP_C: trans = cublas.CUBLAS_OP_C else: raise TypeError('invalid trans (actual: {})'.fromat(trans)) return trans def _decide_ld_and_trans(a, trans): ld = None if trans in (cublas.CUBLAS_OP_N, cublas.CUBLAS_OP_T): if a._f_contiguous: ld = a.shape[0] elif a._c_contiguous: ld = a.shape[1] trans = 1 - trans return ld, trans def _change_order_if_necessary(a, lda): if lda is None: lda = a.shape[0] if not a._f_contiguous: a = a.copy(order='F') return a, lda def gemm(transa, transb, a, b, out=None, alpha=1.0, beta=0.0): assert a.ndim == b.ndim == 2 assert a.dtype == b.dtype dtype = a.dtype.char if dtype == 'f': func = cublas.sgemm elif dtype == 'd': func = cublas.dgemm elif dtype == 'F': func = cublas.cgemm elif dtype == 'D': func = cublas.zgemm else: raise TypeError('invalid dtype') transa = _trans_to_cublas_op(transa) transb = _trans_to_cublas_op(transb) if transa == cublas.CUBLAS_OP_N: m, k = a.shape else: k, m = a.shape if transb == cublas.CUBLAS_OP_N: n = b.shape[1] assert b.shape[0] == k else: n = b.shape[0] assert b.shape[1] == k if out is None: out = cupy.empty((m, n), dtype=dtype, order='F') beta = 0.0 else: assert out.ndim == 2 assert out.shape == (m, n) assert out.dtype == dtype alpha, alpha_ptr = _get_scalar_ptr(alpha, a.dtype) beta, beta_ptr = _get_scalar_ptr(beta, a.dtype) handle = device.get_cublas_handle() orig_mode = cublas.getPointerMode(handle) if isinstance(alpha, cupy.ndarray) or isinstance(beta, cupy.ndarray): if not isinstance(alpha, cupy.ndarray): alpha = cupy.array(alpha) alpha_ptr = alpha.data.ptr if not isinstance(beta, cupy.ndarray): beta = cupy.array(beta) beta_ptr = beta.data.ptr cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_DEVICE) else: cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_HOST) lda, transa = _decide_ld_and_trans(a, transa) ldb, transb = _decide_ld_and_trans(b, transb) if not (lda is None or ldb is None): if out._f_contiguous: try: func(handle, transa, transb, m, n, k, alpha_ptr, a.data.ptr, lda, b.data.ptr, ldb, beta_ptr, out.data.ptr, m) finally: cublas.setPointerMode(handle, orig_mode) return out elif out._c_contiguous: try: func(handle, 1 - transb, 1 - transa, n, m, k, alpha_ptr, b.data.ptr, ldb, a.data.ptr, lda, beta_ptr, out.data.ptr, n) finally: cublas.setPointerMode(handle, orig_mode) return out a, lda = _change_order_if_necessary(a, lda) b, ldb = _change_order_if_necessary(b, ldb) c = out if not out._f_contiguous: c = out.copy(order='F') try: func(handle, transa, transb, m, n, k, alpha_ptr, a.data.ptr, lda, b.data.ptr, ldb, beta_ptr, c.data.ptr, m) finally: cublas.setPointerMode(handle, orig_mode) if not out._f_contiguous: out[...] 
= c return out def geam(transa, transb, alpha, a, beta, b, out=None): assert a.ndim == b.ndim == 2 assert a.dtype == b.dtype dtype = a.dtype.char if dtype == 'f': func = cublas.sgeam elif dtype == 'd': func = cublas.dgeam elif dtype == 'F': func = cublas.cgeam elif dtype == 'D': func = cublas.zgeam else: raise TypeError('invalid dtype') transa = _trans_to_cublas_op(transa) transb = _trans_to_cublas_op(transb) if transa == cublas.CUBLAS_OP_N: m, n = a.shape else: n, m = a.shape if transb == cublas.CUBLAS_OP_N: assert b.shape == (m, n) else: assert b.shape == (n, m) if out is None: out = cupy.empty((m, n), dtype=dtype, order='F') else: assert out.ndim == 2 assert out.shape == (m, n) assert out.dtype == dtype alpha, alpha_ptr = _get_scalar_ptr(alpha, a.dtype) beta, beta_ptr = _get_scalar_ptr(beta, a.dtype) handle = device.get_cublas_handle() orig_mode = cublas.getPointerMode(handle) if isinstance(alpha, cupy.ndarray) or isinstance(beta, cupy.ndarray): if not isinstance(alpha, cupy.ndarray): alpha = cupy.array(alpha) alpha_ptr = alpha.data.ptr if not isinstance(beta, cupy.ndarray): beta = cupy.array(beta) beta_ptr = beta.data.ptr cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_DEVICE) else: cublas.setPointerMode(handle, cublas.CUBLAS_POINTER_MODE_HOST) lda, transa = _decide_ld_and_trans(a, transa) ldb, transb = _decide_ld_and_trans(b, transb) if not (lda is None or ldb is None): if out._f_contiguous: try: func(handle, transa, transb, m, n, alpha_ptr, a.data.ptr, lda, beta_ptr, b.data.ptr, ldb, out.data.ptr, m) finally: cublas.setPointerMode(handle, orig_mode) return out elif out._c_contiguous: try: func(handle, 1-transa, 1-transb, n, m, alpha_ptr, a.data.ptr, lda, beta_ptr, b.data.ptr, ldb, out.data.ptr, n) finally: cublas.setPointerMode(handle, orig_mode) return out a, lda = _change_order_if_necessary(a, lda) b, ldb = _change_order_if_necessary(b, ldb) c = out if not out._f_contiguous: c = out.copy(order='F') try: func(handle, transa, transb, m, n, alpha_ptr, a.data.ptr, lda, beta_ptr, b.data.ptr, ldb, c.data.ptr, m) finally: cublas.setPointerMode(handle, orig_mode) if not out._f_contiguous: out[...] = c return out
MIT License
flyteorg/flytekit
flytekit/models/core/identifier.py
NodeExecutionIdentifier.from_flyte_idl
python
def from_flyte_idl(cls, p):
    return cls(
        node_id=p.node_id,
        execution_id=WorkflowExecutionIdentifier.from_flyte_idl(p.execution_id),
    )
:param flyteidl.core.identifier_pb2.NodeExecutionIdentifier p:
:rtype: NodeExecutionIdentifier
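A round-trip sketch (the project/domain/name values are invented):

from flyteidl.core import identifier_pb2

pb = identifier_pb2.NodeExecutionIdentifier(
    node_id="n0",
    execution_id=identifier_pb2.WorkflowExecutionIdentifier(
        project="flytesnacks", domain="development", name="exec-abc123"),
)
model = NodeExecutionIdentifier.from_flyte_idl(pb)
assert model.node_id == "n0"
assert model.to_flyte_idl() == pb   # serializes back to the same message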
https://github.com/flyteorg/flytekit/blob/6c032035563ae645b0b93558b3fe3362080057ea/flytekit/models/core/identifier.py#L182-L190
from flyteidl.core import identifier_pb2 as _identifier_pb2 from flytekit.models import common as _common_models class ResourceType(object): UNSPECIFIED = _identifier_pb2.UNSPECIFIED TASK = _identifier_pb2.TASK WORKFLOW = _identifier_pb2.WORKFLOW LAUNCH_PLAN = _identifier_pb2.LAUNCH_PLAN class Identifier(_common_models.FlyteIdlEntity): def __init__(self, resource_type, project, domain, name, version): self._resource_type = resource_type self._project = project self._domain = domain self._name = name self._version = version @property def resource_type(self): return self._resource_type def resource_type_name(self) -> str: return _identifier_pb2.ResourceType.Name(self.resource_type) @property def project(self): return self._project @property def domain(self): return self._domain @property def name(self): return self._name @property def version(self): return self._version def to_flyte_idl(self): return _identifier_pb2.Identifier( resource_type=self.resource_type, project=self.project, domain=self.domain, name=self.name, version=self.version, ) @classmethod def from_flyte_idl(cls, p): return cls( resource_type=p.resource_type, project=p.project, domain=p.domain, name=p.name, version=p.version, ) class WorkflowExecutionIdentifier(_common_models.FlyteIdlEntity): def __init__(self, project, domain, name): self._project = project self._domain = domain self._name = name @property def project(self): return self._project @property def domain(self): return self._domain @property def name(self): return self._name def to_flyte_idl(self): return _identifier_pb2.WorkflowExecutionIdentifier( project=self.project, domain=self.domain, name=self.name, ) @classmethod def from_flyte_idl(cls, p): return cls( project=p.project, domain=p.domain, name=p.name, ) class NodeExecutionIdentifier(_common_models.FlyteIdlEntity): def __init__(self, node_id, execution_id): self._node_id = node_id self._execution_id = execution_id @property def node_id(self): return self._node_id @property def execution_id(self): return self._execution_id def to_flyte_idl(self): return _identifier_pb2.NodeExecutionIdentifier( node_id=self.node_id, execution_id=self.execution_id.to_flyte_idl(), ) @classmethod
Apache License 2.0
lagg/steamodd
steam/user.py
friend_list.count
python
def count(self):
    return len(self._friends)
Returns the number of friends.
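A hypothetical usage sketch (requires a Steam Web API key; both the key and the 64-bit ID below are invented):

import steam
from steam.user import friend_list

steam.api.key.set("YOUR-API-KEY")           # hypothetical key
friends = friend_list("76561197960435530")  # invented 64-bit Steam ID
print(friends.count)   # 'count' sits under @property in the full source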
https://github.com/lagg/steamodd/blob/2e9ced4e7a6dbe3e09d5a648450bafc12b937b95/steam/user.py#L458-L460
import time import os from . import api class ProfileError(api.APIError): pass class ProfileNotFoundError(ProfileError): pass class VanityError(ProfileError): pass class BansError(ProfileError): pass class BansNotFoundError(BansError): pass class vanity_url(object): @property def id64(self): if self._cache: return self._cache res = None try: res = self._api["response"] self._cache = int(res["steamid"]) except KeyError: if not self._cache: if res: raise VanityError(res.get("message", "Invalid vanity response")) else: raise VanityError("Empty vanity response") return self._cache def __str__(self): return str(self.id64) def __init__(self, vanity, **kwargs): vanity = os.path.basename(str(vanity).strip('/')) self._cache = None self._api = api.interface("ISteamUser").ResolveVanityURL(vanityurl=vanity, **kwargs) class profile(object): @property def id64(self): return int(self._prof["steamid"]) @property def id32(self): return int(self.id64) - 76561197960265728 @property def persona(self): return self._prof["personaname"] @property def profile_url(self): return self._prof["profileurl"] @property def vanity(self): purl = self.profile_url.strip('/') if purl.find("/id/") != -1: return os.path.basename(purl) @property def avatar_small(self): return self._prof["avatar"] @property def avatar_medium(self): return self._prof["avatarmedium"] @property def avatar_large(self): return self._prof["avatarfull"] @property def status(self): return self._prof["personastate"] @property def visibility(self): return self._prof["communityvisibilitystate"] @property def configured(self): return bool(self._prof.get("profilestate")) @property def last_online(self): return time.localtime(self._prof["lastlogoff"]) @property def comments_enabled(self): return bool(self._prof.get("commentpermission")) @property def real_name(self): return self._prof.get("realname") @property def primary_group(self): return self._prof.get("primaryclanid") @property def creation_date(self): timestamp = self._prof.get("timecreated") if timestamp: return time.localtime(timestamp) @property def current_game(self): obj = self._prof gameid = obj.get("gameid") gameserverip = obj.get("gameserverip") gameextrainfo = obj.get("gameextrainfo") return (int(gameid) if gameid else None, gameserverip, gameextrainfo) @property def location(self): obj = self._prof return (obj.get("locstatecode"), obj.get("loccountrycode")) @property def lobbysteamid(self): return int(self._prof.get("lobbysteamid", 0)) @property def _prof(self): if not self._cache: try: res = self._api["response"]["players"] try: self._cache = res[0] except IndexError: raise ProfileNotFoundError("Profile not found") except KeyError: raise ProfileError("Bad player profile results returned") return self._cache @property def level(self): level_key = "player_level" if level_key in self._api["response"]: return self._api["response"][level_key] try: lvl = api.interface("IPlayerService").GetSteamLevel(steamid=self.id64)["response"][level_key] self._api["response"][level_key] = lvl return lvl except: return -1 @classmethod def from_def(cls, obj): prof = cls(obj["steamid"]) prof._cache = obj return prof def __str__(self): return self.persona or str(self.id64) def __init__(self, sid, **kwargs): try: sid = sid.id64 except AttributeError: sid = os.path.basename(str(sid).strip('/')) self._cache = {} self._api = api.interface("ISteamUser").GetPlayerSummaries(version=2, steamids=sid, **kwargs) class _batched_request(object): def __init__(self, batch, batchsize=100): self._batches = [] batchlen, rem = 
divmod(len(batch), batchsize) if rem > 0: batchlen += 1 for i in range(batchlen): offset = i * batchsize batch_chunk = batch[offset:offset + batchsize] self._batches.append(list(self._process_batch(batch_chunk))) def _process_batch(self, batch): return batch def _call_method(self, batch): raise NotImplementedError def __iter__(self): return next(self) def __next__(self): for batch in self._batches: for result in self._call_method(batch): yield result next = __next__ class profile_batch(_batched_request): def __init__(self, sids): super(profile_batch, self).__init__(sids) def _process_batch(self, batch): processed = set() for sid in batch: try: sid = sid.id64 except AttributeError: sid = os.path.basename(str(sid).strip('/')) processed.add(str(sid)) return processed def _call_method(self, batch): response = api.interface("ISteamUser").GetPlayerSummaries(version=2, steamids=','.join(batch)) return [profile.from_def(player) for player in response["response"]["players"]] class bans(object): def __init__(self, sid, **kwargs): try: sid = sid.id64 except AttributeError: sid = os.path.basename(str(sid).strip('/')) self._cache = {} self._api = api.interface("ISteamUser").GetPlayerBans(steamids=sid, **kwargs) @property def _bans(self): if not self._cache: try: res = self._api["players"] try: self._cache = res[0] except IndexError: raise BansNotFoundError("No ban results for this profile") except KeyError: raise BansError("Bad ban data returned") return self._cache @property def id64(self): return int(self._bans["SteamId"]) @property def community(self): return self._bans["CommunityBanned"] @property def vac(self): return self._bans["VACBanned"] @property def vac_count(self): return self._bans["NumberOfVACBans"] @property def days_unbanned(self): return self._bans["DaysSinceLastBan"] @property def economy(self): return self._bans["EconomyBan"] @property def game_count(self): return self._bans["NumberOfGameBans"] @classmethod def from_def(cls, obj): instance = cls(int(obj["SteamId"])) instance._cache = obj return instance class bans_batch(_batched_request): def __init__(self, sids): super(bans_batch, self).__init__(sids) def _process_batch(self, batch): processed = set() for sid in batch: try: sid = sid.id64 except AttributeError: sid = os.path.basename(str(sid).strip('/')) processed.add(str(sid)) return processed def _call_method(self, batch): response = api.interface("ISteamUser").GetPlayerBans(steamids=','.join(batch)) return [bans.from_def(player) for player in response["players"]] class friend(object): def __init__(self, friend_dict): self._friend_dict = friend_dict @property def steamid(self): return int(self._friend_dict["steamid"]) @property def relationship(self): return self._friend_dict["relationship"] @property def since(self): return time.localtime(self._friend_dict["friend_since"]) class friend_list(object): def __init__(self, sid, relationship="all", **kwargs): try: sid = sid.id64 except AttributeError: sid = os.path.basename(str(sid).strip('/')) self._api = api.interface("ISteamUser").GetFriendList(steamid=sid, relationship=relationship, **kwargs) try: self._friends = self._api["friendslist"]["friends"] except api.HTTPFileNotFoundError: raise ProfileNotFoundError("Profile not found") except api.HTTPInternalServerError: raise ProfileNotFoundError("Invalid Steam ID given") self.index = 0 @property
ISC License
systemsbioinformatics/stochpy
stochpy/tools/ParseDistributions.py
retrieve_index
python
def retrieve_index(input_, lst_names):
    if input_ in lst_names:
        index = lst_names.index(input_)
    elif isinstance(input_, int) and input_ < len(lst_names):
        index = input_
    else:
        raise Warning("'{0}' is not recognized. Choose one of {1} or {2}.".format(
            input_, lst_names, range(len(lst_names))))
    return index
Converts identifiers (or indices) to indices.

Input:
- *input_*: a name from *lst_names* or an integer index
- *lst_names*: list of recognized names
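A short sketch of both accepted input forms:

names = ['S1', 'S2', 'S3']
assert retrieve_index('S2', names) == 1   # lookup by name
assert retrieve_index(1, names) == 1      # in-range index passes through
# anything else raises the Warning shown above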
https://github.com/systemsbioinformatics/stochpy/blob/48f07e4a235158744680a94ed41c84aee040362d/stochpy/tools/ParseDistributions.py#L14-L29
import sys, numpy as np
BSD 3-Clause New or Revised License
pycontribs/pyrax
pyrax/autoscale.py
ScalingGroupManager.update_webhook_metadata
python
def update_webhook_metadata(self, scaling_group, policy, webhook, metadata):
    if not isinstance(webhook, AutoScaleWebhook):
        webhook = self.get_webhook(scaling_group, policy, webhook)
    curr_meta = webhook.metadata or {}
    curr_meta.update(metadata)
    return self.update_webhook(scaling_group, policy, webhook,
                               metadata=curr_meta)
Adds the given metadata dict to the existing metadata for the specified webhook.
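A hypothetical usage sketch (all IDs invented; assumes pyrax is already authenticated):

import pyrax

pyrax.set_credential_file("~/.pyrax.cfg")  # hypothetical credentials file
au = pyrax.autoscale
sg = au.get("my-group-id")                 # invented scaling group ID
# merges the new keys into the webhook's existing metadata
sg.update_webhook_metadata("my-policy-id", "my-webhook-id", {"env": "prod"})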
https://github.com/pycontribs/pyrax/blob/a0c022981f76a4cba96a22ecc19bb52843ac4fbe/pyrax/autoscale.py#L706-L716
from __future__ import absolute_import, unicode_literals import base64 import pyrax from pyrax.client import BaseClient from pyrax.cloudloadbalancers import CloudLoadBalancer import pyrax.exceptions as exc from pyrax.manager import BaseManager from pyrax.resource import BaseResource import pyrax.utils as utils class ScalingGroup(BaseResource): def __init__(self, *args, **kwargs): super(ScalingGroup, self).__init__(*args, **kwargs) self._non_display = ["active", "launchConfiguration", "links", "groupConfiguration", "policies", "scalingPolicies"] self._repr_properties = ["name", "cooldown", "metadata", "min_entities", "max_entities"] self._make_policies() def _make_policies(self): self.policies = [AutoScalePolicy(self.manager, dct, self) for dct in self.scalingPolicies] def get_state(self): return self.manager.get_state(self) def pause(self): return self.manager.pause(self) def resume(self): return self.manager.resume(self) def update(self, name=None, cooldown=None, min_entities=None, max_entities=None, metadata=None): return self.manager.update(self, name=name, cooldown=cooldown, min_entities=min_entities, max_entities=max_entities, metadata=metadata) def update_metadata(self, metadata): return self.manager.update_metadata(self, metadata=metadata) def get_configuration(self): return self.manager.get_configuration(self) def get_launch_config(self): return self.manager.get_launch_config(self) def update_launch_config(self, server_name=None, image=None, flavor=None, disk_config=None, metadata=None, personality=None, networks=None, load_balancers=None, key_name=None, config_drive=False, user_data=None): return self.manager.update_launch_config(self, server_name=server_name, image=image, flavor=flavor, disk_config=disk_config, metadata=metadata, personality=personality, networks=networks, load_balancers=load_balancers, key_name=key_name, config_drive=config_drive, user_data=user_data) def update_launch_metadata(self, metadata): return self.manager.update_launch_metadata(self, metadata) def add_policy(self, name, policy_type, cooldown, change=None, is_percent=False, desired_capacity=None, args=None): return self.manager.add_policy(self, name, policy_type, cooldown, change=change, is_percent=is_percent, desired_capacity=desired_capacity, args=args) def list_policies(self): return self.manager.list_policies(self) def get_policy(self, policy): return self.manager.get_policy(self, policy) def update_policy(self, policy, name=None, policy_type=None, cooldown=None, change=None, is_percent=False, desired_capacity=None, args=None): return self.manager.update_policy(scaling_group=self, policy=policy, name=name, policy_type=policy_type, cooldown=cooldown, change=change, is_percent=is_percent, desired_capacity=desired_capacity, args=args) def execute_policy(self, policy): return self.manager.execute_policy(scaling_group=self, policy=policy) def delete_policy(self, policy): return self.manager.delete_policy(scaling_group=self, policy=policy) def add_webhook(self, policy, name, metadata=None): return self.manager.add_webhook(self, policy, name, metadata=metadata) def list_webhooks(self, policy): return self.manager.list_webhooks(self, policy) def update_webhook(self, policy, webhook, name=None, metadata=None): return self.manager.update_webhook(scaling_group=self, policy=policy, webhook=webhook, name=name, metadata=metadata) def update_webhook_metadata(self, policy, webhook, metadata): return self.manager.update_webhook_metadata(self, policy, webhook, metadata) def delete_webhook(self, policy, webhook): 
return self.manager.delete_webhook(self, policy, webhook) @property def policy_count(self): return len(self.policies) @property def name(self): return self.groupConfiguration.get("name") @name.setter def name(self, val): self.groupConfiguration["name"] = val @property def cooldown(self): return self.groupConfiguration.get("cooldown") @cooldown.setter def cooldown(self, val): self.groupConfiguration["cooldown"] = val @property def metadata(self): return self.groupConfiguration.get("metadata") @metadata.setter def metadata(self, val): self.groupConfiguration["metadata"] = val @property def min_entities(self): return self.groupConfiguration.get("minEntities") @min_entities.setter def min_entities(self, val): self.groupConfiguration["minEntities"] = val @property def max_entities(self): return self.groupConfiguration.get("maxEntities") @max_entities.setter def max_entities(self, val): self.groupConfiguration["maxEntities"] = val class ScalingGroupManager(BaseManager): def __init__(self, api, resource_class=None, response_key=None, plural_response_key=None, uri_base=None): super(ScalingGroupManager, self).__init__(api, resource_class=resource_class, response_key=response_key, plural_response_key=plural_response_key, uri_base=uri_base) def get_state(self, scaling_group): uri = "/%s/%s/state" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_get(uri) data = resp_body["group"] ret = {} ret["active"] = [itm["id"] for itm in data["active"]] ret["active_capacity"] = data["activeCapacity"] ret["desired_capacity"] = data["desiredCapacity"] ret["pending_capacity"] = data["pendingCapacity"] ret["paused"] = data["paused"] return ret def pause(self, scaling_group): uri = "/%s/%s/pause" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_post(uri) return None def resume(self, scaling_group): uri = "/%s/%s/resume" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_post(uri) return None def get_configuration(self, scaling_group): uri = "/%s/%s/config" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_get(uri) return resp_body.get("groupConfiguration") def replace(self, scaling_group, name, cooldown, min_entities, max_entities, metadata=None): body = self._create_group_config_body(name, cooldown, min_entities, max_entities, metadata=metadata) group_id = utils.get_id(scaling_group) uri = "/%s/%s/config" % (self.uri_base, group_id) resp, resp_body = self.api.method_put(uri, body=body) def update(self, scaling_group, name=None, cooldown=None, min_entities=None, max_entities=None, metadata=None): if not isinstance(scaling_group, ScalingGroup): scaling_group = self.get(scaling_group) uri = "/%s/%s/config" % (self.uri_base, scaling_group.id) if cooldown is None: cooldown = scaling_group.cooldown if min_entities is None: min_entities = scaling_group.min_entities if max_entities is None: max_entities = scaling_group.max_entities body = {"name": name or scaling_group.name, "cooldown": cooldown, "minEntities": min_entities, "maxEntities": max_entities, "metadata": metadata or scaling_group.metadata, } resp, resp_body = self.api.method_put(uri, body=body) return None def update_metadata(self, scaling_group, metadata): if not isinstance(scaling_group, ScalingGroup): scaling_group = self.get(scaling_group) curr_meta = scaling_group.metadata curr_meta.update(metadata) return self.update(scaling_group, metadata=curr_meta) def get_launch_config(self, scaling_group): key_map = { "OS-DCF:diskConfig": 
"disk_config", "flavorRef": "flavor", "imageRef": "image", } uri = "/%s/%s/launch" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_get(uri) ret = {} data = resp_body.get("launchConfiguration") ret["type"] = data.get("type") args = data.get("args", {}) ret["load_balancers"] = args.get("loadBalancers") for key, value in args.get("server", {}).items(): norm_key = key_map.get(key, key) ret[norm_key] = value return ret def replace_launch_config(self, scaling_group, launch_config_type, server_name, image, flavor, disk_config=None, metadata=None, personality=None, networks=None, load_balancers=None, key_name=None, config_drive=False, user_data=None): group_id = utils.get_id(scaling_group) uri = "/%s/%s/launch" % (self.uri_base, group_id) body = self._create_launch_config_body( launch_config_type=launch_config_type, server_name=server_name, image=image, flavor=flavor, disk_config=disk_config, metadata=metadata, personality=personality, networks=networks, load_balancers=load_balancers, key_name=key_name, config_drive=config_drive, user_data=user_data) resp, resp_body = self.api.method_put(uri, body=body) def update_launch_config(self, scaling_group, server_name=None, image=None, flavor=None, disk_config=None, metadata=None, personality=None, networks=None, load_balancers=None, key_name=None, config_drive=False, user_data=None): if not isinstance(scaling_group, ScalingGroup): scaling_group = self.get(scaling_group) uri = "/%s/%s/launch" % (self.uri_base, scaling_group.id) largs = scaling_group.launchConfiguration.get("args", {}) srv_args = largs.get("server", {}) lb_args = largs.get("loadBalancers", {}) flav = flavor or srv_args.get("flavorRef") dconf = disk_config or srv_args.get("OS-DCF:diskConfig", "AUTO") if personality is None: personality = srv_args.get("personality", []) cfg_drv = config_drive or srv_args.get("config_drive") if user_data: user_data = base64.b64encode(user_data) usr_data = user_data or srv_args.get("user_data") update_metadata = metadata or srv_args.get("metadata") body = {"type": "launch_server", "args": { "server": { "name": server_name or srv_args.get("name"), "imageRef": image or srv_args.get("imageRef"), "flavorRef": flav, "OS-DCF:diskConfig": dconf, "networks": networks or srv_args.get("networks"), }, "loadBalancers": load_balancers or lb_args, }, } bas = body["args"]["server"] if cfg_drv: bas["config_drive"] = cfg_drv if usr_data: bas["user_data"] = usr_data if personality: bas["personality"] = self._encode_personality(personality) if update_metadata: bas["metadata"] = update_metadata key_name = key_name or srv_args.get("key_name") if key_name: bas["key_name"] = key_name resp, resp_body = self.api.method_put(uri, body=body) return None def update_launch_metadata(self, scaling_group, metadata): if not isinstance(scaling_group, ScalingGroup): scaling_group = self.get(scaling_group) curr_meta = scaling_group.launchConfiguration.get("args", {}).get( "server", {}).get("metadata", {}) curr_meta.update(metadata) return self.update_launch_config(scaling_group, metadata=curr_meta) def add_policy(self, scaling_group, name, policy_type, cooldown, change=None, is_percent=False, desired_capacity=None, args=None): uri = "/%s/%s/policies" % (self.uri_base, utils.get_id(scaling_group)) body = self._create_policy_body(name, policy_type, cooldown, change=change, is_percent=is_percent, desired_capacity=desired_capacity, args=args) body = [body] resp, resp_body = self.api.method_post(uri, body=body) pol_info = resp_body.get("policies")[0] return 
AutoScalePolicy(self, pol_info, scaling_group) def _create_policy_body(self, name, policy_type, cooldown, change=None, is_percent=None, desired_capacity=None, args=None): body = {"name": name, "cooldown": cooldown, "type": policy_type} if change is not None: if is_percent: body["changePercent"] = change else: body["change"] = change if desired_capacity is not None: body["desiredCapacity"] = desired_capacity if args is not None: body["args"] = args return body def list_policies(self, scaling_group): uri = "/%s/%s/policies" % (self.uri_base, utils.get_id(scaling_group)) resp, resp_body = self.api.method_get(uri) return [AutoScalePolicy(self, data, scaling_group) for data in resp_body.get("policies", [])] def get_policy(self, scaling_group, policy): uri = "/%s/%s/policies/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) resp, resp_body = self.api.method_get(uri) data = resp_body.get("policy") return AutoScalePolicy(self, data, scaling_group) def replace_policy(self, scaling_group, policy, name, policy_type, cooldown, change=None, is_percent=False, desired_capacity=None, args=None): policy_id = utils.get_id(policy) group_id = utils.get_id(scaling_group) uri = "/%s/%s/policies/%s" % (self.uri_base, group_id, policy_id) body = self._create_policy_body(name=name, policy_type=policy_type, cooldown=cooldown, change=change, is_percent=is_percent, desired_capacity=desired_capacity, args=args) resp, resp_body = self.api.method_put(uri, body=body) def update_policy(self, scaling_group, policy, name=None, policy_type=None, cooldown=None, change=None, is_percent=False, desired_capacity=None, args=None): uri = "/%s/%s/policies/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) if not isinstance(policy, AutoScalePolicy): policy = self.get_policy(scaling_group, policy) body = {"name": name or policy.name, "type": policy_type or policy.type, "cooldown": cooldown or policy.cooldown, } if desired_capacity is not None: body["desiredCapacity"] = desired_capacity elif change is not None: if is_percent: body["changePercent"] = change else: body["change"] = change else: if getattr(policy, "changePercent", None) is not None: body["changePercent"] = policy.changePercent elif getattr(policy, "change", None) is not None: body["change"] = policy.change elif getattr(policy, "desiredCapacity", None) is not None: body["desiredCapacity"] = policy.desiredCapacity args = args or getattr(policy, "args", None) if args is not None: body["args"] = args resp, resp_body = self.api.method_put(uri, body=body) return None def execute_policy(self, scaling_group, policy): uri = "/%s/%s/policies/%s/execute" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) resp, resp_body = self.api.method_post(uri) return None def delete_policy(self, scaling_group, policy): uri = "/%s/%s/policies/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) resp, resp_body = self.api.method_delete(uri) def _create_webhook_body(self, name, metadata=None): if metadata is None: metadata = {} body = {"name": name, "metadata": metadata} return body def add_webhook(self, scaling_group, policy, name, metadata=None): uri = "/%s/%s/policies/%s/webhooks" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) body = self._create_webhook_body(name, metadata=metadata) body = [body] resp, resp_body = self.api.method_post(uri, body=body) data = resp_body.get("webhooks")[0] return AutoScaleWebhook(self, data, policy, scaling_group) def list_webhooks(self, scaling_group, policy): 
uri = "/%s/%s/policies/%s/webhooks" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy)) resp, resp_body = self.api.method_get(uri) return [AutoScaleWebhook(self, data, policy, scaling_group) for data in resp_body.get("webhooks", [])] def get_webhook(self, scaling_group, policy, webhook): uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy), utils.get_id(webhook)) resp, resp_body = self.api.method_get(uri) data = resp_body.get("webhook") return AutoScaleWebhook(self, data, policy, scaling_group) def replace_webhook(self, scaling_group, policy, webhook, name, metadata=None): uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy), utils.get_id(webhook)) group_id = utils.get_id(scaling_group) policy_id = utils.get_id(policy) webhook_id = utils.get_id(webhook) body = self._create_webhook_body(name, metadata=metadata) resp, resp_body = self.api.method_put(uri, body=body) def update_webhook(self, scaling_group, policy, webhook, name=None, metadata=None): uri = "/%s/%s/policies/%s/webhooks/%s" % (self.uri_base, utils.get_id(scaling_group), utils.get_id(policy), utils.get_id(webhook)) if not isinstance(webhook, AutoScaleWebhook): webhook = self.get_webhook(scaling_group, policy, webhook) body = {"name": name or webhook.name, "metadata": metadata or webhook.metadata, } resp, resp_body = self.api.method_put(uri, body=body) webhook.reload() return webhook
Apache License 2.0
osmr/imgclsmob
gluon/gluoncv2/models/sepreresnet_cifar.py
sepreresnet164bn_svhn
python
def sepreresnet164bn_svhn(classes=10, **kwargs):
    return get_sepreresnet_cifar(classes=classes, blocks=164, bottleneck=True,
                                 model_name="sepreresnet164bn_svhn", **kwargs)
SE-PreResNet-164(BN) model for SVHN from 'Squeeze-and-Excitation Networks,'
https://arxiv.org/abs/1709.01507.

Parameters:
----------
classes : int, default 10
    Number of classification classes.
pretrained : bool, default False
    Whether to load the pretrained weights for model.
ctx : Context, default CPU
    The context in which to load the pretrained weights.
root : str, default '~/.mxnet/models'
    Location for keeping the model parameters.
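A minimal construction sketch (CPU context, randomly initialized weights; the dummy input is invented):

import mxnet as mx

net = sepreresnet164bn_svhn()        # pretrained=False by default
net.initialize(ctx=mx.cpu())
x = mx.nd.zeros((1, 3, 32, 32))      # a single 32x32 RGB input
y = net(x)
assert y.shape == (1, 10)            # ten SVHN digit classes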
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/gluon/gluoncv2/models/sepreresnet_cifar.py#L368-L384
__all__ = ['CIFARSEPreResNet', 'sepreresnet20_cifar10', 'sepreresnet20_cifar100', 'sepreresnet20_svhn', 'sepreresnet56_cifar10', 'sepreresnet56_cifar100', 'sepreresnet56_svhn', 'sepreresnet110_cifar10', 'sepreresnet110_cifar100', 'sepreresnet110_svhn', 'sepreresnet164bn_cifar10', 'sepreresnet164bn_cifar100', 'sepreresnet164bn_svhn', 'sepreresnet272bn_cifar10', 'sepreresnet272bn_cifar100', 'sepreresnet272bn_svhn', 'sepreresnet542bn_cifar10', 'sepreresnet542bn_cifar100', 'sepreresnet542bn_svhn', 'sepreresnet1001_cifar10', 'sepreresnet1001_cifar100', 'sepreresnet1001_svhn', 'sepreresnet1202_cifar10', 'sepreresnet1202_cifar100', 'sepreresnet1202_svhn'] import os from mxnet import cpu from mxnet.gluon import nn, HybridBlock from .common import conv3x3_block from .sepreresnet import SEPreResUnit class CIFARSEPreResNet(HybridBlock): def __init__(self, channels, init_block_channels, bottleneck, bn_use_global_stats=False, in_channels=3, in_size=(32, 32), classes=10, **kwargs): super(CIFARSEPreResNet, self).__init__(**kwargs) self.in_size = in_size self.classes = classes with self.name_scope(): self.features = nn.HybridSequential(prefix="") self.features.add(conv3x3_block( in_channels=in_channels, out_channels=init_block_channels, bn_use_global_stats=bn_use_global_stats)) in_channels = init_block_channels for i, channels_per_stage in enumerate(channels): stage = nn.HybridSequential(prefix="stage{}_".format(i + 1)) with stage.name_scope(): for j, out_channels in enumerate(channels_per_stage): strides = 2 if (j == 0) and (i != 0) else 1 stage.add(SEPreResUnit( in_channels=in_channels, out_channels=out_channels, strides=strides, bn_use_global_stats=bn_use_global_stats, bottleneck=bottleneck, conv1_stride=False)) in_channels = out_channels self.features.add(stage) self.features.add(nn.AvgPool2D( pool_size=8, strides=1)) self.output = nn.HybridSequential(prefix="") self.output.add(nn.Flatten()) self.output.add(nn.Dense( units=classes, in_units=in_channels)) def hybrid_forward(self, F, x): x = self.features(x) x = self.output(x) return x def get_sepreresnet_cifar(classes, blocks, bottleneck, model_name=None, pretrained=False, ctx=cpu(), root=os.path.join("~", ".mxnet", "models"), **kwargs): assert (classes in [10, 100]) if bottleneck: assert ((blocks - 2) % 9 == 0) layers = [(blocks - 2) // 9] * 3 else: assert ((blocks - 2) % 6 == 0) layers = [(blocks - 2) // 6] * 3 channels_per_layers = [16, 32, 64] init_block_channels = 16 channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)] if bottleneck: channels = [[cij * 4 for cij in ci] for ci in channels] net = CIFARSEPreResNet( channels=channels, init_block_channels=init_block_channels, bottleneck=bottleneck, classes=classes, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file net.load_parameters( filename=get_model_file( model_name=model_name, local_model_store_dir_path=root), ctx=ctx) return net def sepreresnet20_cifar10(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=20, bottleneck=False, model_name="sepreresnet20_cifar10", **kwargs) def sepreresnet20_cifar100(classes=100, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=20, bottleneck=False, model_name="sepreresnet20_cifar100", **kwargs) def sepreresnet20_svhn(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=20, bottleneck=False, model_name="sepreresnet20_svhn", 
**kwargs) def sepreresnet56_cifar10(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=56, bottleneck=False, model_name="sepreresnet56_cifar10", **kwargs) def sepreresnet56_cifar100(classes=100, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=56, bottleneck=False, model_name="sepreresnet56_cifar100", **kwargs) def sepreresnet56_svhn(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=56, bottleneck=False, model_name="sepreresnet56_svhn", **kwargs) def sepreresnet110_cifar10(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=110, bottleneck=False, model_name="sepreresnet110_cifar10", **kwargs) def sepreresnet110_cifar100(classes=100, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=110, bottleneck=False, model_name="sepreresnet110_cifar100", **kwargs) def sepreresnet110_svhn(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=110, bottleneck=False, model_name="sepreresnet110_svhn", **kwargs) def sepreresnet164bn_cifar10(classes=10, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=164, bottleneck=True, model_name="sepreresnet164bn_cifar10", **kwargs) def sepreresnet164bn_cifar100(classes=100, **kwargs): return get_sepreresnet_cifar(classes=classes, blocks=164, bottleneck=True, model_name="sepreresnet164bn_cifar100", **kwargs)
MIT License
qahive/robotframework-puppeteer
PuppeteerLibrary/keywords/element.py
ElementKeywords.element_should_be_enabled
python
def element_should_be_enabled(self, locator):
    return self.loop.run_until_complete(
        self.get_async_keyword_group().element_should_be_enabled(locator))
Verifies that the element identified by locator is enabled.
https://github.com/qahive/robotframework-puppeteer/blob/fba8f5c71dcec0a778a9ed22129bf1dc5e8ef1c3/PuppeteerLibrary/keywords/element.py#L112-L116
from PuppeteerLibrary.base.robotlibcore import keyword from PuppeteerLibrary.base.librarycomponent import LibraryComponent from PuppeteerLibrary.ikeywords.ielement_async import iElementAsync class ElementKeywords(LibraryComponent): def __init__(self, ctx): super().__init__(ctx) def get_async_keyword_group(self) -> iElementAsync: return self.ctx.get_current_library_context().get_async_keyword_group(type(self).__name__) @keyword def click_element(self, locator, noWaitAfter='False'): self.info(f"Clicking element '{locator}'.") self.loop.run_until_complete(self.get_async_keyword_group().click_element( locator=locator, noWaitAfter=noWaitAfter )) @keyword def click_link(self, locator): self.info(f"Clicking link '{locator}'.") return self.loop.run_until_complete(self.get_async_keyword_group().click_link(locator)) @keyword def click_button(self, locator): self.info(f"Clicking button '{locator}'.") self.loop.run_until_complete(self.get_async_keyword_group().click_button(locator)) @keyword def click_image(self, locator): self.info(f"Clicking image '{locator}'.") self.loop.run_until_complete(self.get_async_keyword_group().click_image(locator)) @keyword def click_element_at_coordinate(self, locator, xoffset, yoffset): self.info(f"Clicking element at coordinate '{locator}' at xoffset: '{xoffset}', yoffset: '{yoffset}'.") self.loop.run_until_complete(self.get_async_keyword_group().click_element_at_coordinate(locator, xoffset, yoffset)) @keyword def upload_file(self, locator, file_path): return self.loop.run_until_complete(self.get_async_keyword_group().upload_file(locator, file_path)) @keyword def press_keys(self, locator, *keys): self.info(f"Sending key(s) {keys} to {locator} element.") return self.loop.run_until_complete(self.get_async_keyword_group().press_keys(locator, *keys)) @keyword def element_should_be_disabled(self, locator): return self.loop.run_until_complete(self.get_async_keyword_group().element_should_be_disabled(locator)) @keyword
Apache License 2.0
justdoit0823/pywxclient
pywxclient/utils.py
list2orderdict
python
def list2orderdict(key_list, val_list):
    return OrderedDict(zip(key_list, val_list))
Return an ordered dict built from two lists (keys and values).
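A short sketch showing positional pairing and preserved order:

d = list2orderdict(['a', 'b', 'c'], [1, 2, 3])
assert list(d.items()) == [('a', 1), ('b', 2), ('c', 3)]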
https://github.com/justdoit0823/pywxclient/blob/9a61c4c0c26d6566e6121641ab37c35b176d8e20/pywxclient/utils.py#L244-L246
import functools
import json
from collections import OrderedDict
from urllib.request import unquote
from xml.dom.minidom import Document, parseString

__all__ = [
    'ParseWxRes', 'cookie_to_dict', 'MessageType', 'json_dumps',
    'xml2dict', 'dict2xml', 'call_retry', 'list2orderdict']


class QRUUID:
    code = 0
    uuid = None


class JSWindow:
    code = None
    redirect_uri = None
    synccheck = None
    userAvatar = None

    def __init__(self, qr_uuid=None):
        self.QRLogin = qr_uuid


class ParseException(Exception):
    pass


class ParseWxRes:

    @classmethod
    def exec_js(cls, js_code, js_locals=None):
        window = JSWindow(QRUUID())
        if js_locals:
            locals().update(js_locals)
        try:
            exec(js_code, None)
        except Exception:
            raise ParseException
        else:
            return window

    @classmethod
    def parse_qrcode_uuid(cls, res_js):
        window = cls.exec_js(res_js)
        return {'code': window.QRLogin.code, 'uuid': window.QRLogin.uuid}

    @classmethod
    def parse_login(cls, res_js):
        window = cls.exec_js(res_js)
        return {
            'code': window.code,
            'redirect_uri': window.redirect_uri,
            'userAvatar': window.userAvatar}

    @classmethod
    def parse_new_login_page(cls, res_xml):
        data = xml2dict(res_xml)['error']
        if 'pass_ticket' in data:
            data['pass_ticket'] = unquote(data['pass_ticket'])
        return data

    @classmethod
    def parse_sync_check(cls, res_js):
        js_locals = {'retcode': 'retcode', 'selector': 'selector'}
        window = cls.exec_js(res_js, js_locals=js_locals)
        return window.synccheck


def cookie_to_dict(cookie):
    attrs = (
        'version', 'name', 'value', 'port', 'domain', 'path', 'secure',
        'expires', 'discard', 'comment', 'comment_url', 'rfc2109')
    attr_dict = {}
    for attr in attrs:
        attr_val = getattr(cookie, attr, None)
        if attr_val is not None:
            attr_dict[attr] = attr_val
    attr_dict['rest'] = getattr(cookie, '_rest', {})
    return attr_dict


class MessageType(type):
    _base_slots = (
        'from_user', 'to_user', 'message', 'create_time', 'local_msg_id',
        'msg_id', '_msg_value')

    def __new__(cls, name, bases, namespace, **kwargs):
        ns = dict(namespace)
        slots = ns.get('__slots__') or ()
        if not bases or bases[0] == object:
            slots = tuple(set(cls._base_slots + tuple(slots)))
        ns['__slots__'] = tuple(slots)
        new_type = type.__new__(cls, name, bases, ns)
        return new_type


def json_dumps(json_data, compact=False, **kwargs):
    if compact:
        return json.dumps(json_data, separators=(',', ':'), **kwargs)
    return json.dumps(json_data, **kwargs)


def xml2dict(xml_str):
    if isinstance(xml_str, bytes):
        xml_str = xml_str.decode()
    if xml_str.startswith('<?xml'):
        xml_str = '<br/>'.join(xml_str.split('<br/>')[1: -1]).replace('\t', '')
    document = parseString(xml_str)
    root_node = document.childNodes[0]
    data = {}
    DOCUMENT_NODE = root_node.DOCUMENT_NODE
    ELEMENT_NODE = root_node.ELEMENT_NODE
    TEXT_NODE = root_node.TEXT_NODE
    DATA_NODE = root_node.CDATA_SECTION_NODE
    ALL_NODE_TYPE = (DOCUMENT_NODE, ELEMENT_NODE, TEXT_NODE, DATA_NODE)

    def extract_node(node_obj):
        node_type = node_obj.nodeType
        if node_type not in ALL_NODE_TYPE:
            return None
        if node_type == ELEMENT_NODE:
            ele_data = {}
            attrs = {attr: val for attr, val in node_obj.attributes.items()}
            if attrs:
                ele_data['__attrs__'] = attrs
            child_nodes = node_obj.childNodes
            if len(child_nodes) == 1 and (
                    child_nodes[0].nodeType in (TEXT_NODE, DATA_NODE)):
                if not ele_data:
                    return extract_node(child_nodes[0])
                ele_data[node_obj.nodeName] = extract_node(child_nodes[0])
            else:
                for sub_node in child_nodes:
                    ele_data[sub_node.nodeName] = extract_node(sub_node)
            return ele_data
        else:
            return node_obj.nodeValue

    data[root_node.nodeName] = extract_node(root_node)
    return data


def dict2xml(data):
    document = Document()

    def create_node(root, node_data):
        for key, val in node_data.items():
            node = document.createElement(key)
            if isinstance(val, dict):
                attrs = val.pop('__attrs__', {})
                for attr, attr_val in attrs.items():
                    node.setAttribute(attr, attr_val)
                create_node(node, val)
            elif isinstance(val, (tuple, list)):
                for sub_data in val:
                    create_node(node, sub_data)
            else:
                node.appendChild(document.createTextNode(str(val)))
            root.appendChild(node)

    create_node(document, data)
    return document.childNodes[0].toxml()


def call_retry(retry_exceptions, retries=3):
    def func_decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            call_retries = kwargs.pop('retries', None)
            once_retries = min(
                call_retries if call_retries is not None else retries, 0)
            loop_count = once_retries + 1
            while loop_count > 0:
                try:
                    return func(*args, **kwargs)
                except retry_exceptions:
                    if once_retries == 0:
                        raise
                    loop_count -= 1
            raise Warning('Retry exceeds {0} times when calling {1}'.format(
                retries, func.__name__))
        return wrapper
    return func_decorator
Apache License 2.0
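Since the record above shows only the helper module, here is a minimal round-trip sketch of its dict2xml/xml2dict pair; the payload is invented for illustration.

# Hypothetical payload: '__attrs__' becomes XML attributes, scalars become
# text nodes, and xml2dict collapses single text children back to values.
payload = {'msg': {'__attrs__': {'type': 'text'}, 'content': 'hello'}}
xml_str = dict2xml(payload)
# -> '<msg type="text"><content>hello</content></msg>'
assert xml2dict(xml_str)['msg']['content'] == 'hello'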
kivy/kivy-designer
designer/components/start_page.py
DesignerStartPage.on_open_down
python
def on_open_down(self, *args): pass
Default Event Handler for 'on_open_down'
https://github.com/kivy/kivy-designer/blob/20343184a28c2851faf0c1ab451d0286d147a441/designer/components/start_page.py#L85-L88
import webbrowser

from designer.utils.utils import get_designer, get_fs_encoding
from kivy.properties import ObjectProperty, StringProperty
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.scrollview import ScrollView


class DesignerLinkLabel(Button):
    link = StringProperty(None)

    def on_release(self, *args):
        if self.link:
            webbrowser.open(self.link)


class RecentItem(BoxLayout):
    path = StringProperty('')
    __events__ = ('on_press', )

    def on_press(self, *args):
        pass


class RecentFilesBox(ScrollView):
    grid = ObjectProperty(None)

    def __init__(self, **kwargs):
        super(RecentFilesBox, self).__init__(**kwargs)

    def add_recent(self, list_files):
        for p in list_files:
            if isinstance(p, bytes):
                p = p.decode(get_fs_encoding())
            recent_item = RecentItem(path=p)
            self.grid.add_widget(recent_item)
            recent_item.bind(on_press=self.btn_release)
            self.grid.height += recent_item.height
        self.grid.height = max(self.grid.height, self.height)

    def btn_release(self, instance):
        d = get_designer()
        d._perform_open(instance.path)


class DesignerStartPage(BoxLayout):
    recent_files_box = ObjectProperty(None)
    __events__ = ('on_open_down', 'on_new_down', 'on_help')
MIT License
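on_open_down is one of the default handlers declared through Kivy's __events__ mechanism. A minimal self-contained sketch of that pattern (DemoPage is an invented name, not part of kivy-designer):

from kivy.uix.boxlayout import BoxLayout

class DemoPage(BoxLayout):
    __events__ = ('on_open_down',)

    def on_open_down(self, *args):
        pass  # default handler: no-op until a binding overrides it

page = DemoPage()
page.bind(on_open_down=lambda *a: print('open pressed'))
page.dispatch('on_open_down')  # fires the default handler and the binding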
goverfl0w/slash-bot
cogs/tags.py
Tags.template
python
async def template(self, ctx: SlashContext, tag_id: typing.Optional[str] = None):
    resp = await self.bot.db.res_sql(
        """SELECT value FROM tags WHERE cmd_id=?""", (ctx.command_id,)
    )
    content = resp[0]["value"]
    if tag_id:
        try:
            msg: discord.Message = await ctx.channel.fetch_message(int(tag_id))
            await ctx.send("Message found, replying", hidden=True)
            return await msg.reply(content)
        except (
            discord.Forbidden,
            discord.HTTPException,
            discord.NotFound,
            TypeError,
            ValueError,
        ):
            await ctx.send(
                "Couldn't find the message to reply to; sending the tag normally.",
                hidden=True,
            )
    await ctx.send(content)
The base function of tags.

:param ctx: SlashContext
:param tag_id: the tag's id
:return:
https://github.com/goverfl0w/slash-bot/blob/b1aac9a5a5376f9f604de0db1aad70e9fc29d3c9/cogs/tags.py#L20-L45
import typing

import discord
from discord.ext import commands
from discord_slash import cog_ext, SlashContext
from discord_slash.utils import manage_commands

from modules.get_settings import get_settings

guild_ids = get_settings("servers")


class Tags(commands.Cog):
    def __init__(self, bot):
        self.bot: commands.Bot = bot
        self.tag_opt = manage_commands.create_option("reply_to", "Message ID to reply.", 3, False)
        self.bot.loop.create_task(self.init_tags())
MIT License
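The interesting part of template() is the fallback: any failure to resolve reply_to degrades to a plain send. The same logic in isolation, with the Discord calls stubbed out (all names below are invented):

def resolve_reply(tag_id, fetch_message):
    try:
        return fetch_message(int(tag_id))   # bad ids raise before any send
    except (TypeError, ValueError, KeyError):
        return None                         # caller falls back to ctx.send(content)

messages = {42: "original message"}
assert resolve_reply("42", messages.__getitem__) == "original message"
assert resolve_reply("not-a-number", messages.__getitem__) is None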
argoproj-labs/argo-client-python
argo/workflows/client/api/cron_workflow_service_api.py
CronWorkflowServiceApi.lint_cron_workflow
python
def lint_cron_workflow(self, namespace, body, **kwargs):
    kwargs['_return_http_data_only'] = True
    return self.lint_cron_workflow_with_http_info(namespace, body, **kwargs)
lint_cron_workflow  # noqa: E501

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True

>>> thread = api.lint_cron_workflow(namespace, body, async_req=True)
>>> result = thread.get()

:param async_req bool: execute request asynchronously
:param str namespace: (required)
:param V1alpha1LintCronWorkflowRequest body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
    be returned without reading/decoding response data. Default is True.
:param _request_timeout: timeout setting for this request. If one
    number provided, it will be total request timeout. It can also be a
    pair (tuple) of (connection, read) timeouts.
:return: V1alpha1CronWorkflow
    If the method is called asynchronously, returns the request thread.
https://github.com/argoproj-labs/argo-client-python/blob/993d684cab39a834770b296e028519cec035c7b5/argo/workflows/client/api/cron_workflow_service_api.py#L442-L465
from __future__ import absolute_import import re import six from argo.workflows.client.api_client import ApiClient from argo.workflows.client.exceptions import ( ApiTypeError, ApiValueError ) class CronWorkflowServiceApi(object): def __init__(self, api_client=None): if api_client is None: api_client = ApiClient() self.api_client = api_client def create_cron_workflow(self, namespace, body, **kwargs): kwargs['_return_http_data_only'] = True return self.create_cron_workflow_with_http_info(namespace, body, **kwargs) def create_cron_workflow_with_http_info(self, namespace, body, **kwargs): local_var_params = locals() all_params = [ 'namespace', 'body' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method create_cron_workflow" % key ) local_var_params[key] = val del local_var_params['kwargs'] if self.api_client.client_side_validation and ('namespace' not in local_var_params or local_var_params['namespace'] is None): raise ApiValueError("Missing the required parameter `namespace` when calling `create_cron_workflow`") if self.api_client.client_side_validation and ('body' not in local_var_params or local_var_params['body'] is None): raise ApiValueError("Missing the required parameter `body` when calling `create_cron_workflow`") collection_formats = {} path_params = {} if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] query_params = [] header_params = {} form_params = [] local_var_files = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/v1/cron-workflows/{namespace}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1alpha1CronWorkflow', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def delete_cron_workflow(self, namespace, name, **kwargs): kwargs['_return_http_data_only'] = True return self.delete_cron_workflow_with_http_info(namespace, name, **kwargs) def delete_cron_workflow_with_http_info(self, namespace, name, **kwargs): local_var_params = locals() all_params = [ 'namespace', 'name', 'delete_options_grace_period_seconds', 'delete_options_preconditions_uid', 'delete_options_preconditions_resource_version', 'delete_options_orphan_dependents', 'delete_options_propagation_policy', 'delete_options_dry_run' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method delete_cron_workflow" % key ) local_var_params[key] = val del local_var_params['kwargs'] if self.api_client.client_side_validation and ('namespace' not in local_var_params or local_var_params['namespace'] is None): raise ApiValueError("Missing the required parameter 
`namespace` when calling `delete_cron_workflow`") if self.api_client.client_side_validation and ('name' not in local_var_params or local_var_params['name'] is None): raise ApiValueError("Missing the required parameter `name` when calling `delete_cron_workflow`") collection_formats = {} path_params = {} if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] if 'name' in local_var_params: path_params['name'] = local_var_params['name'] query_params = [] if 'delete_options_grace_period_seconds' in local_var_params and local_var_params['delete_options_grace_period_seconds'] is not None: query_params.append(('deleteOptions.gracePeriodSeconds', local_var_params['delete_options_grace_period_seconds'])) if 'delete_options_preconditions_uid' in local_var_params and local_var_params['delete_options_preconditions_uid'] is not None: query_params.append(('deleteOptions.preconditions.uid', local_var_params['delete_options_preconditions_uid'])) if 'delete_options_preconditions_resource_version' in local_var_params and local_var_params['delete_options_preconditions_resource_version'] is not None: query_params.append(('deleteOptions.preconditions.resourceVersion', local_var_params['delete_options_preconditions_resource_version'])) if 'delete_options_orphan_dependents' in local_var_params and local_var_params['delete_options_orphan_dependents'] is not None: query_params.append(('deleteOptions.orphanDependents', local_var_params['delete_options_orphan_dependents'])) if 'delete_options_propagation_policy' in local_var_params and local_var_params['delete_options_propagation_policy'] is not None: query_params.append(('deleteOptions.propagationPolicy', local_var_params['delete_options_propagation_policy'])) if 'delete_options_dry_run' in local_var_params and local_var_params['delete_options_dry_run'] is not None: query_params.append(('deleteOptions.dryRun', local_var_params['delete_options_dry_run'])) collection_formats['deleteOptions.dryRun'] = 'multi' header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/v1/cron-workflows/{namespace}/{name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='object', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats) def get_cron_workflow(self, namespace, name, **kwargs): kwargs['_return_http_data_only'] = True return self.get_cron_workflow_with_http_info(namespace, name, **kwargs) def get_cron_workflow_with_http_info(self, namespace, name, **kwargs): local_var_params = locals() all_params = [ 'namespace', 'name', 'get_options_resource_version' ] all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout' ] ) for key, val in six.iteritems(local_var_params['kwargs']): if key not in all_params: raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_cron_workflow" % key ) local_var_params[key] = val del local_var_params['kwargs'] if self.api_client.client_side_validation and ('namespace' not in local_var_params or local_var_params['namespace'] is None): raise ApiValueError("Missing the 
required parameter `namespace` when calling `get_cron_workflow`") if self.api_client.client_side_validation and ('name' not in local_var_params or local_var_params['name'] is None): raise ApiValueError("Missing the required parameter `name` when calling `get_cron_workflow`") collection_formats = {} path_params = {} if 'namespace' in local_var_params: path_params['namespace'] = local_var_params['namespace'] if 'name' in local_var_params: path_params['name'] = local_var_params['name'] query_params = [] if 'get_options_resource_version' in local_var_params and local_var_params['get_options_resource_version'] is not None: query_params.append(('getOptions.resourceVersion', local_var_params['get_options_resource_version'])) header_params = {} form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/v1/cron-workflows/{namespace}/{name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='V1alpha1CronWorkflow', auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats)
Apache License 2.0
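A hedged usage sketch for the generated method above; it assumes a reachable Argo server behind the default ApiClient configuration, so the actual calls are left commented:

from argo.workflows.client.api_client import ApiClient
from argo.workflows.client.api.cron_workflow_service_api import CronWorkflowServiceApi

api = CronWorkflowServiceApi(ApiClient())
# body is a V1alpha1LintCronWorkflowRequest; linting validates without creating.
# cron_wf = api.lint_cron_workflow("argo", body)
# Or asynchronously, as the docstring shows:
# thread = api.lint_cron_workflow("argo", body, async_req=True)
# cron_wf = thread.get()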
google/uncertainty-baselines
uncertainty_baselines/models/bert_dropout.py
_monte_carlo_dropout
python
def _monte_carlo_dropout(inputs: tf.Tensor, dropout_rate: float,
                         use_mc_dropout: bool,
                         channel_wise_dropout: bool) -> tf.Tensor:
    training = True if use_mc_dropout else None
    noise_shape = None
    input_size = len(inputs.shape)

    if input_size not in (3, 4):
        raise ValueError(f'"inputs" shape can only be 3 or 4, got {input_size}.')

    if channel_wise_dropout:
        if input_size == 3:
            noise_shape = [inputs.shape[0], 1, inputs.shape[-1]]
        elif input_size == 4:
            noise_shape = [inputs.shape[0], inputs.shape[1], 1, 1]

    return tf.keras.layers.Dropout(dropout_rate, noise_shape=noise_shape)(
        inputs, training=training)
Implements a Monte Carlo dropout layer callable for the Transformer model.

Args:
  inputs: An input tensor in the BERT encoder. It can be either a 3D layer
    output with shape [batch_size, seq_len, hidden_dim], or a 4D attention
    mask with shape [batch_size, num_head, seq_len, seq_len].
  dropout_rate: Dropout rate.
  use_mc_dropout: Whether to enable Monte Carlo dropout at inference time.
  channel_wise_dropout: Whether to apply structured dropout along the
    dimension of the hidden channels or of the attention heads.

Returns:
  (tf.Tensor) Output of the (structured) Monte Carlo dropout layer.
https://github.com/google/uncertainty-baselines/blob/d37c17c4b08a88d6546bbf299b59127a03398404/uncertainty_baselines/models/bert_dropout.py#L28-L62
import functools
import math
from typing import Any, Dict, Union, Tuple

import tensorflow as tf
from official.modeling import tf_utils
from official.nlp.modeling import layers as bert_layers
from official.nlp.modeling import networks as bert_encoder
Apache License 2.0
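A quick shape check of the helper above (the tensor sizes are arbitrary examples):

import tensorflow as tf

x = tf.random.normal([2, 16, 64])              # [batch_size, seq_len, hidden_dim]
y = _monte_carlo_dropout(x, dropout_rate=0.1,
                         use_mc_dropout=True,        # keep dropout on at inference
                         channel_wise_dropout=True)  # mask whole hidden channels
assert y.shape == x.shape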
cue/greplin-nagios-utils
checklib/src/greplin/nagios.py
percent
python
def percent(value): return "%f%%" % (value * 100)
Formats the given float as a percentage.
https://github.com/cue/greplin-nagios-utils/blob/9a90d98fcd21da23b24b2fe5165d4a0c7dd680a1/checklib/src/greplin/nagios.py#L103-L105
import httplib import json import socket import sys import time import threading UNKNOWN = 3 CRITICAL = 2 WARNING = 1 OK = 0 STATUS_NAME = ['OK', 'WARN', 'CRIT', 'UNKNOWN'] GLOBAL_CONFIG = threading.local() GLOBAL_CONFIG.outfile = sys.stdout def output(msg): GLOBAL_CONFIG.outfile.write(msg) GLOBAL_CONFIG.outfile.write('\n') def wgetWithTimeout(host, port, path, timeout, secure = False): start = time.time() try: if secure: conn = httplib.HTTPSConnection(host, port, timeout=timeout) else: conn = httplib.HTTPConnection(host, port, timeout=timeout) conn.request('GET', path) body = conn.getresponse().read() return time.time() - start, body except (socket.gaierror, socket.error): output("CRIT: Could not connect to %s" % host) exit(CRITICAL) except socket.timeout: output("CRIT: Timed out after %s seconds" % timeout) exit(CRITICAL) def parseJson(text): try: return json.loads(text) except ValueError, e: output('CRIT: %s (text was %r)' % (e, text)) exit(CRITICAL) def parseJsonFile(filename): try: with open(filename) as f: return parseJson(f.read()) except IOError, e: output('UNKNOWN: %s' % e) exit(UNKNOWN) def lookup(source, *keys, **kw): fallback = kw.get('default') try: for key in keys: source = source[key] return source except (KeyError, AttributeError, TypeError): return fallback def statValue(data, *keys, **kw): return float(lookup(data, *keys, **kw))
Apache License 2.0
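For example:

assert percent(0.25) == "25.000000%"   # input is a fraction, not a percentage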
pygame/pygameweb
pygameweb/wiki/views.py
edit
python
def edit(link):
    page = Wiki.for_link(current_session, link)
    if page is None:
        page = Wiki(link=link, title=link, latest=1)
    if page.locked and not current_user.has_role('admin'):
        flash('Wiki page locked.')
        return current_app.login_manager.unauthorized()

    form = WikiForm(obj=page)
    if request.method == 'GET':
        form.changes.data = ''
    elif request.method == 'POST':
        if form.validate_on_submit():
            page.new_version(current_session)
            page.content = form.content.data
            page.changes = form.changes.data
            page.users_id = current_user.id
            current_session.add(page)
            current_session.commit()
            delete_view_wiki_cache(link)
            return redirect(url_for('wiki.index', link=page.link))
    return render_template('wiki/edit.html', form=form, wiki=page)
Edit the wiki page, creating a new page if the link does not yet exist.
https://github.com/pygame/pygameweb/blob/144b2483d090c1ecd9482eb7d47454137210ba9d/pygameweb/wiki/views.py#L184-L214
from flask import ( abort, Blueprint, current_app, redirect, render_template, request, Response, url_for ) from flask_caching import make_template_fragment_key from flask_sqlalchemy_session import current_session import ghdiff from flask_security import login_required, roles_required, current_user from pygameweb.wiki.models import Wiki from pygameweb.wiki.forms import WikiForm from pygameweb.cache import cache wiki_blueprint = Blueprint('wiki', __name__, template_folder='../templates/') def wiki_for(link): result = (current_session .query(Wiki) .filter(Wiki.link == link) .filter(Wiki.latest == 1) .first()) if not result: abort(404) return result @wiki_blueprint.route('/wiki/', methods=['GET']) @wiki_blueprint.route('/wiki/<link>/', methods=['GET']) @wiki_blueprint.route('/wiki/<link>', methods=['GET']) def index(link='index'): action = request.args.get('action', '') if action == 'source': return source(link) elif action == 'history': return history(link) elif action == 'diff': return diff(link) elif action == 'meta': pass elif action == 'links': pass return render_template('wiki/view.html', link=link, wiki_for=wiki_for) @wiki_blueprint.route('/wiki/<link>/revert', methods=['GET']) @login_required @roles_required('members') def revert(link): if wiki_for(link).locked and not current_user.has_role('admin'): flash('Wiki page locked.') return current_app.login_manager.unauthorized() latest = request.args.get('latest', None) if latest is not None: oldone = wiki_for(link) newone = (current_session .query(Wiki) .filter(Wiki.link == link) .filter(Wiki.id == int(latest)) .first()) oldone.latest = 0 newone.latest = 1 current_session.add(newone) current_session.add(oldone) current_session.commit() return redirect(url_for('wiki.index', link=link, id=newone.id)) else: abort(404) @wiki_blueprint.route('/wiki/<link>/source', methods=['GET']) def source(link): the_id = request.args.get('id', '') if the_id: result = (current_session .query(Wiki) .filter(Wiki.id == int(the_id)) .first()) if not result: abort(404) else: result = wiki_for(link) return Response(result.content, mimetype='text/plain') @wiki_blueprint.route('/wiki/<link>/history', methods=['GET']) @login_required @roles_required('members') def history(link): result = wiki_for(link) versions = (current_session .query(Wiki) .filter(Wiki.link == link) .order_by(Wiki.id.desc()) .all()) return render_template('wiki/history.html', versions=versions, wiki=result) @wiki_blueprint.route('/wiki/<link>/diff', methods=['GET']) def diff(link): result = wiki_for(link) new_id = request.args.get('newid', None) old_id = request.args.get('oldid', None) if new_id is not None and old_id is not None: new_id = int(new_id) old_id = int(old_id) results = (current_session .query(Wiki) .filter(Wiki.id.in_([new_id, old_id])) .all()) if not results or len(results) != 2: abort(404) old_wiki = [o for o in results if o.id == old_id][0] new_wiki = [o for o in results if o.id == new_id][0] html_diff = ghdiff.diff(old_wiki.content_rendered, new_wiki.content_rendered) else: abort(404) return render_template('wiki/diff.html', wiki=result, html_diff=html_diff) @wiki_blueprint.route('/wiki/recent.php', methods=['GET']) @wiki_blueprint.route('/wiki/recent', methods=['GET']) def recent(): pages = (current_session .query(Wiki) .order_by(Wiki.datetimeon.desc()) .limit(30) .all()) from itertools import groupby def grouper(item): if not item.datetimeon: return (None, None, None) return item.datetimeon.year, item.datetimeon.month, item.datetimeon.day day_groups = [] for ((year, month, day), 
items) in groupby(pages, grouper): day_groups.append(((year, month, day), list(items))) return render_template('wiki/recent.html', day_groups=day_groups) @wiki_blueprint.route('/wiki/<link>/edit', methods=['GET', 'POST']) @login_required @roles_required('members')
BSD 2-Clause Simplified License
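A hedged sketch of exercising the view with Flask's test client; the app factory and login handling are assumptions about the surrounding project, so the calls stay commented:

# client = app.test_client()                          # 'app' from the pygameweb factory
# client.get('/wiki/MyPage/edit')                     # blank 'changes' field on GET
# client.post('/wiki/MyPage/edit', data={
#     'content': 'New body', 'changes': 'typo fix'})  # new version + cache purge on POST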
gapml/cv
gapcv/utils/img_tools.py
ImgUtils.__init__
python
def __init__(self, root_path='./', tree=1, remove_folder=False):
    self.labels = None
    self.root_path = None
    self.tree = tree
    self.remove_folder = remove_folder
    self._transf = '1to2'
    self._labels_org = []
    self._end = None
    self._end2 = None

    if not os.path.isdir(root_path):
        raise TypeError('String expected for an existing directory path')
    else:
        self.labels = os.listdir(root_path)
        self.root_path = root_path

    if remove_folder:
        answer_ok = False
        while answer_ok is False:
            try:
                warning = input('Warning! this will delete your image dataset. Are you sure? [Yes/no]: ')
                warning = warning[0].lower()
                if warning in ('y', 'n'):
                    answer_ok = True
            except IndexError:
                # Empty input: ask again.
                continue
        if warning == 'y':
            shutil.rmtree(self.root_path)
            print('Your files were deleted')
Class constructor.

:param root_path: main image folder root
:param tree: type of folder tree
:param remove_folder: whether to delete the image folder from disk
https://github.com/gapml/cv/blob/9d513f8470a851b0f42396ecbe3c8bfaac95882a/gapcv/utils/img_tools.py#L35-L69
import os
import random
import shutil


class ImgUtils(object):
Apache License 2.0
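A small runnable sketch against a throwaway directory (so the remove_folder prompt is never triggered):

import os
import tempfile

root = tempfile.mkdtemp()
os.mkdir(os.path.join(root, 'cats'))

utils = ImgUtils(root_path=root)   # labels are simply the top-level folder names
assert utils.labels == ['cats']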
jdswinbank/comet
comet/service/broker.py
Options._check_for_ivoid
python
def _check_for_ivoid(self):
    if not self["local_ivo"] and (self["receive"] or self["broadcast"]):
        self.parser.error("IVOA identifier required (--local-ivo).")
Ensure that an IVOID has been supplied if broadcasting or receiving.
https://github.com/jdswinbank/comet/blob/f4ea4c8598dbb0d7adf7a5e6ffbf31c7ef194d13/comet/service/broker.py#L191-L194
import os.path from tempfile import gettempdir from ipaddress import ip_network from twisted.application.service import MultiService from twisted.internet import reactor from twisted.internet.task import LoopingCall from twisted.plugin import getPlugins import comet import comet.log as log from comet.constants import DEFAULT_SUBMIT_PORT, DEFAULT_SUBSCRIBE_PORT from comet.service.broadcaster import makeBroadcasterService from comet.service.subscriber import makeSubscriberService from comet.service.receiver import makeReceiverService from comet.utility import Event_DB, BaseOptions, valid_ivoid, valid_xpath from comet.utility import coerce_to_client_endpoint, coerce_to_server_endpoint from comet.validator import CheckIVOID, CheckPreviouslySeen, CheckSchema import comet.plugins from comet.icomet import IHandler, IHasOptions from comet.handler import SpawnCommand, EventRelay __all__ = ["makeService", "Options"] MAX_AGE = 30.0 * 24 * 60 * 60 PRUNE_INTERVAL = 6 * 60 * 60 BCAST_TEST_INTERVAL = 3600 class Options(BaseOptions): PROG = "twistd [options] comet" def _configureParser(self): std_group = self.parser.add_argument_group( "Standard arguments", "Global options affecting " "broker operation." ) std_group.add_argument( "--local-ivo", type=valid_ivoid, help="IVOA identifier for this system. " "Required if using --receive or --broadcast.", ) std_group.add_argument( "--eventdb", default=gettempdir(), help="Event database root [default=%(default)s].", ) rcv_group = self.parser.add_argument_group( "Event Receiver", "Receive events submitted " "by remote authors." ) rcv_group.add_argument( "--receive", default=None, const=f"tcp:{DEFAULT_SUBMIT_PORT}", nargs="?", action="append", type=lambda ep: coerce_to_server_endpoint(reactor, ep), help="Add an endpoint for receiving events.", ) rcv_group.add_argument( "--receive-whitelist", default=[ip_network("0.0.0.0/0")], nargs="*", type=ip_network, help="Networks from which to accept " "event submissions [default=accept from " "everywhere].", ) bcast_group = self.parser.add_argument_group( "Event Broadcaster", "Broadcast events to " "remote subscribers." ) bcast_group.add_argument( "--broadcast", default=None, const=f"tcp:{DEFAULT_SUBSCRIBE_PORT}", nargs="?", action="append", type=lambda ep: coerce_to_server_endpoint(reactor, ep), help="Add an endpoint for broadcasting events.", ) bcast_group.add_argument( "--broadcast-test-interval", default=BCAST_TEST_INTERVAL, type=int, help="Interval between test event broadcasts " "(seconds) [default=%(default)s].", ) bcast_group.add_argument( "--broadcast-whitelist", default=[ip_network("0.0.0.0/0")], nargs="*", type=ip_network, help="Networks from which to accept " "subscription requests [default=accept " "from everywhere].", ) sub_group = self.parser.add_argument_group( "Event Subscriber", "Subscribe to event streams" " from remote brokers." 
) sub_group.add_argument( "--subscribe", default=None, action="append", type=lambda ep: coerce_to_client_endpoint( reactor, ep, DEFAULT_SUBSCRIBE_PORT ), help="Add a remote broker to which " "to subscribe.", ) sub_group.add_argument( "--filter", default=None, action="append", dest="filters", type=valid_xpath, help="XPath filter to be applied to events " "received from remote brokers.", ) proc_group = self.parser.add_argument_group( "Event Processors", "Define 'event handlers' " "which are applied to all " "events processed by this " "system.", ) proc_group.add_argument( "--cmd", default=None, action="append", dest="handlers", type=SpawnCommand, help="External command to spawn when an " "event is received.", ) for plugin in getPlugins(IHandler, comet.plugins): proc_group.add_argument( f"--{plugin.name}", help=f"Enable the {plugin.name} plugin.", action="append_const", const=plugin.name, dest="plugins", ) if IHasOptions.providedBy(plugin): for name, default, description in plugin.get_options(): proc_group.add_argument( f"--{plugin.name}-{name}", default=default, help=description ) def _checkOptions(self): self._check_for_ivoid() self._configure_plugins() def _configure_plugins(self): if self._config.handlers is None: self._config.handlers = [] if self._config.plugins: for plugin in getPlugins(IHandler, comet.plugins): if plugin.name in self._config.plugins: if IHasOptions.providedBy(plugin): for name, _, _ in plugin.get_options(): plugin.set_option( name, getattr( self._config, f"{plugin.name}-{name}".replace("-", "_"), ), ) self._config.handlers.append(plugin)
BSD 2-Clause Simplified License
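The rule above, restated on plain values (the endpoint string is illustrative):

config = {"local_ivo": None, "receive": ["tcp:8098"], "broadcast": None}
needs_ivoid = not config["local_ivo"] and (config["receive"] or config["broadcast"])
assert needs_ivoid   # Options._check_for_ivoid would call parser.error(...) here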
opencord/xos
lib/xos-genx/xosgenx/generator.py
XOSProcessor._read_input_from_files
python
def _read_input_from_files(files):
    line_map = []
    input = ""
    for fname in files:
        with open(fname) as infile:
            line_map.append((len(input.split("\n")), fname))
            input += infile.read()
    return (input, line_map)
Read the files and return the combined text read. Also returns a list of (line_number, filename) tuples that tell which starting line corresponds to each file.
https://github.com/opencord/xos/blob/e52d3ea83d3a26b8d0a72cccce7898258926f5eb/lib/xos-genx/xosgenx/generator.py#L81-L93
from __future__ import absolute_import, print_function import os import jinja2 import plyxproto.parser as plyxproto import yaml from colorama import Fore import sys from . import jinja2_extensions from .proto2xproto import Proto2XProto from .xos2jinja import XOS2Jinja from .validator import XProtoValidator loader = jinja2.PackageLoader(__name__, "templates") env = jinja2.Environment(loader=loader) class XOSProcessorArgs: default_rev = False default_output = None default_attic = None default_kvpairs = None default_write_to_file = None default_dest_file = None default_dest_extension = None default_target = None default_checkers = None default_verbosity = ( 0 ) default_include_models = ( [] ) default_include_apps = [] default_strict_validation = False default_lint = False def __init__(self, **kwargs): self.rev = XOSProcessorArgs.default_rev self.output = XOSProcessorArgs.default_output self.attic = XOSProcessorArgs.default_attic self.kvpairs = XOSProcessorArgs.default_kvpairs self.verbosity = XOSProcessorArgs.default_verbosity self.write_to_file = XOSProcessorArgs.default_write_to_file self.default_dest_file = XOSProcessorArgs.default_dest_file self.default_dest_extension = XOSProcessorArgs.default_dest_extension self.default_target = XOSProcessorArgs.default_target self.default_checkers = XOSProcessorArgs.default_target self.include_models = XOSProcessorArgs.default_include_models self.include_apps = XOSProcessorArgs.default_include_apps self.strict_validation = XOSProcessorArgs.default_strict_validation self.lint = XOSProcessorArgs.default_lint for (k, v) in kwargs.items(): setattr(self, k, v) class XOSProcessor: @staticmethod
Apache License 2.0
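A behaviour sketch with two throwaway files, showing what the returned line map means:

import tempfile

paths = []
for body in ("a\nb\n", "c\n"):
    f = tempfile.NamedTemporaryFile("w", suffix=".xproto", delete=False)
    f.write(body)
    f.close()
    paths.append(f.name)

text, line_map = XOSProcessor._read_input_from_files(paths)
assert text == "a\nb\nc\n"
assert [n for n, _ in line_map] == [1, 3]   # combined line where each file starts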
cereja-project/cereja
cereja/mltools/split_data.py
Corpus.split_data
python
def split_data(self, test_max_size: int = None, source_vocab_size: int = None,
               target_vocab_size: int = None, shuffle=True, take_parallel_data=True,
               take_corpus_instances=False, legacy_test=None):
    self.source.reset_freq()
    self.target.reset_freq()
    train = []
    test = []

    if legacy_test is not None:
        test = Corpus(*self.distinct_from_parallel(legacy_test),
                      source_name=self.source_language,
                      target_name=self.target_language)

    test_max_size = test_max_size if test_max_size is not None and isinstance(
        test_max_size, (int, float)) else len(self.source.data) - self.n_train
    if source_vocab_size is not None or target_vocab_size is not None:
        data = list(self._get_vocab_data(source_vocab_size=source_vocab_size,
                                         target_vocab_size=target_vocab_size))
    else:
        data = list(zip(self.source.data, self.target.data))

    if shuffle:
        random.shuffle(data)

    for x, y in data:
        if x == '' or y == '':
            continue
        if legacy_test is not None:
            if self.source.preprocess(x) in test.source.phrases_freq:
                continue
        if (self._can_go_test(x, y) and len(test) < test_max_size) and legacy_test is None:
            test.append([x, y])
            self._update_filters(x, y)
            continue
        train.append([x, y])

    if take_parallel_data is False:
        return (*get_cols(train), *get_cols(test))
    if take_corpus_instances is True:
        train = self.load_from_parallel_data(train, self.source_language, self.target_language)
        test = self.load_from_parallel_data(test, self.source_language, self.target_language)
        return train, test
    return train, test
Guarantees test data without data identical to training and only with
vocabulary that exists in training.

:param test_max_size: int = max examples on test data
:param source_vocab_size: int = restrict most common vocab
:param target_vocab_size: int = restrict most common vocab
:param shuffle: bool = randomize
:param take_parallel_data: bool = zipped data if true else return (x_train, y_train, x_test, y_test)
:param take_corpus_instances: bool = return new instances for train data and test data
:param legacy_test: List[Tuple[str, str]] = parallel data
https://github.com/cereja-project/cereja/blob/c5000ddb3f640dc6089f991f36808f9f5e09c0f0/cereja/mltools/split_data.py#L229-L283
import warnings from cereja import FileIO from cereja.array import get_cols from cereja.mltools.pln import LanguageData import random import csv from cereja.system import Path __all__ = ['Corpus'] class Corpus(object): def __init__(self, source_data, target_data, source_name=None, percent_train=0.8, target_name=None, stop_words=(), punctuation='!?,.', to_lower=True, is_remove_punctuation=True, is_remove_stop_words=True, is_remove_accent=False, is_destructive=False): self.source = LanguageData(source_data, name=source_name, stop_words=stop_words, punctuation=punctuation, to_lower=to_lower, is_remove_punctuation=is_remove_punctuation, is_remove_stop_words=is_remove_stop_words, is_remove_accent=is_remove_accent, is_destructive=is_destructive) self.target = LanguageData(target_data, name=target_name, stop_words=stop_words, punctuation=punctuation, to_lower=to_lower, is_remove_punctuation=is_remove_punctuation, is_remove_stop_words=is_remove_stop_words, is_remove_accent=is_remove_accent, is_destructive=is_destructive) self._percent_train = percent_train self._n_train = self.n_train self._valid_parallel_data(self.source.data, self.target.data) def __iter__(self): return zip(self.source, self.target) def __len__(self): return len(self.source) def __repr__(self): return f'Corpus(examples: {len(self)} - source_vocab_size: {self.source.vocab_size} - target_vocab_size:{self.target.vocab_size})' def __getitem__(self, item): if isinstance(item, int): return self.source.data[item], self.target.data[item] return list(zip(self.source.data[item], self.target.data[item])) @property def source_language(self): return self.source.config.name @property def target_language(self): return self.target.config.name @property def config(self): return {'source': self.source.config.get(), 'target': self.source.config.get()} def set_config(self, **kwargs): source_config = kwargs.get('source') target_config = kwargs.get('target') if source_config: self.source.config.set_config(**source_config) if target_config: self.target.config.set_config(**target_config) @staticmethod def is_parallel(data): try: for x, y in data: if isinstance(x, (str, int, float)) and isinstance(y, (str, int, float)): return True break except: pass return False @classmethod def distinct_from_parallel(cls, data): return get_cols(data) @classmethod def load_from_parallel_data(cls, data, source_name: str = None, target_name: str = None, **kwargs): if cls.is_parallel(data): source_data, target_data = cls.distinct_from_parallel(data) return cls(source_data, target_data, source_name=source_name, target_name=target_name, **kwargs) raise ValueError("isn't valid parallel data") @property def n_train(self): return int(self._percent_train * len(self.source.data)) @property def n_test(self): return len(self.source.data) - self.n_train def _can_go_test(self, x, y): x = self.source.preprocess(x) y = self.target.preprocess(y) if self.source.phrases_freq.get(x) == 1 and len(x.split()) >= 4: x, y = x.split(), y.split() for i in x: if self.source.words_freq.get(i) <= x.count(i): return False for i in y: if self.target.words_freq.get(i) <= y.count(i): return False return True return False def _valid_parallel_data(self, x, y): assert len(x) == len(y), f"Size of {self.source_language} ({len(x)}) != {self.target_language} ({len(y)})" def _update_filters(self, x, y): x = self.source.preprocess(x) y = self.target.preprocess(y) for i in x.split(): self.source.words_freq.subtract([i]) for i in y.split(): self.target.words_freq.subtract([i]) 
self.source.phrases_freq.subtract([x]) self.target.phrases_freq.subtract([y]) def _get_vocab_data(self, source_vocab_size: int = None, target_vocab_size: int = None, order='most_common'): source_vocab_data = {} target_vocab_data = {} if source_vocab_size is not None: source_vocab_data = self.source.sample_words_freq(max_items=source_vocab_size, order=order) if target_vocab_size is not None: target_vocab_data = self.target.sample_words_freq(max_items=target_vocab_size, order=order) for x, y in zip(self.source.data, self.target.data): if source_vocab_size: if not all(list(map(lambda w: w in source_vocab_data, self.source.preprocess(x).split()))): continue if target_vocab_size: if not all(list(map(lambda w: w in target_vocab_data, self.target.preprocess(y).split()))): continue yield [x, y] def save(self, save_on_dir: str, take_split: bool = True, test_max_size: int = None, source_vocab_size: int = None, target_vocab_size: int = None, shuffle=True, prefix=None, ext='align', **kwargs): save_on_dir = Path(save_on_dir) if take_split: x_train, y_train, x_test, y_test = self.split_data(test_max_size=test_max_size, source_vocab_size=source_vocab_size, target_vocab_size=target_vocab_size, take_parallel_data=False, shuffle=shuffle) train_prefix, test_prefix = (f'{prefix}_train', f'{prefix}_test') if prefix is not None else ( 'train', 'test') data_to_save = ((train_prefix, x_train, y_train), (test_prefix, x_test, y_test)) else: data_to_save = ((prefix, self.source.data, self.target.data),) for prefix, x, y in data_to_save: save_on = save_on_dir.join(f'{prefix}_{self.source_language}.{ext.strip(".")}') FileIO.create(save_on, data=x).save(**kwargs) save_on = save_on_dir.join(f'{prefix}_{self.target_language}.{ext.strip(".")}') FileIO.create(save_on, data=y).save(**kwargs) @classmethod def load_corpus_from_csv(cls, path_: str, src_col_name: str, trg_col_name: str, source_name=None, target_name=None): csv_read = csv.DictReader(FileIO.load(path_).data) src_data = [] trg_data = [] for i in csv_read: for col_name in (src_col_name, trg_col_name): if col_name not in i: raise ValueError(f"Not found col <{col_name}> in {list(i.keys())}") src_data.append(i[src_col_name]) trg_data.append(i[trg_col_name]) return cls(src_data, trg_data, source_name=source_name, target_name=target_name)
MIT License
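A toy usage sketch (a real corpus would be far larger; with four pairs the test split will usually come back empty because of the frequency guards in _can_go_test):

src = ["hello world", "how are you", "good morning", "see you soon"]
trg = ["ola mundo", "como vai voce", "bom dia", "ate logo"]

corpus = Corpus(src, trg, source_name="en", target_name="pt")
train, test = corpus.split_data(test_max_size=1)
assert len(train) + len(test) == len(src)   # every pair lands in one split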
kymatio/kymatio
kymatio/scattering3d/backend/tensorflow_backend.py
modulus_rotation
python
def modulus_rotation(x, module):
    if module is None:
        module = tf.zeros_like(x, tf.float32)
    else:
        module = module ** 2
    module += tf.abs(x) ** 2
    return tf.sqrt(module)
Used for computing rotation invariant scattering transform coefficients.

Parameters
----------
x : tensor
    Size (batchsize, M, N, O).
module : tensor
    Tensor that holds the overall sum.

Returns
-------
output : tensor
    Tensor of the same size as input_array. It holds the output of the
    operation::

        $\\sqrt{\\sum_m (\\text{input}_\\text{array} \\star \\psi_{j,l,m})^2}$

    which is covariant to 3D translations and rotations.
https://github.com/kymatio/kymatio/blob/38cead012d1b134843a1dd0d5ea160042037c7da/kymatio/scattering3d/backend/tensorflow_backend.py#L30-L56
import tensorflow as tf
import numpy as np

from collections import namedtuple

BACKEND_NAME = 'tensorflow'


def complex_modulus(x):
    modulus = tf.abs(x)
    return modulus
BSD 3-Clause New or Revised License
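The accumulation pattern the docstring describes, with random complex tensors standing in for wavelet-filtered signals:

import tensorflow as tf

responses = [tf.complex(tf.random.normal([1, 8, 8, 8]),
                        tf.random.normal([1, 8, 8, 8])) for _ in range(3)]
module = None
for x in responses:                  # running sqrt(sum_m |x_m|^2)
    module = modulus_rotation(x, module)
assert module.shape == (1, 8, 8, 8)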
juju/charm-helpers
charmhelpers/contrib/openstack/deferred_events.py
get_deferred_events
python
def get_deferred_events():
    events = []
    for _, event in deferred_events():
        events.append(event)
    return events
Return a list of deferred events requested by the charm and packages.

:returns: List of deferred events
:rtype: List[ServiceEvent]
https://github.com/juju/charm-helpers/blob/25b740578385d15b38f11bed8e4b6e732bdfb7c6/charmhelpers/contrib/openstack/deferred_events.py#L225-L234
import datetime import glob import yaml import os import time import uuid import charmhelpers.contrib.openstack.policy_rcd as policy_rcd import charmhelpers.core.hookenv as hookenv import charmhelpers.core.host as host import charmhelpers.core.unitdata as unitdata import subprocess DEFERRED_EVENTS_DIR = policy_rcd.POLICY_DEFERRED_EVENTS_DIR class ServiceEvent(): def __init__(self, timestamp, service, reason, action, policy_requestor_name=None, policy_requestor_type=None): self.timestamp = timestamp self.service = service self.reason = reason self.action = action if policy_requestor_name: self.policy_requestor_name = policy_requestor_name else: self.policy_requestor_name = hookenv.service_name() if policy_requestor_type: self.policy_requestor_type = policy_requestor_type else: self.policy_requestor_type = 'charm' def __eq__(self, other): for attr in vars(self): if getattr(self, attr) != getattr(other, attr): return False return True def matching_request(self, other): for attr in ['service', 'action', 'reason']: if getattr(self, attr) != getattr(other, attr): return False return True @classmethod def from_dict(cls, data): return cls( data['timestamp'], data['service'], data['reason'], data['action'], data.get('policy_requestor_name'), data.get('policy_requestor_type')) def deferred_events_files(): return glob.glob('{}/*.deferred'.format(DEFERRED_EVENTS_DIR)) def read_event_file(file_name): with open(file_name, 'r') as f: contents = yaml.safe_load(f) event = ServiceEvent( contents['timestamp'], contents['service'], contents['reason'], contents['action'], policy_requestor_name=contents.get('policy_requestor_name'), policy_requestor_type=contents.get('policy_requestor_type')) return event def deferred_events(): events = [] for defer_file in deferred_events_files(): events.append((defer_file, read_event_file(defer_file))) return events def duplicate_event_files(event): duplicates = [] for event_file, existing_event in deferred_events(): if event.matching_request(existing_event): duplicates.append(event_file) return duplicates def get_event_record_file(policy_requestor_type, policy_requestor_name): file_name = '{}/{}-{}-{}.deferred'.format( DEFERRED_EVENTS_DIR, policy_requestor_type, policy_requestor_name, uuid.uuid1()) return file_name def save_event(event): requestor_name = hookenv.service_name() requestor_type = 'charm' init_policy_log_dir() if duplicate_event_files(event): hookenv.log( "Not writing new event, existing event found. {} {} {}".format( event.service, event.action, event.reason), level="DEBUG") else: record_file = get_event_record_file( policy_requestor_type=requestor_type, policy_requestor_name=requestor_name) with open(record_file, 'w') as f: data = { 'timestamp': event.timestamp, 'service': event.service, 'action': event.action, 'reason': event.reason, 'policy_requestor_type': requestor_type, 'policy_requestor_name': requestor_name} yaml.dump(data, f) def clear_deferred_events(svcs, action): for defer_file in deferred_events_files(): deferred_event = read_event_file(defer_file) if deferred_event.service in svcs: os.remove(defer_file) def init_policy_log_dir(): if not os.path.exists(DEFERRED_EVENTS_DIR): os.mkdir(DEFERRED_EVENTS_DIR)
Apache License 2.0
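Typical hook-side usage of the helper above:

for event in get_deferred_events():
    # Each ServiceEvent records what was deferred and why.
    print(event.service, event.action, event.reason)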
daeilkim/refinery
refinery/bnpy/bnpy-dev/demodata/DeadLeaves.py
makeImgPatchPrototype
python
def makeImgPatchPrototype(D, compID):
    # Patches are square, so the side length is the integer square root of D;
    # explicit int casts and floor division keep this runnable on Python 3
    # (the original relied on Python 2 integer division).
    P = int(np.sqrt(D))
    Xprototype = np.zeros((P, P))
    if compID % 4 == 0:
        Xprototype[:P // 2] = 1.0
        Xprototype = np.rot90(Xprototype, compID // 4)
    if compID % 4 == 1:
        Xprototype[np.tril_indices(P)] = 1
        Xprototype = np.rot90(Xprototype, (compID - 1) // 4)
    if compID % 4 == 2:
        Xprototype[np.tril_indices(P, 2)] = 1
        Xprototype = np.rot90(Xprototype, (compID - 2) // 4)
    if compID % 4 == 3:
        Xprototype[np.tril_indices(P, -2)] = 1
        Xprototype = np.rot90(Xprototype, (compID - 3) // 4)
    return Xprototype
Create image patch prototype for specific component.

Returns
-------
Xprototype : sqrt(D) x sqrt(D) matrix
https://github.com/daeilkim/refinery/blob/0d5de8fc3d680a2c79bd0e9384b506229787c74f/refinery/bnpy/bnpy-dev/demodata/DeadLeaves.py#L56-L77
import scipy.linalg import numpy as np from bnpy.data import XData, MinibatchIterator def get_short_name(): return 'DeadLeavesD%d' % (D) def get_data_info(): return 'Dead Leaves Data. K=%d. D=%d.' % (K,D) def get_data(seed=8675309, nObsTotal=25000, **kwargs): X, TrueZ = generateData( seed, nObsTotal) Data = XData(X=X, TrueZ=TrueZ) Data.summary = get_data_info() return Data def get_minibatch_iterator(seed=8675309, nObsTotal=25000, **kwargs): X, TrueZ = generateData(seed, nObsTotal) Data = XData(X=X, TrueZ=TrueZ) DataIterator = MinibatchIterator(Data, **kwargs) DataIterator.summary = get_data_info() return DataIterator def makeTrueParams(Din): global K, D D = Din K = 8 global w global Mu global Sigma global cholSigma w = np.ones(K) w = w / np.sum(w) Mu = np.zeros((K,D)) Sigma = np.zeros((K,D,D)) cholSigma = np.zeros(Sigma.shape) for k in xrange(K): Sigma[k] = makeImgPatchCovMatForComp(D, k) cholSigma[k] = scipy.linalg.cholesky(Sigma[k])
MIT License
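A quick check of the prototype shapes (D must be a perfect square; 64 gives an 8x8 patch):

proto = makeImgPatchPrototype(64, 0)
assert proto.shape == (8, 8)
assert proto[:4].all() and not proto[4:].any()   # compID 0: upper half-plane edge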
dials/dials
algorithms/refinement/parameterisation/prediction_parameters.py
XYPhiPredictionParameterisation._xl_unit_cell_derivatives
python
def _xl_unit_cell_derivatives(
    self, isel, parameterisation=None, dB_dxluc_p=None, reflections=None
):
    return self._xl_derivatives(
        isel, dB_dxluc_p, b_matrix=False, parameterisation=parameterisation
    )
helper function to extend the derivatives lists by derivatives of the crystal unit cell parameterisations
https://github.com/dials/dials/blob/a2cb71bf410e179b92554bcce2e21388e1dc25d1/algorithms/refinement/parameterisation/prediction_parameters.py#L848-L855
from collections import namedtuple from scitbx import matrix, sparse from dials.algorithms.refinement import DialsRefineConfigError from dials.array_family import flex ParamSet = namedtuple( "ParamSet", ["beam_param", "xl_ori_param", "xl_uc_param", "det_param", "gonio_param"], ) class PredictionParameterisation: def __init__( self, experiments, detector_parameterisations=None, beam_parameterisations=None, xl_orientation_parameterisations=None, xl_unit_cell_parameterisations=None, goniometer_parameterisations=None, ): if detector_parameterisations is None: detector_parameterisations = [] if beam_parameterisations is None: beam_parameterisations = [] if xl_orientation_parameterisations is None: xl_orientation_parameterisations = [] if xl_unit_cell_parameterisations is None: xl_unit_cell_parameterisations = [] if goniometer_parameterisations is None: goniometer_parameterisations = [] self._experiments = experiments self._detector_parameterisations = detector_parameterisations self._beam_parameterisations = beam_parameterisations self._xl_orientation_parameterisations = xl_orientation_parameterisations self._xl_unit_cell_parameterisations = xl_unit_cell_parameterisations self._goniometer_parameterisations = goniometer_parameterisations self._update() def _update(self): self._length = self._len() if self._length == 0: raise DialsRefineConfigError("There are no free parameters for refinement") e2bp = { ids: i for i, p in enumerate(self._beam_parameterisations) for ids in p.get_experiment_ids() } e2xop = { ids: i for i, p in enumerate(self._xl_orientation_parameterisations) for ids in p.get_experiment_ids() } e2xucp = { ids: i for i, p in enumerate(self._xl_unit_cell_parameterisations) for ids in p.get_experiment_ids() } e2dp = { ids: i for i, p in enumerate(self._detector_parameterisations) for ids in p.get_experiment_ids() } e2gp = { ids: i for i, p in enumerate(self._goniometer_parameterisations) for ids in p.get_experiment_ids() } self._exp_to_param = { i: ParamSet( e2bp.get(i), e2xop.get(i), e2xucp.get(i), e2dp.get(i), e2gp.get(i) ) for i, _ in enumerate(self._experiments) } def get_detector_parameterisations(self): return self._detector_parameterisations def get_beam_parameterisations(self): return self._beam_parameterisations def get_crystal_orientation_parameterisations(self): return self._xl_orientation_parameterisations def get_crystal_unit_cell_parameterisations(self): return self._xl_unit_cell_parameterisations def get_goniometer_parameterisations(self): return self._goniometer_parameterisations def _len(self): length = 0 for model in self._detector_parameterisations: length += model.num_free() for model in self._beam_parameterisations: length += model.num_free() for model in self._xl_orientation_parameterisations: length += model.num_free() for model in self._xl_unit_cell_parameterisations: length += model.num_free() for model in self._goniometer_parameterisations: length += model.num_free() return length def __len__(self): return self._length def get_param_vals(self): global_p_list = [] if self._detector_parameterisations: det_plists = [x.get_param_vals() for x in self._detector_parameterisations] params = [x for l in det_plists for x in l] global_p_list.extend(params) if self._beam_parameterisations: src_plists = [x.get_param_vals() for x in self._beam_parameterisations] params = [x for l in src_plists for x in l] global_p_list.extend(params) if self._xl_orientation_parameterisations: xlo_plists = [ x.get_param_vals() for x in self._xl_orientation_parameterisations ] params = [x 
for l in xlo_plists for x in l] global_p_list.extend(params) if self._xl_unit_cell_parameterisations: xluc_plists = [ x.get_param_vals() for x in self._xl_unit_cell_parameterisations ] params = [x for l in xluc_plists for x in l] global_p_list.extend(params) if self._goniometer_parameterisations: gon_plists = [ x.get_param_vals() for x in self._goniometer_parameterisations ] params = [x for l in gon_plists for x in l] global_p_list.extend(params) return global_p_list def get_param_names(self): param_names = [] for p in self._detector_parameterisations: prefix = p.model_identifier param_names.extend([prefix + x for x in p.get_param_names()]) for p in self._beam_parameterisations: prefix = p.model_identifier param_names.extend([prefix + x for x in p.get_param_names()]) for p in self._xl_orientation_parameterisations: prefix = p.model_identifier param_names.extend([prefix + x for x in p.get_param_names()]) for p in self._xl_unit_cell_parameterisations: prefix = p.model_identifier param_names.extend([prefix + x for x in p.get_param_names()]) for p in self._goniometer_parameterisations: prefix = p.model_identifier param_names.extend([prefix + x for x in p.get_param_names()]) return param_names def _modify_parameters(self, vals, set_vals=False, set_esds=False, set_fix=False): assert [set_vals, set_esds, set_fix].count(True) == 1 assert len(vals) == len(self) it = iter(vals) for model in ( self._detector_parameterisations + self._beam_parameterisations + self._xl_orientation_parameterisations + self._xl_unit_cell_parameterisations + self._goniometer_parameterisations ): tmp = [next(it) for i in range(model.num_free())] if set_esds: model.set_param_esds(tmp) elif set_vals: model.set_param_vals(tmp) elif set_fix: current_fixes = model.get_fixed() free_indices = [i for i, e in enumerate(current_fixes) if not e] assert len(free_indices) == model.num_free() for i, fix in zip(free_indices, tmp): if fix: current_fixes[i] = True model.set_fixed(current_fixes) def set_param_vals(self, vals): self._modify_parameters(vals, set_vals=True) def set_param_esds(self, esds): self._modify_parameters(esds, set_esds=True) def fix_params(self, fix): self._modify_parameters(fix, set_fix=True) self._update() def calculate_model_state_uncertainties(self, var_cov): i = 0 for model in ( self._detector_parameterisations + self._beam_parameterisations + self._xl_orientation_parameterisations + self._xl_unit_cell_parameterisations + self._goniometer_parameterisations ): n = model.num_free() sub = var_cov.matrix_copy_block(i, i, n, n) state_covs = model.calculate_state_uncertainties(sub) if state_covs is None: continue if len(state_covs) == 1: model.set_state_uncertainties(state_covs[0]) else: for i_state, state_cov in enumerate(state_covs): model.set_state_uncertainties(state_cov, multi_state_elt=i_state) i += n def get_gradients(self, reflections, callback=None): self._nref = len(reflections) self._D = flex.mat3_double(self._nref) self._s0 = flex.vec3_double(self._nref) self._U = flex.mat3_double(self._nref) self._B = flex.mat3_double(self._nref) self._axis = flex.vec3_double(self._nref) self._fixed_rotation = flex.mat3_double(self._nref) self._setting_rotation = flex.mat3_double(self._nref) self._experiment_to_idx = [] for iexp, exp in enumerate(self._experiments): sel = reflections["id"] == iexp isel = sel.iselection() self._experiment_to_idx.append(isel) subref = reflections.select(sel) states = self._get_model_data_for_experiment(exp, subref) self._D.set_selected(sel, states["D"]) self._s0.set_selected(sel, 
states["s0"]) self._U.set_selected(sel, states["U"]) self._B.set_selected(sel, states["B"]) if exp.goniometer: self._setting_rotation.set_selected(sel, states["S"]) self._axis.set_selected(sel, exp.goniometer.get_rotation_axis_datum()) self._fixed_rotation.set_selected( sel, exp.goniometer.get_fixed_rotation() ) self._h = reflections["miller_index"].as_vec3_double() self._UB = self._U * self._B self._s1 = reflections["s1"] self._pv = self._D * self._s1 u, v, w = self._pv.parts() assert w.all_ne(0) self._w_inv = 1.0 / w self._u_w_inv = u * self._w_inv self._v_w_inv = v * self._w_inv self._iparam = 0 self._local_setup(reflections) results = [] results = self._grads_detector_loop(reflections, results, callback=callback) results = self._grads_beam_loop(reflections, results, callback=callback) results = self._grads_xl_orientation_loop( reflections, results, callback=callback ) results = self._grads_xl_unit_cell_loop(reflections, results, callback=callback) results = self._grads_goniometer_loop(reflections, results, callback=callback) return results @staticmethod def _extend_gradient_vectors(results, m, n, keys=("dX_dp", "dY_dp", "dZ_dp")): new_results = [] for i in range(n): result = {} for key in keys: result[key] = flex.double(m, 0.0) new_results.append(result) results.extend(new_results) return results def _get_model_data_for_experiment(self, experiment, reflections): D = flex.mat3_double(len(reflections)) panels = reflections["panel"] for ipanel, D_mat in enumerate([p.get_D_matrix() for p in experiment.detector]): sel = panels == ipanel D.set_selected(sel, D_mat) result = { "s0": experiment.beam.get_s0(), "U": matrix.sqr(experiment.crystal.get_U()), "B": matrix.sqr(experiment.crystal.get_B()), "D": D, } if experiment.goniometer: result["S"] = matrix.sqr(experiment.goniometer.get_setting_rotation()) return result def _detector_derivatives( self, isel, panel_id, parameterisation=None, dd_ddet_p=None, reflections=None ): pv = self._pv.select(isel) D = self._D.select(isel) if dd_ddet_p is None: dd_ddet_p = parameterisation.get_ds_dp( multi_state_elt=panel_id, use_none_as_null=True ) dd_ddet_p = [ None if e is None else flex.mat3_double(len(D), e.elems) for e in dd_ddet_p ] dpv_ddet_p = [ der if der is None else (D * (der * -1.0)) * pv for der in dd_ddet_p ] return dpv_ddet_p def _beam_derivatives( self, isel, parameterisation=None, ds0_dbeam_p=None, reflections=None ): pass def _xl_orientation_derivatives( self, isel, parameterisation=None, dU_dxlo_p=None, reflections=None ): pass def _xl_unit_cell_derivatives( self, isel, parameterisation=None, dB_dxluc_p=None, reflections=None ): pass def _goniometer_derivatives( self, isel, parameterisation=None, dS_dgon_p=None, reflections=None ): pass def _grads_detector_loop(self, reflections, results, callback=None): for dp in self._detector_parameterisations: isel = flex.size_t() for exp_id in dp.get_experiment_ids(): isel.extend(self._experiment_to_idx[exp_id]) detector = dp.get_model() panel = reflections["panel"].select(isel) results = self._extend_gradient_vectors( results, self._nref, dp.num_free(), keys=self._grad_names ) for panel_id, _ in enumerate(detector): sub_isel = isel.select(panel == panel_id) if len(sub_isel) == 0: continue dpv_ddet_p = self._detector_derivatives( sub_isel, panel_id, parameterisation=dp, reflections=reflections ) sub_w_inv = self._w_inv.select(sub_isel) sub_u_w_inv = self._u_w_inv.select(sub_isel) sub_v_w_inv = self._v_w_inv.select(sub_isel) dX_ddet_p, dY_ddet_p = self._calc_dX_dp_and_dY_dp_from_dpv_dp( sub_w_inv, 
sub_u_w_inv, sub_v_w_inv, dpv_ddet_p ) iparam = self._iparam for dX, dY in zip(dX_ddet_p, dY_ddet_p): if dX is not None: results[iparam][self._grad_names[0]].set_selected(sub_isel, dX) if dY is not None: results[iparam][self._grad_names[1]].set_selected(sub_isel, dY) iparam += 1 if callback is not None: iparam = self._iparam for i in range(dp.num_free()): results[iparam] = callback(results[iparam]) iparam += 1 self._iparam += dp.num_free() return results def _grads_model_loop( self, parameterisations, reflections, results, callback=None, derivatives_fn=None, ): for p in parameterisations: isel = flex.size_t() for exp_id in p.get_experiment_ids(): isel.extend(self._experiment_to_idx[exp_id]) results = self._extend_gradient_vectors( results, self._nref, p.num_free(), keys=self._grad_names ) if len(isel) == 0: if callback: for _ in range(p.num_free()): results[self._iparam] = callback(results[self._iparam]) self._iparam += 1 else: self._iparam += p.num_free() continue w_inv = self._w_inv.select(isel) u_w_inv = self._u_w_inv.select(isel) v_w_inv = self._v_w_inv.select(isel) dpv_dbeam_p, dAngle_dbeam_p = derivatives_fn( isel, parameterisation=p, reflections=reflections ) dX_dbeam_p, dY_dbeam_p = self._calc_dX_dp_and_dY_dp_from_dpv_dp( w_inv, u_w_inv, v_w_inv, dpv_dbeam_p ) for dX, dY, dAngle in zip(dX_dbeam_p, dY_dbeam_p, dAngle_dbeam_p): if dX is not None: results[self._iparam][self._grad_names[0]].set_selected(isel, dX) if dY is not None: results[self._iparam][self._grad_names[1]].set_selected(isel, dY) if dAngle is not None: results[self._iparam][self._grad_names[2]].set_selected( isel, dAngle ) if callback is not None: results[self._iparam] = callback(results[self._iparam]) self._iparam += 1 return results def _grads_beam_loop(self, reflections, results, callback=None): return self._grads_model_loop( self._beam_parameterisations, reflections, results, derivatives_fn=self._beam_derivatives, callback=callback, ) def _grads_xl_orientation_loop(self, reflections, results, callback=None): return self._grads_model_loop( self._xl_orientation_parameterisations, reflections, results, derivatives_fn=self._xl_orientation_derivatives, callback=callback, ) def _grads_xl_unit_cell_loop(self, reflections, results, callback=None): return self._grads_model_loop( self._xl_unit_cell_parameterisations, reflections, results, derivatives_fn=self._xl_unit_cell_derivatives, callback=callback, ) def _grads_goniometer_loop(self, reflections, results, callback=None): return self._grads_model_loop( self._goniometer_parameterisations, reflections, results, derivatives_fn=self._goniometer_derivatives, callback=callback, ) class SparseGradientVectorMixin: @staticmethod def _extend_gradient_vectors(results, m, n, keys=("dX_dp", "dY_dp", "dZ_dp")): new_results = [{key: sparse.matrix_column(m) for key in keys} for _ in range(n)] results.extend(new_results) return results class XYPhiPredictionParameterisation(PredictionParameterisation): _grad_names = ("dX_dp", "dY_dp", "dphi_dp") def _local_setup(self, reflections): self._phi_calc = reflections["xyzcal.mm"].parts()[2] q = self._fixed_rotation * (self._UB * self._h) self._r = self._setting_rotation * q.rotate_around_origin( self._axis, self._phi_calc ) self._e_X_r = (self._setting_rotation * self._axis).cross(self._r) self._e_r_s0 = (self._e_X_r).dot(self._s0) e_r_s0_mag = flex.abs(self._e_r_s0) try: assert flex.min(e_r_s0_mag) > 1.0e-6 except AssertionError as e: imin = flex.min_index(e_r_s0_mag) print("(e X r).s0 too small:") print("for", (e_r_s0_mag <= 
1.0e-6).count(True), "reflections") print("out of", len(e_r_s0_mag), "total") print("such as", reflections["miller_index"][imin]) print("with scattering vector", reflections["s1"][imin]) print("where r =", self._r[imin]) print("e =", self._axis[imin]) print("s0 =", self._s0[imin]) print("this reflection forms angle with the equatorial plane " "normal:") vecn = ( matrix.col(self._s0[imin]) .cross(matrix.col(self._axis[imin])) .normalize() ) print(matrix.col(reflections["s1"][imin]).accute_angle(vecn)) raise e def _beam_derivatives( self, isel, parameterisation=None, ds0_dbeam_p=None, reflections=None ): r = self._r.select(isel) e_X_r = self._e_X_r.select(isel) e_r_s0 = self._e_r_s0.select(isel) D = self._D.select(isel) if ds0_dbeam_p is None: ds0_dbeam_p = parameterisation.get_ds_dp(use_none_as_null=True) ds0_dbeam_p = [ None if e is None else flex.vec3_double(len(r), e.elems) for e in ds0_dbeam_p ] dphi_dp = [] dpv_dp = [] for der in ds0_dbeam_p: if der is None: dphi_dp.append(None) dpv_dp.append(None) continue dphi = (r.dot(der) / e_r_s0) * -1.0 dphi_dp.append(dphi) dpv_dp.append(D * (e_X_r * dphi + der)) return dpv_dp, dphi_dp def _xl_derivatives(self, isel, derivatives, b_matrix, parameterisation=None): axis = self._axis.select(isel) fixed_rotation = self._fixed_rotation.select(isel) setting_rotation = self._setting_rotation.select(isel) phi_calc = self._phi_calc.select(isel) h = self._h.select(isel) s1 = self._s1.select(isel) e_X_r = self._e_X_r.select(isel) e_r_s0 = self._e_r_s0.select(isel) if b_matrix: B = self._B.select(isel) else: U = self._U.select(isel) D = self._D.select(isel) if derivatives is None: derivatives = [ None if der is None else flex.mat3_double(len(isel), der.elems) for der in parameterisation.get_ds_dp(use_none_as_null=True) ] dphi_dp = [] dpv_dp = [] for der in derivatives: if der is None: dphi_dp.append(None) dpv_dp.append(None) continue if b_matrix: tmp = fixed_rotation * (der * B * h) else: tmp = fixed_rotation * (U * der * h) dr = setting_rotation * tmp.rotate_around_origin(axis, phi_calc) dphi = -1.0 * dr.dot(s1) / e_r_s0 dphi_dp.append(dphi) dpv_dp.append(D * (dr + e_X_r * dphi)) return dpv_dp, dphi_dp def _xl_orientation_derivatives( self, isel, parameterisation=None, dU_dxlo_p=None, reflections=None ): return self._xl_derivatives( isel, dU_dxlo_p, b_matrix=True, parameterisation=parameterisation )
BSD 3-Clause New or Revised License
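The static helper _extend_gradient_vectors in the code above grows the results list by one zero-filled gradient container per free parameter. A dependency-free sketch of the same dense behaviour, with plain Python lists standing in for flex.double arrays (names here are illustrative, not the DIALS API):

def extend_gradient_vectors(results, m, n, keys=("dX_dp", "dY_dp", "dZ_dp")):
    # One container per new parameter, each holding m zeroed gradients per key.
    results.extend([{key: [0.0] * m for key in keys} for _ in range(n)])
    return results

results = extend_gradient_vectors([], m=4, n=2)
print(len(results), results[0]["dX_dp"])  # 2 [0.0, 0.0, 0.0, 0.0]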
twisted/txaws
txaws/tests/test_auth_v4.py
_create_canonical_request_fixture
python
def _create_canonical_request_fixture():
    return _CanonicalRequest(method="POST",
                             canonical_uri="/",
                             canonical_query_string="qs",
                             canonical_headers="headers",
                             signed_headers=b"signed headers",
                             payload_hash=b"payload hash")
Make a L{_CanonicalRequest} instance with fixed data. @return: A canonical request. @rtype: L{_CanonicalRequest}
https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/tests/test_auth_v4.py#L34-L46
import datetime
import hashlib
import hmac
import urlparse

from twisted.trial import unittest

from txaws._auth_v4 import (
    _CanonicalRequest,
    _Credential,
    _CredentialScope,
    _SignableAWS4HMAC256Token,
    _make_authorization_header,
    _make_canonical_headers,
    _make_canonical_query_string,
    _make_canonical_uri,
    _make_signed_headers,
    getSignatureKey,
    makeAMZDate,
    makeDateStamp,
    sign,
)

from txaws.credentials import AWSCredentials
from txaws.service import REGION_US_EAST_1
MIT License
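A minimal, self-contained sketch of how such a fixture is consumed; the namedtuple is a stand-in for txaws._auth_v4._CanonicalRequest, and attribute-style storage of the constructor arguments is an assumption — only the field values come from the source:

from collections import namedtuple

# Stand-in for txaws._auth_v4._CanonicalRequest (assumed attribute layout).
_CanonicalRequest = namedtuple(
    "_CanonicalRequest",
    ["method", "canonical_uri", "canonical_query_string",
     "canonical_headers", "signed_headers", "payload_hash"])

def _create_canonical_request_fixture():
    return _CanonicalRequest(method="POST", canonical_uri="/",
                             canonical_query_string="qs",
                             canonical_headers="headers",
                             signed_headers=b"signed headers",
                             payload_hash=b"payload hash")

request = _create_canonical_request_fixture()
assert request.method == "POST"
assert request.payload_hash == b"payload hash"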
geoffxy/habitat
experiments/gnmt/seq2seq/train/fp_optimizers.py
Fp16Optimizer.step
python
def step(self, loss, optimizer, scheduler, update=True):
    loss *= self.loss_scale
    loss.backward()

    if update:
        self.set_grads(self.fp32_params, self.fp16_model.parameters())
        if self.loss_scale != 1.0:
            for param in self.fp32_params:
                param.grad.data /= self.loss_scale

        norm = clip_grad_norm_(self.fp32_params, self.grad_clip)

        if math.isfinite(norm):
            scheduler.step()
            optimizer.step()
            self.set_weights(self.fp16_model.parameters(),
                             self.fp32_params)
            self.since_last_invalid += 1
        else:
            self.loss_scale /= self.dls_downscale
            self.since_last_invalid = 0
            logging.info(f'Gradient norm: {norm}')
            logging.info(f'Skipped batch, new scale: {self.loss_scale}')

        if self.since_last_invalid >= self.dls_upscale_interval:
            self.loss_scale *= self.dls_upscale
            self.loss_scale = min(self.loss_scale, 8192.0)
            logging.info(f'Upscaling, new scale: {self.loss_scale}')
            self.since_last_invalid = 0

    self.fp16_model.zero_grad()
Performs one step of the optimizer. Applies loss scaling, computes gradients in fp16, converts gradients to fp32, inverts scaling and applies optional gradient norm clipping. If gradients are finite, it applies update to fp32 master weights and copies updated parameters to fp16 model for the next iteration. If gradients are not finite, it skips the batch and adjusts scaling factor for the next iteration. :param loss: value of loss function :param optimizer: optimizer :param update: if True executes weight update
https://github.com/geoffxy/habitat/blob/decc70d18c4a1db7bb109fd59b2b60567bf74375/experiments/gnmt/seq2seq/train/fp_optimizers.py#L78-L121
import logging
import math

import torch
from torch.nn.utils import clip_grad_norm_


class Fp16Optimizer:

    @staticmethod
    def set_grads(params, params_with_grad):
        for param, param_w_grad in zip(params, params_with_grad):
            if param.grad is None:
                param.grad = torch.nn.Parameter(torch.empty_like(param))
            param.grad.data.copy_(param_w_grad.grad.data)

    @staticmethod
    def set_weights(params, new_params):
        for param, new_param in zip(params, new_params):
            param.data.copy_(new_param.data)

    def __init__(self, fp16_model, grad_clip=float('inf'), loss_scale=8192,
                 dls_downscale=2, dls_upscale=2, dls_upscale_interval=128):
        logging.info('Initializing fp16 optimizer')
        self.initialize_model(fp16_model)

        self.since_last_invalid = 0
        self.loss_scale = loss_scale
        self.dls_downscale = dls_downscale
        self.dls_upscale = dls_upscale
        self.dls_upscale_interval = dls_upscale_interval
        self.grad_clip = grad_clip

    def initialize_model(self, model):
        logging.info('Initializing fp32 clone weights')
        self.fp16_model = model
        self.fp16_model.zero_grad()
        self.fp32_params = [param.to(torch.float32).detach()
                            for param in model.parameters()]

        for param in self.fp32_params:
            param.requires_grad = True
Apache License 2.0
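The dynamic loss-scale schedule in step() is self-contained arithmetic, so it can be illustrated without a GPU. A minimal pure-Python sketch of the same upscale/downscale policy, using the constructor defaults above (loss_scale=8192, dls_downscale=2, dls_upscale=2, dls_upscale_interval=128); this mirrors the schedule, it is not the library API:

import math

def adjust_scale(scale, since_last_invalid, norm,
                 downscale=2, upscale=2, upscale_interval=128, max_scale=8192.0):
    # Mirror of the schedule in Fp16Optimizer.step (sketch only).
    if math.isfinite(norm):
        since_last_invalid += 1          # finite gradients: count a good step
    else:
        scale /= downscale               # overflow: halve the scale, reset counter
        since_last_invalid = 0
    if since_last_invalid >= upscale_interval:
        scale = min(scale * upscale, max_scale)  # long stable run: try a larger scale
        since_last_invalid = 0
    return scale, since_last_invalid

scale, good = 8192.0, 0
for norm in [1.3, float('inf')] + [0.9] * 128:
    scale, good = adjust_scale(scale, good, norm)
print(scale)  # back to 8192.0 after one overflow and 128 finite steps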
cleverhans-lab/cleverhans
cleverhans_v3.1.0/cleverhans_tutorials/mnist_blackbox.py
prep_bbox
python
def prep_bbox(sess, x, y, x_train, y_train, x_test, y_test,
              nb_epochs, batch_size, learning_rate, rng, nb_classes=10,
              img_rows=28, img_cols=28, nchannels=1):
    nb_filters = 64
    model = ModelBasicCNN("model1", nb_classes, nb_filters)
    loss = CrossEntropy(model, smoothing=0.1)
    predictions = model.get_logits(x)
    print("Defined TensorFlow model graph.")

    train_params = {
        "nb_epochs": nb_epochs,
        "batch_size": batch_size,
        "learning_rate": learning_rate,
    }
    train(sess, loss, x_train, y_train, args=train_params, rng=rng)

    eval_params = {"batch_size": batch_size}
    accuracy = model_eval(sess, x, y, predictions, x_test, y_test,
                          args=eval_params)
    print("Test accuracy of black-box on legitimate test "
          "examples: " + str(accuracy))

    return model, predictions, accuracy
Define and train a model that simulates the "remote" black-box oracle described in the original paper. :param sess: the TF session :param x: the input placeholder for MNIST :param y: the output placeholder for MNIST :param x_train: the training data for the oracle :param y_train: the training labels for the oracle :param x_test: the testing data for the oracle :param y_test: the testing labels for the oracle :param nb_epochs: number of epochs to train model :param batch_size: size of training batches :param learning_rate: learning rate for training :param rng: numpy.random.RandomState :return:
https://github.com/cleverhans-lab/cleverhans/blob/4aed4be702be5ce13d5017b8a3c6a2cdc4fc0009/cleverhans_v3.1.0/cleverhans_tutorials/mnist_blackbox.py#L59-L113
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import functools
import logging
import numpy as np
from six.moves import xrange
import tensorflow as tf

from cleverhans.attacks import FastGradientMethod
from cleverhans.utils_tf import jacobian_graph, jacobian_augmentation
from cleverhans.compat import flags
from cleverhans.dataset import MNIST
from cleverhans.initializers import HeReLuNormalInitializer
from cleverhans.loss import CrossEntropy
from cleverhans.model import Model
from cleverhans.train import train
from cleverhans.utils import set_log_level
from cleverhans.utils import TemporaryLogLevel
from cleverhans.utils import to_categorical
from cleverhans.utils_tf import model_eval, batch_eval
from cleverhans.model_zoo.basic_cnn import ModelBasicCNN

FLAGS = flags.FLAGS

NB_CLASSES = 10
BATCH_SIZE = 128
LEARNING_RATE = 0.001
NB_EPOCHS = 10
HOLDOUT = 150
DATA_AUG = 6
NB_EPOCHS_S = 10
LMBDA = 0.1
AUG_BATCH_SIZE = 512


def setup_tutorial():
    tf.set_random_seed(1234)
    return True
MIT License
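In the surrounding tutorial, HOLDOUT = 150 test points are reserved as the adversary's initial substitute-training set before the oracle is evaluated. A minimal numpy sketch of that split; the HOLDOUT constant comes from the module above, the data is random stand-in MNIST:

import numpy as np

HOLDOUT = 150  # from the tutorial's module constants

# Stand-in MNIST-shaped test data.
x_test = np.random.rand(10000, 28, 28, 1).astype(np.float32)
y_test = np.eye(10)[np.random.randint(0, 10, size=10000)]

# The adversary keeps the first HOLDOUT points to seed the substitute model;
# the oracle is evaluated only on the remainder.
x_sub, y_sub = x_test[:HOLDOUT], y_test[:HOLDOUT]
x_test, y_test = x_test[HOLDOUT:], y_test[HOLDOUT:]
print(x_sub.shape, x_test.shape)  # (150, 28, 28, 1) (9850, 28, 28, 1)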
aleju/imgaug
imgaug/random.py
RNG.copy_unless_global_rng
python
def copy_unless_global_rng(self):
    if self.is_global_rng():
        return self
    return self.copy()
Create a copy of this RNG unless it is the global RNG. Returns ------- RNG Copy of this RNG unless it is the global RNG. In the latter case the RNG instance itself will be returned without any changes.
https://github.com/aleju/imgaug/blob/0101108d4fed06bc5056c4a03e2bcb0216dac326/imgaug/random.py#L400-L412
from __future__ import print_function, division, absolute_import import copy as copylib import numpy as np import six.moves as sm SUPPORTS_NEW_NP_RNG_STYLE = False BIT_GENERATOR = None _NP_VERSION = list(map(int, np.__version__.split(".")[0:2])) if _NP_VERSION[0] > 1 or _NP_VERSION[1] >= 17: SUPPORTS_NEW_NP_RNG_STYLE = True BIT_GENERATOR = np.random.SFC64 if _NP_VERSION[1] == 17: _BIT_GENERATOR_INTERFACE = np.random.bit_generator.BitGenerator else: _BIT_GENERATOR_INTERFACE = np.random.BitGenerator GLOBAL_RNG = None SEED_MIN_VALUE = 0 SEED_MAX_VALUE = 2**31-1 _RNG_IDX = 1 class RNG(object): def __init__(self, generator): global _RNG_IDX if isinstance(generator, RNG): self.generator = generator.generator else: self.generator = normalize_generator_(generator) self._is_new_rng_style = ( not isinstance(self.generator, np.random.RandomState)) self._idx = _RNG_IDX _RNG_IDX += 1 @property def state(self): return get_generator_state(self.generator) @state.setter def state(self, value): self.set_state_(value) def set_state_(self, value): set_generator_state_(self.generator, value) return self def use_state_of_(self, other): return self.set_state_(other.state) def is_global_rng(self): return get_global_rng().generator is self.generator def equals_global_rng(self): return get_global_rng().equals(self) def generate_seed_(self): return generate_seed_(self.generator) def generate_seeds_(self, n): return generate_seeds_(self.generator, n) def reset_cache_(self): reset_generator_cache_(self.generator) return self def derive_rng_(self): return self.derive_rngs_(1)[0] def derive_rngs_(self, n): return [RNG(gen) for gen in derive_generators_(self.generator, n)] def equals(self, other): assert isinstance(other, RNG), ( "Expected 'other' to be an RNG, got type %s. " "Use imgaug.random.is_generator_equal_to() to compare " "numpy generators or RandomStates." % (type(other),)) return is_generator_equal_to(self.generator, other.generator) def advance_(self): advance_generator_(self.generator) return self def copy(self): return RNG(copy_generator(self.generator))
MIT License
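A short usage sketch of the method with imgaug's own random module, using only API visible in the surrounding context (RNG and get_global_rng):

import imgaug.random as iarandom

local = iarandom.RNG(1234)
assert local.copy_unless_global_rng() is not local   # a real copy is returned

glob = iarandom.get_global_rng()
assert glob.copy_unless_global_rng() is glob         # the global RNG is passed through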
scottkirkwood/key-mon
src/keymon/shaped_window.py
ShapedWindow.fade_away
python
def fade_away(self):
    self.shown = False
    self.timeout_timer = GLib.timeout_add(int(self.timeout * 1000), self.hide)
Schedule the window to be hidden after the configured timeout elapses.
https://github.com/scottkirkwood/key-mon/blob/d609880417479a86684b35dc273d1aa1b6c52627/src/keymon/shaped_window.py#L113-L117
import gi gi.require_version("Gtk", "3.0") from gi.repository import Gtk, Gdk, GLib from . import lazy_pixbuf_creator class ShapedWindow(Gtk.Window): def __init__(self, fname, scale=1.0, timeout=0.2): Gtk.Window.__init__(self) self.connect('size-allocate', self._on_size_allocate) self.set_decorated(False) self.set_keep_above(True) self.set_accept_focus(False) self.scale = scale self.shown = False self.timeout = timeout self.timeout_timer = None self.name_fnames = { 'mouse' : [fname], } self.pixbufs = lazy_pixbuf_creator.LazyPixbufCreator(self.name_fnames, self.scale) self.pixbuf = self.pixbufs.get('mouse') self.resize(self.pixbuf.get_width(), self.pixbuf.get_height()) self.image = Gtk.Image.new_from_pixbuf(self.pixbuf) rgba = self.get_screen().get_rgba_visual() if rgba is not None: self.set_visual(rgba) self.set_name("mouse-follow") provider = Gtk.CssProvider() provider.load_from_data( b""" #mouse-follow { background-color:rgba(0,0,0,0); } """ ) context = self.get_style_context() context.add_provider(provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION) self.image.show() self.add(self.image) def _on_size_allocate(self, win, unused_allocation): win.set_property('skip-taskbar-hint', True) if not win.is_composited(): print('Unable to fade the window') else: win.set_opacity(0.5) def center_on_cursor(self, x=None, y=None): if x is None or y is None: root = Gdk.Screen.get_default().get_root_window() _, x, y, _ = root.get_pointer() w, h = self.get_size() new_x, new_y = x - w/2, y - h/2 pos = self.get_position() if pos[0] != new_x or pos[1] != new_y: self.move(new_x, new_y) self.show() def show(self): if self.timeout_timer and self.shown: if GLib.main_context_default().find_source_by_id(self.timeout_timer) and not GLib.main_context_default().find_source_by_id(self.timeout_timer).is_destroyed(): GLib.source_remove(self.timeout_timer) self.timeout_timer = None super(ShapedWindow, self).show() pm = gtk.gdk.Pixmap(None, self.get_size()[0], self.get_size()[1], 1) pmcr = pm.cairo_create() pmcr.rectangle(0, 0, 1, 1) pmcr.fill() self.input_shape_combine_mask(pm, 0, 0) def maybe_show(self): if self.shown or not self.timeout_timer: return self.shown = True self.show()
Apache License 2.0
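fade_away relies on GLib's one-shot timeout scheduling: GLib.timeout_add fires the callback after the given number of milliseconds, and a callback that returns False runs only once. A minimal standalone sketch of that pattern (not taken from key-mon):

from gi.repository import GLib

loop = GLib.MainLoop()

def hide_window():
    print("hiding now")
    loop.quit()
    return False  # one-shot: do not reschedule

timeout = 0.2  # seconds, matching ShapedWindow's default
GLib.timeout_add(int(timeout * 1000), hide_window)
loop.run()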
hyde/hyde
hyde/ext/plugins/css.py
SassPlugin.text_resource_complete
python
def text_resource_complete(self, resource, text):
    if resource.relative_path not in self.resources:
        return
    includes = [resource.node.path] + self.includes
    includes = [path.rstrip(os.sep) + os.sep for path in includes]
    options = self.options
    if 'include_paths' not in options:
        options['include_paths'] = []
    options['include_paths'].extend(includes)
    self.logger.error(resource)
    try:
        return self.sass.compile(string=text, **options)
    except Exception as exc:
        self.logger.error(exc)
        raise
Run the sass (libsass) compiler on text.
https://github.com/hyde/hyde/blob/7f415402cc3e007a746eb2b5bc102281fdb415bd/hyde/ext/plugins/css.py#L476-L494
from hyde._compat import str from hyde.plugin import CLTransformer, Plugin from hyde.exceptions import HydeException import os import re import subprocess import sys from fswrap import File class LessCSSPlugin(CLTransformer): def __init__(self, site): super(LessCSSPlugin, self).__init__(site) self.import_finder = re.compile('^\\s*@import\s+(?:\'|\")([^\'\"]*)(?:\'|\")\s*\;\s*$', re.MULTILINE) @property def executable_name(self): return "lessc" def _should_parse_resource(self, resource): return resource.source_file.kind == 'less' and getattr(resource, 'meta', {}).get('parse', True) def _should_replace_imports(self, resource): return getattr(resource, 'meta', {}).get('uses_template', True) def begin_site(self): for resource in self.site.content.walk_resources(): if self._should_parse_resource(resource): new_name = resource.source_file.name_without_extension + ".css" target_folder = File(resource.relative_deploy_path).parent resource.relative_deploy_path = target_folder.child(new_name) def begin_text_resource(self, resource, text): if not self._should_parse_resource(resource) or not self._should_replace_imports(resource): return text def import_to_include(match): if not match.lastindex: return '' path = match.groups(1)[0] afile = File(resource.source_file.parent.child(path)) if len(afile.kind.strip()) == 0: afile = File(afile.path + '.less') ref = self.site.content.resource_from_path(afile.path) if not ref: raise HydeException( "Cannot import from path [%s]" % afile.path) ref.is_processable = False return self.template.get_include_statement(ref.relative_path) text = self.import_finder.sub(import_to_include, text) return text @property def plugin_name(self): return "less" def text_resource_complete(self, resource, text): if not self._should_parse_resource(resource): return supported = [ "verbose", ("silent", "s"), ("compress", "x"), "O0", "O1", "O2", "include-path=" ] less = self.app source = File.make_temp(text) target = File.make_temp('') args = [str(less)] args.extend(self.process_args(supported)) args.extend([str(source), str(target)]) try: self.call_app(args) except subprocess.CalledProcessError: HydeException.reraise( "Cannot process %s. 
Error occurred when " "processing [%s]" % (self.app.name, resource.source_file), sys.exc_info()) return target.read_all() class StylusPlugin(CLTransformer): def __init__(self, site): super(StylusPlugin, self).__init__(site) self.import_finder = re.compile('^\\s*@import\s+(?:\'|\")([^\'\"]*)(?:\'|\")\s*\;?\s*$', re.MULTILINE) def begin_site(self): for resource in self.site.content.walk_resources(): if resource.source_file.kind == 'styl': new_name = resource.source_file.name_without_extension + ".css" target_folder = File(resource.relative_deploy_path).parent resource.relative_deploy_path = target_folder.child(new_name) def begin_text_resource(self, resource, text): if not resource.source_file.kind == 'styl': return def import_to_include(match): if not match.lastindex: return '' path = match.groups(1)[0] first_child = resource.source_file.parent.child(path) afile = File(File(first_child).fully_expanded_path) if len(afile.kind.strip()) == 0: afile = File(afile.path + '.styl') ref = self.site.content.resource_from_path(afile.path) if not ref: try: include = self.settings.args.include except AttributeError: include = False if not include: raise HydeException( "Cannot import from path [%s]" % afile.path) else: ref.is_processable = False return "\n" + self.template.get_include_statement(ref.relative_path) + "\n" return '@import "' + path + '"\n' text = self.import_finder.sub(import_to_include, text) return text @property def defaults(self): try: mode = self.site.config.mode except AttributeError: mode = "production" defaults = {"compress": ""} if mode.startswith('dev'): defaults = {} return defaults @property def plugin_name(self): return "stylus" def text_resource_complete(self, resource, text): if not resource.source_file.kind == 'styl': return stylus = self.app source = File.make_temp(text.strip()) supported = [("compress", "c"), ("include", "I")] args = [str(stylus)] args.extend(self.process_args(supported)) args.append(str(source)) try: self.call_app(args) except subprocess.CalledProcessError: HydeException.reraise( "Cannot process %s. 
Error occurred when " "processing [%s]" % (stylus.name, resource.source_file), sys.exc_info()) target = File(source.path + '.css') return target.read_all() class CleverCSSPlugin(Plugin): def __init__(self, site): super(CleverCSSPlugin, self).__init__(site) try: import clevercss except ImportError as e: raise HydeException('Unable to import CleverCSS: ' + e.message) else: self.clevercss = clevercss def _should_parse_resource(self, resource): return resource.source_file.kind == 'ccss' and getattr(resource, 'meta', {}).get('parse', True) def _should_replace_imports(self, resource): return getattr(resource, 'meta', {}).get('uses_template', True) def begin_site(self): for resource in self.site.content.walk_resources(): if self._should_parse_resource(resource): new_name = resource.source_file.name_without_extension + ".css" target_folder = File(resource.relative_deploy_path).parent resource.relative_deploy_path = target_folder.child(new_name) def begin_text_resource(self, resource, text): if not self._should_parse_resource(resource) or not self._should_replace_imports(resource): return text import_finder = re.compile( '^\\s*@import\s+(?:\'|\")([^\'\"]*)(?:\'|\")\s*\;\s*$', re.MULTILINE) def import_to_include(match): if not match.lastindex: return '' path = match.groups(1)[0] afile = File(resource.source_file.parent.child(path)) if len(afile.kind.strip()) == 0: afile = File(afile.path + '.ccss') ref = self.site.content.resource_from_path(afile.path) if not ref: raise HydeException( "Cannot import from path [%s]" % afile.path) ref.is_processable = False return self.template.get_include_statement(ref.relative_path) text = import_finder.sub(import_to_include, text) return text def text_resource_complete(self, resource, text): if not self._should_parse_resource(resource): return return self.clevercss.convert(text, self.settings) class SassyCSSPlugin(Plugin): def __init__(self, site): super(SassyCSSPlugin, self).__init__(site) try: import scss except ImportError as e: raise HydeException('Unable to import pyScss: ' + e.message) else: self.scss = scss def _should_parse_resource(self, resource): return resource.source_file.kind == 'scss' and getattr(resource, 'meta', {}).get('parse', True) @property def options(self): try: mode = self.site.config.mode except AttributeError: mode = "production" debug = mode.startswith('dev') opts = {'compress': not debug, 'debug_info': debug} site_opts = self.settings.get('options', {}) opts.update(site_opts) return opts @property def vars(self): return self.settings.get('vars', {}) @property def includes(self): return self.settings.get('includes', []) def begin_site(self): self.scss.STATIC_URL = self.site.content_url('/') self.scss.STATIC_ROOT = self.site.config.content_root_path.path self.scss.ASSETS_URL = self.site.media_url('/') self.scss.ASSETS_ROOT = self.site.config.deploy_root_path.child( self.site.config.media_root) for resource in self.site.content.walk_resources(): if self._should_parse_resource(resource): new_name = resource.source_file.name_without_extension + ".css" target_folder = File(resource.relative_deploy_path).parent resource.relative_deploy_path = target_folder.child(new_name) def text_resource_complete(self, resource, text): if not self._should_parse_resource(resource): return includes = [resource.node.path] + self.includes includes = [path.rstrip(os.sep) + os.sep for path in includes] options = self.options if 'load_paths' not in options: options['load_paths'] = [] options['load_paths'].extend(includes) scss = self.scss.Scss(scss_opts=options, 
scss_vars=self.vars) return scss.compile(text) class SassPlugin(Plugin): def __init__(self, site): super(SassPlugin, self).__init__(site) try: import sass except ImportError as e: raise HydeException('Unable to import libsass: ' + e.message) else: self.sass = sass self.resources = [] def _should_parse_resource(self, resource): files = self.site.config.get("sass", {}).get("files", []) return resource.source_file.kind == 'scss' and resource.relative_path in files @property def options(self): try: mode = self.site.config.mode except AttributeError: mode = "production" if 'sass' in self.site.config and 'output_style' in self.site.config.sass: output_style = self.site.config.sass.output_style else: debug = mode.startswith('dev') output_style = 'compressed' if not debug else 'nested' opts = {'output_style': output_style} site_opts = self.settings.get('options', {}) opts.update(site_opts) return opts @property def includes(self): return self.settings.get('includes', []) def begin_site(self): for resource in self.site.content.walk_resources(): if self._should_parse_resource(resource): new_name = resource.source_file.name_without_extension + ".css" target_folder = File(resource.relative_deploy_path).parent resource.relative_deploy_path = target_folder.child(new_name) self.resources.append(resource.relative_path)
MIT License
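The plugin delegates to libsass-python's sass.compile. A minimal sketch of the same call with the two options the plugin sets (output_style and include_paths); the SCSS string and paths here are illustrative:

import sass  # pip install libsass

css = sass.compile(
    string="$accent: #336699; a { color: $accent; }",
    output_style="compressed",   # the plugin picks a compressed style outside dev mode
    include_paths=["scss/"],     # search path for @import, as the plugin extends it
)
print(css)  # single-line compressed CSS, e.g. a{color:#369}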
surrealai/surreal
surreal/learner/base.py
Learner.main_setup
python
def main_setup(self):
    self.save_config()
    self.iter_timer.start()
    self.publish_parameter(0, message='batch ' + str(0))
One-time setup performed before entering the main loop.
https://github.com/surrealai/surreal/blob/ae9e5f43bdd7d1bc6d39d0a4783b96b2c117fade/surreal/learner/base.py#L356-L362
import os import threading import queue import time import numpy as np from pathlib import Path from benedict import BeneDict import surreal.utils as U from surreal.session import ( TimeThrottledTensorplex, get_loggerplex_client, get_tensorplex_client, Config ) from surreal.distributed import ParameterPublisher, LearnerDataPrefetcher class Learner(metaclass=U.AutoInitializeMeta): def __init__(self, learner_config, env_config, session_config): self.learner_config = learner_config self.env_config = env_config self.session_config = session_config self.current_iter = 0 self._setup_logging() self._setup_checkpoint() def learn(self, batch_exp): raise NotImplementedError def module_dict(self): raise NotImplementedError def save(self, file_path): raise NotImplementedError def checkpoint_attributes(self): return [] def _setup_publish(self): min_publish_interval = self.learner_config.parameter_publish.min_publish_interval self._ps_publish_tracker = U.TimedTracker(min_publish_interval) ps_publish_port = os.environ['SYMPH_PARAMETER_PUBLISH_PORT'] self._ps_publisher = ParameterPublisher( port=ps_publish_port, module_dict=self.module_dict() ) def _setup_prefetching(self): batch_size = self.learner_config.replay.batch_size self._prefetch_queue = LearnerDataPrefetcher( session_config=self.session_config, batch_size=batch_size, worker_preprocess=self._prefetcher_preprocess, main_preprocess=self.preprocess ) self._prefetch_queue.start() def _initialize(self): if self.session_config.checkpoint.restore: self.restore_checkpoint() self._setup_publish() self._setup_prefetching() self._tensorplex_thread.start() def should_publish_parameter(self): return self._ps_publish_tracker.track_increment() def publish_parameter(self, iteration, message=''): self._ps_publisher.publish(iteration, message=message) def fetch_batch(self): return self._prefetch_queue.get() def fetch_iterator(self): while True: yield self.fetch_batch() def _setup_logging(self): self.learn_timer = U.TimeRecorder() self.iter_timer = U.TimeRecorder() self.publish_timer = U.TimeRecorder() self.init_time = time.time() self.current_iter = 0 self.last_time = self.init_time self.last_time_2 = self.init_time self.last_iter = 0 self.log = get_loggerplex_client('learner', self.session_config) self.tensorplex = self._get_tensorplex('learner/learner') self._tensorplex_thread = U.PeriodicWakeUpWorker( target=self.generate_tensorplex_report) def _get_tensorplex(self, name): tp = get_tensorplex_client( name, self.session_config ) update_schedule = self.session_config.tensorplex.update_schedule periodic_tp = TimeThrottledTensorplex( tensorplex=tp, min_update_interval=update_schedule.learner_min_update_interval, ) return periodic_tp def generate_tensorplex_report(self): cur_time = time.time() current_iter = self.current_iter iter_elapsed = current_iter - self.last_iter self.last_iter = current_iter time_elapsed = cur_time - self.last_time self.last_time = cur_time core_metrics = {} system_metrics = {} learn_time = self.learn_timer.avg + 1e-6 fetch_timer = self._prefetch_queue.timer fetch_time = fetch_timer.avg + 1e-6 iter_time = self.iter_timer.avg + 1e-6 publish_time = self.publish_timer.avg + 1e-6 core_metrics['learn_time_s'] = learn_time core_metrics['fetch_time_s'] = fetch_time core_metrics['publish_time_s'] = publish_time core_metrics['iter_time_s'] = iter_time iter_per_s = iter_elapsed / time_elapsed system_metrics['iter_per_s'] = iter_per_s system_metrics['exp_per_s'] = iter_per_s * self.learner_config.replay.batch_size system_metrics['compute_load_percent'] 
= min( learn_time / iter_time * 100, 100) system_metrics['io_fetch_experience_load_percent'] = min( fetch_time / iter_time * 100, 100) system_metrics['io_publish_load_percent'] = min( publish_time / iter_time * 100, 100) all_metrics = {} for k in core_metrics: all_metrics['.core/' + k] = core_metrics[k] for k in system_metrics: all_metrics['.system/' + k] = system_metrics[k] self.tensorplex.add_scalars(all_metrics) def _setup_checkpoint(self): tracked_attrs = self.checkpoint_attributes() assert U.is_sequence(tracked_attrs), 'checkpoint_attributes must return a list of string attr names' self._periodic_checkpoint = U.PeriodicCheckpoint( U.f_join(self.session_config.folder, 'checkpoint'), name='learner', period=self.session_config.checkpoint.learner.periodic, min_interval=self.session_config.checkpoint.learner.min_interval, tracked_obj=self, tracked_attrs=tracked_attrs, keep_history=self.session_config.checkpoint.learner.keep_history, keep_best=self.session_config.checkpoint.learner.keep_best, ) def periodic_checkpoint(self, global_steps, score=None, **info): return self._periodic_checkpoint.save( score=score, global_steps=global_steps, reload_metadata=False, **info, ) def restore_checkpoint(self): SC = self.session_config restore_folder = SC.checkpoint.restore_folder if (restore_folder and U.f_last_part_in_path(restore_folder) != 'checkpoint'): restore_folder = U.f_join(restore_folder, 'checkpoint') restored = self._periodic_checkpoint.restore( target=SC.checkpoint.learner.restore_target, mode=SC.checkpoint.learner.mode, reload_metadata=True, check_ckpt_exists=True, restore_folder=restore_folder, ) if restored: self.log.info('successfully restored from checkpoint', restored) def preprocess(self, batch): return batch def _prefetcher_preprocess(self, batch): return batch def main(self): self.main_setup() while True: self.main_loop()
MIT License
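main_setup is the one-time half of a template-method pair: main() calls it once, then calls main_loop() forever (visible at the end of the context above). A minimal sketch of that pattern, with stubbed bodies standing in for Surreal's real publishing and training code:

class LoopWorker:
    """Template-method skeleton: one-time setup, then an endless loop."""

    def main_setup(self):
        print("setup: save config, start timers, publish initial parameters")

    def main_loop(self):
        print("loop: fetch batch, learn, maybe publish parameters")

    def main(self, max_iters=3):  # bounded here so the sketch terminates
        self.main_setup()
        for _ in range(max_iters):
            self.main_loop()

LoopWorker().main()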
zehaos/mobilenet
preprocessing/preprocessing_factory.py
get_preprocessing
python
def get_preprocessing(name, is_training=False):
    preprocessing_fn_map = {
        'cifarnet': cifarnet_preprocessing,
        'inception': inception_preprocessing,
        'inception_v1': inception_preprocessing,
        'inception_v2': inception_preprocessing,
        'inception_v3': inception_preprocessing,
        'inception_v4': inception_preprocessing,
        'inception_resnet_v2': inception_preprocessing,
        'lenet': lenet_preprocessing,
        'resnet_v1_50': vgg_preprocessing,
        'resnet_v1_101': vgg_preprocessing,
        'resnet_v1_152': vgg_preprocessing,
        'resnet_v2_50': vgg_preprocessing,
        'resnet_v2_101': vgg_preprocessing,
        'resnet_v2_152': vgg_preprocessing,
        'vgg': vgg_preprocessing,
        'vgg_a': vgg_preprocessing,
        'vgg_16': vgg_preprocessing,
        'vgg_19': vgg_preprocessing,
        'mobilenet': mobilenet_preprocessing,
        'mobilenetdet': mobilenetdet_preprocessing,
    }

    if name not in preprocessing_fn_map:
        raise ValueError('Preprocessing name [%s] was not recognized' % name)

    def preprocessing_fn(image, output_height, output_width, **kwargs):
        return preprocessing_fn_map[name].preprocess_image(
            image, output_height, output_width, is_training=is_training,
            **kwargs)

    return preprocessing_fn
Returns preprocessing_fn(image, height, width, **kwargs). Args: name: The name of the preprocessing function. is_training: `True` if the model is being used for training and `False` otherwise. Returns: preprocessing_fn: A function that preprocesses a single image (pre-batch). It has the following signature: image = preprocessing_fn(image, output_height, output_width, ...). Raises: ValueError: If preprocessing `name` is not recognized.
https://github.com/zehaos/mobilenet/blob/bb02b10fbd211d717f7a207245feac229f6bb23e/preprocessing/preprocessing_factory.py#L33-L79
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import tensorflow as tf

from preprocessing import cifarnet_preprocessing
from preprocessing import inception_preprocessing
from preprocessing import lenet_preprocessing
from preprocessing import vgg_preprocessing
from preprocessing import mobilenet_preprocessing
from preprocessing import mobilenetdet_preprocessing

slim = tf.contrib.slim
Apache License 2.0
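get_preprocessing is a lookup-table factory: it resolves a module once and returns a closure that bakes in the is_training flag. A minimal dependency-free sketch of the same pattern; the entries below are placeholders, not the real preprocessing modules:

def make_scaler(name, is_training=False):
    # Placeholder 'modules': each maps a name to a per-image transform.
    fn_map = {
        'identity': lambda x, h, w, training: x,
        'halve': lambda x, h, w, training: x / 2,
    }
    if name not in fn_map:
        raise ValueError('Preprocessing name [%s] was not recognized' % name)

    def preprocessing_fn(image, output_height, output_width, **kwargs):
        # The closure fixes both the chosen function and the training flag.
        return fn_map[name](image, output_height, output_width,
                            training=is_training, **kwargs)

    return preprocessing_fn

fn = make_scaler('halve')
print(fn(8.0, 224, 224))  # 4.0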
onshape-public/onshape-clients
python/onshape_client/oas/models/bt_substitute_approver_info.py
BTSubstituteApproverInfo.__init__
python
def __init__(self, _check_type=True, _from_server=False, _path_to_item=(),
             _configuration=None, **kwargs):
    self._data_store = {}
    self._check_type = _check_type
    self._from_server = _from_server
    self._path_to_item = _path_to_item
    self._configuration = _configuration

    for var_name, var_value in six.iteritems(kwargs):
        if (var_name not in self.attribute_map and
                self._configuration is not None and
                self._configuration.discard_unknown_keys and
                self.additional_properties_type is None):
            # Discard unknown keys when the configuration asks for it.
            continue
        setattr(self, var_name, var_value)
bt_substitute_approver_info.BTSubstituteApproverInfo - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _from_server (bool): True if the data is from the server False if the data is from the client (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. company_id (str): [optional] # noqa: E501 enabled (bool): [optional] # noqa: E501 identity (bt_identity_info.BTIdentityInfo): [optional] # noqa: E501
https://github.com/onshape-public/onshape-clients/blob/20843a00c628e516e7219e17a23ec4ef2bf9f16f/python/onshape_client/oas/models/bt_substitute_approver_info.py#L110-L154
from __future__ import absolute_import import re import sys import six import nulltype from onshape_client.oas.model_utils import ( ModelComposed, ModelNormal, ModelSimple, date, datetime, file_type, int, none_type, str, validate_get_composed_info, ) try: from onshape_client.oas.models import bt_identity_info except ImportError: bt_identity_info = sys.modules["onshape_client.oas.models.bt_identity_info"] class BTSubstituteApproverInfo(ModelNormal): allowed_values = {} validations = {} additional_properties_type = None @staticmethod def openapi_types(): return { "company_id": (str,), "enabled": (bool,), "identity": (bt_identity_info.BTIdentityInfo,), } @staticmethod def discriminator(): return None attribute_map = { "company_id": "companyId", "enabled": "enabled", "identity": "identity", } @staticmethod def _composed_schemas(): return None required_properties = set( [ "_data_store", "_check_type", "_from_server", "_path_to_item", "_configuration", ] )
MIT License
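The kwargs loop above implements the generated-client convention of optionally discarding unknown keys. A minimal plain-Python sketch of that filter, with stub classes whose names are illustrative:

class Config:
    discard_unknown_keys = True

class Model:
    attribute_map = {"company_id": "companyId", "enabled": "enabled"}

    def __init__(self, configuration=None, **kwargs):
        self._configuration = configuration
        for name, value in kwargs.items():
            # Unknown keys are silently dropped only when the configuration
            # asks for it; otherwise they become attributes as usual.
            if (name not in self.attribute_map
                    and configuration is not None
                    and configuration.discard_unknown_keys):
                continue
            setattr(self, name, value)

m = Model(configuration=Config(), enabled=True, bogus=1)
print(m.enabled, hasattr(m, "bogus"))  # True False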
unofficial-memsource/memsource-cli-client
memsource_cli/models/term_v2_dto.py
TermV2Dto.term_type
python
def term_type(self):
    return self._term_type
Gets the term_type of this TermV2Dto. # noqa: E501 :return: The term_type of this TermV2Dto. # noqa: E501 :rtype: str
https://github.com/unofficial-memsource/memsource-cli-client/blob/a6639506b74e95476da87f4375953448b76ea90c/memsource_cli/models/term_v2_dto.py#L542-L549
import pprint import re import six from memsource_cli.models.user_reference import UserReference class TermV2Dto(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'id': 'str', 'text': 'str', 'lang': 'str', 'rtl': 'bool', 'modified_at': 'datetime', 'created_at': 'datetime', 'modified_by': 'UserReference', 'created_by': 'UserReference', 'case_sensitive': 'bool', 'exact_match': 'bool', 'forbidden': 'bool', 'preferred': 'bool', 'status': 'str', 'concept_id': 'str', 'usage': 'str', 'note': 'str', 'writable': 'bool', 'short_translation': 'str', 'term_type': 'str', 'part_of_speech': 'str', 'gender': 'str', 'number': 'str' } attribute_map = { 'id': 'id', 'text': 'text', 'lang': 'lang', 'rtl': 'rtl', 'modified_at': 'modifiedAt', 'created_at': 'createdAt', 'modified_by': 'modifiedBy', 'created_by': 'createdBy', 'case_sensitive': 'caseSensitive', 'exact_match': 'exactMatch', 'forbidden': 'forbidden', 'preferred': 'preferred', 'status': 'status', 'concept_id': 'conceptId', 'usage': 'usage', 'note': 'note', 'writable': 'writable', 'short_translation': 'shortTranslation', 'term_type': 'termType', 'part_of_speech': 'partOfSpeech', 'gender': 'gender', 'number': 'number' } def __init__(self, id=None, text=None, lang=None, rtl=None, modified_at=None, created_at=None, modified_by=None, created_by=None, case_sensitive=None, exact_match=None, forbidden=None, preferred=None, status=None, concept_id=None, usage=None, note=None, writable=None, short_translation=None, term_type=None, part_of_speech=None, gender=None, number=None): self._id = None self._text = None self._lang = None self._rtl = None self._modified_at = None self._created_at = None self._modified_by = None self._created_by = None self._case_sensitive = None self._exact_match = None self._forbidden = None self._preferred = None self._status = None self._concept_id = None self._usage = None self._note = None self._writable = None self._short_translation = None self._term_type = None self._part_of_speech = None self._gender = None self._number = None self.discriminator = None if id is not None: self.id = id if text is not None: self.text = text if lang is not None: self.lang = lang if rtl is not None: self.rtl = rtl if modified_at is not None: self.modified_at = modified_at if created_at is not None: self.created_at = created_at if modified_by is not None: self.modified_by = modified_by if created_by is not None: self.created_by = created_by if case_sensitive is not None: self.case_sensitive = case_sensitive if exact_match is not None: self.exact_match = exact_match if forbidden is not None: self.forbidden = forbidden if preferred is not None: self.preferred = preferred if status is not None: self.status = status if concept_id is not None: self.concept_id = concept_id if usage is not None: self.usage = usage if note is not None: self.note = note if writable is not None: self.writable = writable if short_translation is not None: self.short_translation = short_translation if term_type is not None: self.term_type = term_type if part_of_speech is not None: self.part_of_speech = part_of_speech if gender is not None: self.gender = gender if number is not None: self.number = number @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def text(self): return self._text @text.setter def text(self, text): self._text = text @property def lang(self): return 
self._lang @lang.setter def lang(self, lang): self._lang = lang @property def rtl(self): return self._rtl @rtl.setter def rtl(self, rtl): self._rtl = rtl @property def modified_at(self): return self._modified_at @modified_at.setter def modified_at(self, modified_at): self._modified_at = modified_at @property def created_at(self): return self._created_at @created_at.setter def created_at(self, created_at): self._created_at = created_at @property def modified_by(self): return self._modified_by @modified_by.setter def modified_by(self, modified_by): self._modified_by = modified_by @property def created_by(self): return self._created_by @created_by.setter def created_by(self, created_by): self._created_by = created_by @property def case_sensitive(self): return self._case_sensitive @case_sensitive.setter def case_sensitive(self, case_sensitive): self._case_sensitive = case_sensitive @property def exact_match(self): return self._exact_match @exact_match.setter def exact_match(self, exact_match): self._exact_match = exact_match @property def forbidden(self): return self._forbidden @forbidden.setter def forbidden(self, forbidden): self._forbidden = forbidden @property def preferred(self): return self._preferred @preferred.setter def preferred(self, preferred): self._preferred = preferred @property def status(self): return self._status @status.setter def status(self, status): allowed_values = ["New", "Approved"] if status not in allowed_values: raise ValueError( "Invalid value for `status` ({0}), must be one of {1}" .format(status, allowed_values) ) self._status = status @property def concept_id(self): return self._concept_id @concept_id.setter def concept_id(self, concept_id): self._concept_id = concept_id @property def usage(self): return self._usage @usage.setter def usage(self, usage): self._usage = usage @property def note(self): return self._note @note.setter def note(self, note): self._note = note @property def writable(self): return self._writable @writable.setter def writable(self, writable): self._writable = writable @property def short_translation(self): return self._short_translation @short_translation.setter def short_translation(self, short_translation): self._short_translation = short_translation @property
Apache License 2.0
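The surrounding model pairs every trivial getter like term_type with a setter, and some setters validate against an allowed-values list (see the status setter in the context above). A minimal sketch of that swagger-codegen property pattern, grounded in the visible status setter:

class Term:
    def __init__(self):
        self._status = None

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, status):
        allowed_values = ["New", "Approved"]  # as in TermV2Dto.status
        if status not in allowed_values:
            raise ValueError(
                "Invalid value for `status` ({0}), must be one of {1}"
                .format(status, allowed_values))
        self._status = status

t = Term()
t.status = "Approved"
print(t.status)  # Approved
try:
    t.status = "Draft"
except ValueError as exc:
    print(exc)   # Invalid value for `status` (Draft), must be one of ['New', 'Approved']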
cn-uofbasel/picn
PiCN/Layers/NFNLayer/BasicNFNLayer.py
BasicNFNLayer.handleContent
python
def handleContent(self, packet_id: int, content: Content):
    self.logger.info("Handling Content: " + str(content.name))
    used = self.computation_table.push_data(content)
    if not used:
        self.queue_to_lower.put([packet_id, content])
        return
    ready_comps = self.computation_table.get_ready_computations()
    for comp in ready_comps:
        if comp.comp_state == NFNComputationState.FWD:
            self.forwarding_descision(comp.interest)
        if comp.comp_state == NFNComputationState.EXEC or comp.comp_state == NFNComputationState.WRITEBACK:
            self.compute(comp.interest)
Handle an arriving content object. :param packet_id: id of the computation :param content: content that arrived
https://github.com/cn-uofbasel/picn/blob/64ed40242657238e9f1d522d5873173f0b93a30e/PiCN/Layers/NFNLayer/BasicNFNLayer.py#L100-L116
import multiprocessing import random from typing import Dict, List from PiCN.Packets import Interest, Content, Nack, NackReason, Name from PiCN.Processes import LayerProcess from PiCN.Layers.NFNLayer.NFNComputationTable import BaseNFNComputationTable, NFNComputationTableEntry from PiCN.Layers.NFNLayer.NFNComputationTable import NFNComputationState from PiCN.Layers.NFNLayer.NFNExecutor import BaseNFNExecutor from PiCN.Layers.NFNLayer.Parser import * from PiCN.Layers.NFNLayer.NFNOptimizer import BaseNFNOptimizer from PiCN.Layers.NFNLayer.NFNOptimizer import ToDataFirstOptimizer from PiCN.Layers.NFNLayer.R2C import BaseR2CHandler from PiCN.Layers.ICNLayer.PendingInterestTable import BasePendingInterestTable from PiCN.Layers.ICNLayer.ContentStore import BaseContentStore from PiCN.Layers.ICNLayer.ForwardingInformationBase import BaseForwardingInformationBase from PiCN.Layers.LinkLayer.FaceIDTable import BaseFaceIDTable class BasicNFNLayer(LayerProcess): def __init__(self, cs: BaseContentStore, fib: BaseForwardingInformationBase, pit: BasePendingInterestTable, faceidtable: BaseFaceIDTable, comp_table: BaseNFNComputationTable, executors: Dict[str, type(BaseNFNExecutor)], parser: DefaultNFNParser, r2c_client: BaseR2CHandler, log_level: int=255): super().__init__("NFN-Layer", log_level=log_level) self.cs = cs self.fib = fib self.pit = pit self.faceidtable = faceidtable self.computation_table = comp_table self.executors = executors self.r2cclient = r2c_client self.parser: DefaultNFNParser = parser self.optimizer: BaseNFNOptimizer = ToDataFirstOptimizer(self.cs, self.fib, self.pit, self.faceidtable) def data_from_lower(self, to_lower: multiprocessing.Queue, to_higher: multiprocessing.Queue, data): if isinstance(data, list): packet_id = data[0] packet = data[1] else: packet_id = 1 packet = data if isinstance(packet, Interest): self.logger.info("Got Interest from lower: " + str(packet.name) + "; Face ID: " + str(packet_id)) self.handleInterest(packet_id, packet) elif isinstance(packet, Content): self.logger.info("Got Content from lower: " + str(packet.name)) self.handleContent(packet_id, packet) elif isinstance(packet, Nack): self.logger.info("Got Nack from lower: " + str(packet.name)) self.handleNack(packet_id, packet) def data_from_higher(self, to_lower: multiprocessing.Queue, to_higher: multiprocessing.Queue, data): pass def handleInterest(self, packet_id: int, interest: Interest): if self.r2cclient.R2C_identify_Name(interest.name): c = self.r2cclient.R2C_handle_request(interest.name, self.computation_table) if c is not None: if packet_id < 0: self.computation_table.push_data(c) else: self.queue_to_lower.put([packet_id, c]) return if interest.name.components[-1] != b"NFN": self.queue_to_lower.put([packet_id, interest]) return nfn_str, prepended_name = self.parser.network_name_to_nfn_str(interest.name) ast = self.parser.parse(nfn_str) if self.computation_table.add_computation(interest.name, packet_id, interest, ast) == False: self.logger.info("Computation already running") return self.logger.info("#Running Computations: " + str(self.computation_table.get_container_size())) required_optimizer_data = self.optimizer.required_data(interest.name, ast) self.computation_table.update_status(interest.name, NFNComputationState.FWD) if required_optimizer_data != []: raise NotImplemented("Global Optimizing not implemeted yet") return self.forwarding_descision(interest)
BSD 3-Clause New or Revised License
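handleContent's dispatch rule is: offer the content to the computation table; if nothing consumed it, pass it down the stack, otherwise re-check which computations became ready. A minimal sketch of that rule with stub objects — every class and name below is a stand-in, not a PiCN API:

class StubTable:
    def __init__(self):
        self.waiting = {"/func/_(/data)": ["/data"]}
        self.ready = []

    def push_data(self, name):
        used = False
        for comp, needs in self.waiting.items():
            if name in needs:
                needs.remove(name)
                used = True
                if not needs:
                    self.ready.append(comp)
        return used

table = StubTable()
to_lower = []

def handle_content(name):
    if not table.push_data(name):   # nobody wanted it: forward downward
        to_lower.append(name)
        return
    for comp in table.ready:        # otherwise run whatever became ready
        print("ready to execute:", comp)

handle_content("/other")   # forwarded
handle_content("/data")    # ready to execute: /func/_(/data)
print(to_lower)            # ['/other']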
rlworkgroup/garage
tests/fixtures/policies/dummy_policy.py
DummyPolicy.get_actions
python
def get_actions(self, observations):
    n = len(observations)
    action, action_info = self.get_action(None)
    return [action] * n, action_info
Get multiple actions from this policy for the input observations. Args: observations (numpy.ndarray): Observations from environment. Returns: numpy.ndarray: Predicted actions. dict: Distribution parameters.
https://github.com/rlworkgroup/garage/blob/3a578852c392cecde5b7c9786aa182d74f6df1d4/tests/fixtures/policies/dummy_policy.py#L34-L47
import numpy as np

from garage.np.policies import Policy


class DummyPolicy(Policy):

    def __init__(self, env_spec):
        self._env_spec = env_spec
        self._param = []
        self._param_values = np.random.uniform(-1, 1, 1000)

    def get_action(self, observation):
        return self.action_space.sample(), dict(dummy='dummy', mean=0.)
MIT License
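The dummy policy simply broadcasts one sampled action across the whole batch. A self-contained numpy sketch of the same broadcast, with a gym-style stub action space (all stub names here are illustrative):

import numpy as np

class StubActionSpace:
    def sample(self):
        return np.array([0.1, -0.2])

class StubPolicy:
    action_space = StubActionSpace()

    def get_action(self, observation):
        return self.action_space.sample(), dict(dummy='dummy', mean=0.)

    def get_actions(self, observations):
        # One sample, repeated once per observation in the batch.
        action, info = self.get_action(None)
        return [action] * len(observations), info

actions, info = StubPolicy().get_actions(np.zeros((5, 3)))
print(len(actions), info)  # 5 {'dummy': 'dummy', 'mean': 0.0}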
sripathikrishnan/redis-rdb-tools
rdbtools/parser.py
RdbParser._decode_module_id
python
def _decode_module_id(self, module_id):
    name = [''] * 9
    module_id >>= 10
    for i in reversed(range(9)):
        name[i] = self.charset[module_id & 63]
        module_id >>= 6
    return ''.join(name)
Decode a module id to a string, based on @antirez's moduleTypeNameByID function from redis/src/module.c. :param module_id: 64-bit integer :return: string
https://github.com/sripathikrishnan/redis-rdb-tools/blob/548b11ec3c81a603f5b321228d07a61a0b940159/rdbtools/parser.py#L942-L954
import struct import io import datetime import re from rdbtools.encodehelpers import STRING_ESCAPE_RAW, apply_escape_bytes, bval from .compat import range, str2regexp from .iowrapper import IOWrapper try: try: from cStringIO import StringIO as BytesIO except ImportError: from StringIO import StringIO as BytesIO except ImportError: from io import BytesIO try: import lzf HAS_PYTHON_LZF = True except ImportError: HAS_PYTHON_LZF = False REDIS_RDB_6BITLEN = 0 REDIS_RDB_14BITLEN = 1 REDIS_RDB_32BITLEN = 0x80 REDIS_RDB_64BITLEN = 0x81 REDIS_RDB_ENCVAL = 3 REDIS_RDB_OPCODE_MODULE_AUX = 247 REDIS_RDB_OPCODE_IDLE = 248 REDIS_RDB_OPCODE_FREQ = 249 REDIS_RDB_OPCODE_AUX = 250 REDIS_RDB_OPCODE_RESIZEDB = 251 REDIS_RDB_OPCODE_EXPIRETIME_MS = 252 REDIS_RDB_OPCODE_EXPIRETIME = 253 REDIS_RDB_OPCODE_SELECTDB = 254 REDIS_RDB_OPCODE_EOF = 255 REDIS_RDB_TYPE_STRING = 0 REDIS_RDB_TYPE_LIST = 1 REDIS_RDB_TYPE_SET = 2 REDIS_RDB_TYPE_ZSET = 3 REDIS_RDB_TYPE_HASH = 4 REDIS_RDB_TYPE_ZSET_2 = 5 REDIS_RDB_TYPE_MODULE = 6 REDIS_RDB_TYPE_MODULE_2 = 7 REDIS_RDB_TYPE_HASH_ZIPMAP = 9 REDIS_RDB_TYPE_LIST_ZIPLIST = 10 REDIS_RDB_TYPE_SET_INTSET = 11 REDIS_RDB_TYPE_ZSET_ZIPLIST = 12 REDIS_RDB_TYPE_HASH_ZIPLIST = 13 REDIS_RDB_TYPE_LIST_QUICKLIST = 14 REDIS_RDB_TYPE_STREAM_LISTPACKS = 15 REDIS_RDB_ENC_INT8 = 0 REDIS_RDB_ENC_INT16 = 1 REDIS_RDB_ENC_INT32 = 2 REDIS_RDB_ENC_LZF = 3 REDIS_RDB_MODULE_OPCODE_EOF = 0 REDIS_RDB_MODULE_OPCODE_SINT = 1 REDIS_RDB_MODULE_OPCODE_UINT = 2 REDIS_RDB_MODULE_OPCODE_FLOAT = 3 REDIS_RDB_MODULE_OPCODE_DOUBLE = 4 REDIS_RDB_MODULE_OPCODE_STRING = 5 DATA_TYPE_MAPPING = { 0 : "string", 1 : "list", 2 : "set", 3 : "sortedset", 4 : "hash", 5 : "sortedset", 6 : "module", 7: "module", 9 : "hash", 10 : "list", 11 : "set", 12 : "sortedset", 13 : "hash", 14 : "list", 15 : "stream"} class RdbCallback(object): def __init__(self, string_escape): if string_escape is None: self._escape = STRING_ESCAPE_RAW else: self._escape = string_escape def encode_key(self, key): return apply_escape_bytes(key, self._escape, skip_printable=True) def encode_value(self, val): return apply_escape_bytes(val, self._escape) def start_rdb(self): pass def aux_field(self, key, value): pass def start_database(self, db_number): pass def start_module(self, key, module_name, expiry, info): return False def handle_module_data(self, key, opcode, data): pass def end_module(self, key, buffer_size, buffer=None): pass def db_size(self, db_size, expires_size): pass def set(self, key, value, expiry, info): pass def start_hash(self, key, length, expiry, info): pass def hset(self, key, field, value): pass def end_hash(self, key): pass def start_set(self, key, cardinality, expiry, info): pass def sadd(self, key, member): pass def end_set(self, key): pass def start_list(self, key, expiry, info): pass def rpush(self, key, value): pass def end_list(self, key, info): pass def start_sorted_set(self, key, length, expiry, info): pass def zadd(self, key, score, member): pass def end_sorted_set(self, key): pass def start_stream(self, key, listpacks_count, expiry, info): pass def stream_listpack(self, key, entry_id, data): pass def end_stream(self, key, items, last_entry_id, cgroups): pass def end_database(self, db_number): pass def end_rdb(self): pass class RdbParser(object): def __init__(self, callback, filters = None) : self._callback = callback self._key = None self._expiry = None self._idle = None self._freq = None self.init_filter(filters) self._rdb_version = 0 def parse(self, filename): self.parse_fd(open(filename, "rb")) def parse_fd(self, fd): with fd as 
f: self.verify_magic_string(f.read(5)) self.verify_version(f.read(4)) self._callback.start_rdb() is_first_database = True db_number = 0 while True : self._expiry = None self._idle = None self._freq = None data_type = read_unsigned_char(f) if data_type == REDIS_RDB_OPCODE_EXPIRETIME_MS : self._expiry = read_milliseconds_time(f) data_type = read_unsigned_char(f) elif data_type == REDIS_RDB_OPCODE_EXPIRETIME : self._expiry = to_datetime(read_unsigned_int(f) * 1000000) data_type = read_unsigned_char(f) if data_type == REDIS_RDB_OPCODE_IDLE: self._idle = self.read_length(f) data_type = read_unsigned_char(f) if data_type == REDIS_RDB_OPCODE_FREQ: self._freq = read_unsigned_char(f) data_type = read_unsigned_char(f) if data_type == REDIS_RDB_OPCODE_SELECTDB : if not is_first_database : self._callback.end_database(db_number) is_first_database = False db_number = self.read_length(f) self._callback.start_database(db_number) continue if data_type == REDIS_RDB_OPCODE_AUX: aux_key = self.read_string(f) aux_val = self.read_string(f) ret = self._callback.aux_field(aux_key, aux_val) if ret: break continue if data_type == REDIS_RDB_OPCODE_RESIZEDB: db_size = self.read_length(f) expire_size = self.read_length(f) self._callback.db_size(db_size, expire_size) continue if data_type == REDIS_RDB_OPCODE_MODULE_AUX: self.read_module(f) continue if data_type == REDIS_RDB_OPCODE_EOF: self._callback.end_database(db_number) self._callback.end_rdb() if self._rdb_version >= 5: f.read(8) break if self.matches_filter(db_number): self._key = self.read_string(f) if self.matches_filter(db_number, self._key, data_type): self.read_object(f, data_type) else: self.skip_object(f, data_type) else : self.skip_key_and_object(f, data_type) self._key = None def read_length_with_encoding(self, f): length = 0 is_encoded = False bytes = [] bytes.append(read_unsigned_char(f)) enc_type = (bytes[0] & 0xC0) >> 6 if enc_type == REDIS_RDB_ENCVAL: is_encoded = True length = bytes[0] & 0x3F elif enc_type == REDIS_RDB_6BITLEN: length = bytes[0] & 0x3F elif enc_type == REDIS_RDB_14BITLEN: bytes.append(read_unsigned_char(f)) length = ((bytes[0] & 0x3F) << 8) | bytes[1] elif bytes[0] == REDIS_RDB_32BITLEN: length = read_unsigned_int_be(f) elif bytes[0] == REDIS_RDB_64BITLEN: length = read_unsigned_long_be(f) else: raise Exception('read_length_with_encoding', "Invalid string encoding %s (encoding byte 0x%X)" % (enc_type, bytes[0])) return (length, is_encoded) def read_length(self, f) : return self.read_length_with_encoding(f)[0] def read_string(self, f) : tup = self.read_length_with_encoding(f) length = tup[0] is_encoded = tup[1] val = None if is_encoded : if length == REDIS_RDB_ENC_INT8 : val = read_signed_char(f) elif length == REDIS_RDB_ENC_INT16 : val = read_signed_short(f) elif length == REDIS_RDB_ENC_INT32 : val = read_signed_int(f) elif length == REDIS_RDB_ENC_LZF : clen = self.read_length(f) l = self.read_length(f) val = self.lzf_decompress(f.read(clen), l) else: raise Exception('read_string', "Invalid string encoding %s"%(length)) else : val = f.read(length) return val def read_float(self, f): dbl_length = read_unsigned_char(f) if dbl_length == 253: return float('nan') elif dbl_length == 254: return float('inf') elif dbl_length == 255: return float('-inf') data = f.read(dbl_length) if isinstance(data, str): return float(data) return data def read_object(self, f, enc_type) : if enc_type == REDIS_RDB_TYPE_STRING : val = self.read_string(f) self._callback.set(self._key, val, self._expiry, 
info={'encoding':'string','idle':self._idle,'freq':self._freq}) elif enc_type == REDIS_RDB_TYPE_LIST : length = self.read_length(f) self._callback.start_list(self._key, self._expiry, info={'encoding':'linkedlist','idle':self._idle,'freq':self._freq}) for count in range(0, length) : val = self.read_string(f) self._callback.rpush(self._key, val) self._callback.end_list(self._key, info={'encoding':'linkedlist' }) elif enc_type == REDIS_RDB_TYPE_SET: length = self.read_length(f) self._callback.start_set(self._key, length, self._expiry, info={'encoding':'hashtable','idle':self._idle,'freq':self._freq}) for count in range(0, length): val = self.read_string(f) self._callback.sadd(self._key, val) self._callback.end_set(self._key) elif enc_type == REDIS_RDB_TYPE_ZSET or enc_type == REDIS_RDB_TYPE_ZSET_2 : length = self.read_length(f) self._callback.start_sorted_set(self._key, length, self._expiry, info={'encoding':'skiplist','idle':self._idle,'freq':self._freq}) for count in range(0, length): val = self.read_string(f) score = read_binary_double(f) if enc_type == REDIS_RDB_TYPE_ZSET_2 else self.read_float(f) self._callback.zadd(self._key, score, val) self._callback.end_sorted_set(self._key) elif enc_type == REDIS_RDB_TYPE_HASH: length = self.read_length(f) self._callback.start_hash(self._key, length, self._expiry, info={'encoding':'hashtable','idle':self._idle,'freq':self._freq}) for count in range(0, length): field = self.read_string(f) value = self.read_string(f) self._callback.hset(self._key, field, value) self._callback.end_hash(self._key) elif enc_type == REDIS_RDB_TYPE_HASH_ZIPMAP: self.read_zipmap(f) elif enc_type == REDIS_RDB_TYPE_LIST_ZIPLIST: self.read_ziplist(f) elif enc_type == REDIS_RDB_TYPE_SET_INTSET: self.read_intset(f) elif enc_type == REDIS_RDB_TYPE_ZSET_ZIPLIST: self.read_zset_from_ziplist(f) elif enc_type == REDIS_RDB_TYPE_HASH_ZIPLIST: self.read_hash_from_ziplist(f) elif enc_type == REDIS_RDB_TYPE_LIST_QUICKLIST: self.read_list_from_quicklist(f) elif enc_type == REDIS_RDB_TYPE_MODULE: raise Exception('read_object', 'Unable to read Redis Modules RDB objects (key %s)' % self._key) elif enc_type == REDIS_RDB_TYPE_MODULE_2: self.read_module(f) elif enc_type == REDIS_RDB_TYPE_STREAM_LISTPACKS: self.read_stream(f) else: raise Exception('read_object', 'Invalid object type %d for key %s' % (enc_type, self._key)) def skip_key_and_object(self, f, data_type): self.skip_string(f) self.skip_object(f, data_type) def skip_string(self, f): tup = self.read_length_with_encoding(f) length = tup[0] is_encoded = tup[1] bytes_to_skip = 0 if is_encoded : if length == REDIS_RDB_ENC_INT8 : bytes_to_skip = 1 elif length == REDIS_RDB_ENC_INT16 : bytes_to_skip = 2 elif length == REDIS_RDB_ENC_INT32 : bytes_to_skip = 4 elif length == REDIS_RDB_ENC_LZF : clen = self.read_length(f) l = self.read_length(f) bytes_to_skip = clen else : bytes_to_skip = length skip(f, bytes_to_skip) def skip_float(self, f): dbl_length = read_unsigned_char(f) if dbl_length < 253: skip(f, dbl_length) def skip_binary_double(self, f): skip(f, 8) def skip_object(self, f, enc_type): skip_strings = 0 if enc_type == REDIS_RDB_TYPE_STRING : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_LIST : skip_strings = self.read_length(f) elif enc_type == REDIS_RDB_TYPE_SET : skip_strings = self.read_length(f) elif enc_type == REDIS_RDB_TYPE_ZSET or enc_type == REDIS_RDB_TYPE_ZSET_2 : length = self.read_length(f) for x in range(length): self.skip_string(f) self.skip_binary_double(f) if enc_type == REDIS_RDB_TYPE_ZSET_2 else self.skip_float(f) elif 
enc_type == REDIS_RDB_TYPE_HASH : skip_strings = self.read_length(f) * 2 elif enc_type == REDIS_RDB_TYPE_HASH_ZIPMAP : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_LIST_ZIPLIST : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_SET_INTSET : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_ZSET_ZIPLIST : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_HASH_ZIPLIST : skip_strings = 1 elif enc_type == REDIS_RDB_TYPE_LIST_QUICKLIST: skip_strings = self.read_length(f) elif enc_type == REDIS_RDB_TYPE_MODULE: raise Exception('skip_object', 'Unable to skip Redis Modules RDB objects (key %s)' % self._key) elif enc_type == REDIS_RDB_TYPE_MODULE_2: self.skip_module(f) elif enc_type == REDIS_RDB_TYPE_STREAM_LISTPACKS: self.skip_stream(f) else: raise Exception('skip_object', 'Invalid object type %d for key %s' % (enc_type, self._key)) for x in range(0, skip_strings): self.skip_string(f) def read_intset(self, f) : raw_string = self.read_string(f) buff = BytesIO(raw_string) encoding = read_unsigned_int(buff) num_entries = read_unsigned_int(buff) self._callback.start_set(self._key, num_entries, self._expiry, info={'encoding':'intset', 'sizeof_value':len(raw_string),'idle':self._idle,'freq':self._freq}) for x in range(0, num_entries) : if encoding == 8 : entry = read_signed_long(buff) elif encoding == 4 : entry = read_signed_int(buff) elif encoding == 2 : entry = read_signed_short(buff) else : raise Exception('read_intset', 'Invalid encoding %d for key %s' % (encoding, self._key)) self._callback.sadd(self._key, entry) self._callback.end_set(self._key) def read_ziplist(self, f) : raw_string = self.read_string(f) buff = BytesIO(raw_string) zlbytes = read_unsigned_int(buff) tail_offset = read_unsigned_int(buff) num_entries = read_unsigned_short(buff) self._callback.start_list(self._key, self._expiry, info={'encoding':'ziplist', 'sizeof_value':len(raw_string),'idle':self._idle,'freq':self._freq}) for x in range(0, num_entries) : val = self.read_ziplist_entry(buff) self._callback.rpush(self._key, val) zlist_end = read_unsigned_char(buff) if zlist_end != 255 : raise Exception('read_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key)) self._callback.end_list(self._key, info={'encoding':'ziplist'}) def read_list_from_quicklist(self, f): count = self.read_length(f) total_size = 0 self._callback.start_list(self._key, self._expiry, info={'encoding': 'quicklist', 'zips': count,'idle':self._idle,'freq':self._freq}) for i in range(0, count): raw_string = self.read_string(f) total_size += len(raw_string) buff = BytesIO(raw_string) zlbytes = read_unsigned_int(buff) tail_offset = read_unsigned_int(buff) num_entries = read_unsigned_short(buff) for x in range(0, num_entries): self._callback.rpush(self._key, self.read_ziplist_entry(buff)) zlist_end = read_unsigned_char(buff) if zlist_end != 255: raise Exception('read_quicklist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key)) self._callback.end_list(self._key, info={'encoding': 'quicklist', 'zips': count, 'sizeof_value': total_size}) def read_zset_from_ziplist(self, f) : raw_string = self.read_string(f) buff = BytesIO(raw_string) zlbytes = read_unsigned_int(buff) tail_offset = read_unsigned_int(buff) num_entries = read_unsigned_short(buff) if (num_entries % 2) : raise Exception('read_zset_from_ziplist', "Expected even number of elements, but found %d for key %s" % (num_entries, self._key)) num_entries = num_entries // 2 self._callback.start_sorted_set(self._key, num_entries, self._expiry, info={'encoding':'ziplist', 
'sizeof_value':len(raw_string),'idle':self._idle,'freq':self._freq}) for x in range(0, num_entries) : member = self.read_ziplist_entry(buff) score = self.read_ziplist_entry(buff) if isinstance(score, bytes) : score = float(score) self._callback.zadd(self._key, score, member) zlist_end = read_unsigned_char(buff) if zlist_end != 255 : raise Exception('read_zset_from_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key)) self._callback.end_sorted_set(self._key) def read_hash_from_ziplist(self, f) : raw_string = self.read_string(f) buff = BytesIO(raw_string) zlbytes = read_unsigned_int(buff) tail_offset = read_unsigned_int(buff) num_entries = read_unsigned_short(buff) if (num_entries % 2) : raise Exception('read_hash_from_ziplist', "Expected even number of elements, but found %d for key %s" % (num_entries, self._key)) num_entries = num_entries // 2 self._callback.start_hash(self._key, num_entries, self._expiry, info={'encoding':'ziplist', 'sizeof_value':len(raw_string),'idle':self._idle,'freq':self._freq}) for x in range(0, num_entries) : field = self.read_ziplist_entry(buff) value = self.read_ziplist_entry(buff) self._callback.hset(self._key, field, value) zlist_end = read_unsigned_char(buff) if zlist_end != 255 : raise Exception('read_hash_from_ziplist', "Invalid zip list end - %d for key %s" % (zlist_end, self._key)) self._callback.end_hash(self._key) def read_ziplist_entry(self, f) : length = 0 value = None prev_length = read_unsigned_char(f) if prev_length == 254 : prev_length = read_unsigned_int(f) entry_header = read_unsigned_char(f) if (entry_header >> 6) == 0 : length = entry_header & 0x3F value = f.read(length) elif (entry_header >> 6) == 1 : length = ((entry_header & 0x3F) << 8) | read_unsigned_char(f) value = f.read(length) elif (entry_header >> 6) == 2 : length = read_unsigned_int_be(f) value = f.read(length) elif (entry_header >> 4) == 12 : value = read_signed_short(f) elif (entry_header >> 4) == 13 : value = read_signed_int(f) elif (entry_header >> 4) == 14 : value = read_signed_long(f) elif (entry_header == 240) : value = read_24bit_signed_number(f) elif (entry_header == 254) : value = read_signed_char(f) elif (entry_header >= 241 and entry_header <= 253) : value = entry_header - 241 else : raise Exception('read_ziplist_entry', 'Invalid entry_header %d for key %s' % (entry_header, self._key)) return value def read_zipmap(self, f) : raw_string = self.read_string(f) buff = io.BytesIO(bytearray(raw_string)) num_entries = read_unsigned_char(buff) self._callback.start_hash(self._key, num_entries, self._expiry, info={'encoding':'zipmap', 'sizeof_value':len(raw_string),'idle':self._idle,'freq':self._freq}) while True : next_length = self.read_zipmap_next_length(buff) if next_length is None : break key = buff.read(next_length) next_length = self.read_zipmap_next_length(buff) if next_length is None : raise Exception('read_zip_map', 'Unexepcted end of zip map for key %s' % self._key) free = read_unsigned_char(buff) value = buff.read(next_length) try: value = int(value) except ValueError: pass skip(buff, free) self._callback.hset(self._key, key, value) self._callback.end_hash(self._key) def read_zipmap_next_length(self, f) : num = read_unsigned_char(f) if num < 254: return num elif num == 254: return read_unsigned_int(f) else: return None def skip_module(self, f): self.read_length_with_encoding(f) opcode = self.read_length(f) while opcode != REDIS_RDB_MODULE_OPCODE_EOF: if opcode == REDIS_RDB_MODULE_OPCODE_SINT or opcode == REDIS_RDB_MODULE_OPCODE_UINT: 
self.read_length(f) elif opcode == REDIS_RDB_MODULE_OPCODE_FLOAT: read_binary_float(f) elif opcode == REDIS_RDB_MODULE_OPCODE_DOUBLE: read_binary_double(f) elif opcode == REDIS_RDB_MODULE_OPCODE_STRING: self.skip_string(f) else: raise Exception("Unknown module opcode %s" % opcode) opcode = self.read_length(f) def read_module(self, f): iowrapper = IOWrapper(f) iowrapper.start_recording_size() iowrapper.start_recording() length, encoding = self.read_length_with_encoding(iowrapper) record_buffer = self._callback.start_module(self._key, self._decode_module_id(length), self._expiry, info={'idle':self._idle, 'freq':self._freq}) if not record_buffer: iowrapper.stop_recording() opcode = self.read_length(iowrapper) while opcode != REDIS_RDB_MODULE_OPCODE_EOF: if opcode == REDIS_RDB_MODULE_OPCODE_SINT or opcode == REDIS_RDB_MODULE_OPCODE_UINT: data = self.read_length(iowrapper) elif opcode == REDIS_RDB_MODULE_OPCODE_FLOAT: data = read_binary_float(iowrapper) elif opcode == REDIS_RDB_MODULE_OPCODE_DOUBLE: data = read_binary_double(iowrapper) elif opcode == REDIS_RDB_MODULE_OPCODE_STRING: data = self.read_string(iowrapper) else: raise Exception("Unknown module opcode %s" % opcode) self._callback.handle_module_data(self._key, opcode, data) opcode = self.read_length(iowrapper) buffer = None if record_buffer: buffer = struct.pack('B', REDIS_RDB_TYPE_MODULE_2) + iowrapper.get_recorded_buffer() iowrapper.stop_recording() self._callback.end_module(self._key, buffer_size=iowrapper.get_recorded_size(), buffer=buffer) def skip_stream(self, f): listpacks = self.read_length(f) for _lp in range(listpacks): self.skip_string(f) self.skip_string(f) self.read_length(f) self.read_length(f) self.read_length(f) cgroups = self.read_length(f) for _cg in range(cgroups): self.skip_string(f) self.read_length(f) self.read_length(f) pending = self.read_length(f) for _pel in range(pending): f.read(16) f.read(8) self.read_length(f) consumers = self.read_length(f) for _c in range(consumers): self.skip_string(f) f.read(8) pending = self.read_length(f) f.read(pending*16) def read_stream(self, f): listpacks = self.read_length(f) self._callback.start_stream(self._key, listpacks, self._expiry, info={'encoding': 'listpack', 'idle': self._idle, 'freq': self._freq}) for _lp in range(listpacks): self._callback.stream_listpack(self._key, self.read_string(f), self.read_string(f)) items = self.read_length(f) last_entry_id = "%s-%s" % (self.read_length(f), self.read_length(f)) cgroups = self.read_length(f) cgroups_data = [] for _cg in range(cgroups): cgname = self.read_string(f) last_cg_entry_id = "%s-%s" % (self.read_length(f), self.read_length(f)) pending = self.read_length(f) group_pending_entries = [] for _pel in range(pending): eid = f.read(16) delivery_time = read_milliseconds_time(f) delivery_count = self.read_length(f) group_pending_entries.append({'id': eid, 'delivery_time': delivery_time, 'delivery_count': delivery_count}) consumers = self.read_length(f) consumers_data = [] for _c in range(consumers): cname = self.read_string(f) seen_time = read_milliseconds_time(f) pending = self.read_length(f) consumer_pending_entries = [] for _pel in range( pending): eid = f.read(16) consumer_pending_entries.append({'id': eid}) consumers_data.append({'name': cname, 'seen_time': seen_time, 'pending': consumer_pending_entries}) cgroups_data.append({'name': cgname, 'last_entry_id': last_cg_entry_id, 'pending': group_pending_entries, 'consumers': consumers_data}) self._callback.end_stream(self._key, items, last_entry_id, cgroups_data) charset = 
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'
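The ziplist entry decoding in read_ziplist_entry above is easiest to see on concrete bytes. Below is a minimal, self-contained sketch of just the 6-bit-length string case; the helper name and the example byte string are illustrative, not from this file:

import io
import struct

def decode_small_string_entry(buff):
    # prevlen byte; 254 means the real prevlen follows in 4 little-endian bytes
    prev_length = buff.read(1)[0]
    if prev_length == 254:
        struct.unpack('<I', buff.read(4))  # value unused in this sketch
    header = buff.read(1)[0]
    # two high header bits 00 -> low 6 bits give the inline string length
    assert (header >> 6) == 0, "sketch handles only 6-bit string lengths"
    return buff.read(header & 0x3F)

print(decode_small_string_entry(io.BytesIO(b'\x00\x03foo')))  # b'foo'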
MIT License
blent-ai/pycopula
pycopula/copula.py
Copula.cdf
python
def cdf(self, x):
    self._check_dimension(x)
    if self.name == 'indep':
        return np.prod(x)
    elif self.name == 'frechet_up':
        return min(x)
    elif self.name == 'frechet_down':
        return max(sum(x) - self.dim + 1., 0)
Returns the cumulative distribution function (CDF) of the copula evaluated at x.

Parameters
----------
x : numpy array (of size d)
    Values at which to compute the CDF.
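A minimal usage sketch of the three built-in cases, assuming the Copula class above (the input values are illustrative):

from pycopula.copula import Copula

print(Copula(dim=2, name='indep').cdf([0.7, 0.6]))         # 0.42  (u * v)
print(Copula(dim=2, name='frechet_up').cdf([0.7, 0.6]))    # 0.6   (min(u, v))
print(Copula(dim=2, name='frechet_down').cdf([0.7, 0.6]))  # ~0.3  (max(u + v - 1, 0))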
https://github.com/blent-ai/pycopula/blob/697e2fb6a6717696d6bb7876990fa78f31456a8e/pycopula/copula.py#L112-L127
__author__ = "Maxime Jumelle" __license__ = "Apache 2.0" __maintainer__ = "Maxime Jumelle" __email__ = "maxime@aipcloud.io" from . import archimedean_generators as generators from . import math_misc from .math_misc import multivariate_t_distribution from . import estimation import numpy as np from numpy.linalg import inv import scipy import scipy.misc from scipy.stats import kendalltau, pearsonr, spearmanr, norm, t, multivariate_normal from scipy.linalg import sqrtm from scipy.optimize import fsolve import scipy.integrate as integrate class Copula(): def __init__(self, dim=2, name='indep'): if dim < 2 or int(dim) != dim: raise ValueError("Copula dimension must be an integer greater than 1.") self.dim = dim self.name = name self.kendall = None self.pearson = None self.spearman = None def __str__(self): return "Copula ({0}).".format(self.name) def _check_dimension(self, x): if len(x) != self.dim: raise ValueError("Expected vector of dimension {0}, get vector of dimension {1}".format(self.dim, len(x))) def dimension(self): return self.dim def correlations(self, X): if self.dim != 2: raise Exception("Correlations can not be computed when dimension is greater than 2.") self.kendall = kendalltau(X[:,0], X[:,1])[0] self.pearson = pearsonr(X[:,0], X[:,1])[0] self.spearman = spearmanr(X[:,0], X[:,1])[0] return self.kendall, self.pearson, self.spearman def kendall(self): if self.kendall == None: raise ValueError("You must compute correlations before accessing to Kendall's tau.") return self.kendall def pearson(self): if self.pearson == None: raise ValueError("You must compute correlations before accessing to Pearson's r.") return self.pearson def spearman(self): if self.pearson == None: raise ValueError("You must compute correlations before accessing to Spearman's rho.") return self.spearman
Apache License 2.0
google/mobly
mobly/expects.py
_ExpectErrorRecorder.has_error
python
def has_error(self):
    return self._count > 0
If any error has been recorded since the last reset.
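A minimal sketch of the property, using only the internals shown in this file; the manual _count bump stands in for whatever code records an error:

recorder = _ExpectErrorRecorder()
assert not recorder.has_error      # fresh recorder: count is 0
recorder._count += 1               # normally incremented when an error is recorded
assert recorder.has_error
recorder.reset_internal_states()
assert not recorder.has_error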
https://github.com/google/mobly/blob/542a78a7198256d172f56546ab8a6493166b3d9b/mobly/expects.py#L53-L55
import contextlib
import logging
import time

from mobly import asserts
from mobly import records
from mobly import signals

DEFAULT_TEST_RESULT_RECORD = records.TestResultRecord('mobly', 'global')


class _ExpectErrorRecorder:

    def __init__(self, record=None):
        self.reset_internal_states(record=record)

    def reset_internal_states(self, record=None):
        self._record = None
        self._count = 0
        self._record = record

    @property
Apache License 2.0
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1_resource_field_selector.py
V1ResourceFieldSelector.resource
python
def resource(self):
    return self._resource
Gets the resource of this V1ResourceFieldSelector.  # noqa: E501

Required: resource to select  # noqa: E501

:return: The resource of this V1ResourceFieldSelector.  # noqa: E501
:rtype: str
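A minimal construction sketch; 'limits.cpu' and the '1m' divisor are illustrative values, not taken from this file:

from kubernetes_asyncio.client.models.v1_resource_field_selector import V1ResourceFieldSelector

selector = V1ResourceFieldSelector(resource='limits.cpu', divisor='1m')
print(selector.resource)  # 'limits.cpu'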
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1_resource_field_selector.py#L111-L119
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1ResourceFieldSelector(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'container_name': 'str', 'divisor': 'str', 'resource': 'str' } attribute_map = { 'container_name': 'containerName', 'divisor': 'divisor', 'resource': 'resource' } def __init__(self, container_name=None, divisor=None, resource=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._container_name = None self._divisor = None self._resource = None self.discriminator = None if container_name is not None: self.container_name = container_name if divisor is not None: self.divisor = divisor self.resource = resource @property def container_name(self): return self._container_name @container_name.setter def container_name(self, container_name): self._container_name = container_name @property def divisor(self): return self._divisor @divisor.setter def divisor(self, divisor): self._divisor = divisor @property
Apache License 2.0
trevor/calendarserver
txdav/caldav/datastore/scheduling/caldav/scheduler.py
CalDAVScheduler.checkOrganizer
python
def checkOrganizer(self):
    organizer = self.calendar.getOrganizer()
    if organizer:
        organizerAddress = yield calendarUserFromCalendarUserAddress(organizer, self.txn)
        if organizerAddress.hosted():
            if organizerAddress.validOriginator():
                if self.checkForFreeBusy() and not organizerAddress.record.enabledAsOrganizer():
                    log.error("ORGANIZER not allowed to be an Organizer: {cal}", cal=self.calendar,)
                    raise HTTPError(self.errorResponse(
                        responsecode.FORBIDDEN,
                        self.errorElements["organizer-denied"],
                        "Organizer cannot schedule",
                    ))
                self.organizer = organizerAddress
            else:
                log.error("No scheduling for ORGANIZER: {o}", o=organizer,)
                raise HTTPError(self.errorResponse(
                    responsecode.FORBIDDEN,
                    self.errorElements["organizer-denied"],
                    "Organizer cannot schedule",
                ))
        else:
            localUser = (yield addressmapping.mapper.isCalendarUserInMyDomain(organizer))
            if localUser:
                log.error("No principal for ORGANIZER in calendar data: {cal}", cal=self.calendar,)
                raise HTTPError(self.errorResponse(
                    responsecode.FORBIDDEN,
                    self.errorElements["organizer-denied"],
                    "No principal for organizer",
                ))
            else:
                self.organizer = organizerAddress
    else:
        log.error("ORGANIZER missing in calendar data: {cal}", cal=self.calendar,)
        raise HTTPError(self.errorResponse(
            responsecode.FORBIDDEN,
            self.errorElements["invalid-scheduling-message"],
            "Missing organizer",
        ))
Check the validity of the ORGANIZER value. ORGANIZER must be local.
https://github.com/trevor/calendarserver/blob/c9970b06a70445ca75b62e3d170c26bc897a035e/txdav/caldav/datastore/scheduling/caldav/scheduler.py#L167-L215
from twext.python.log import Logger from txweb2 import responsecode from txweb2.dav.http import ErrorResponse from txweb2.http import HTTPError, StatusResponse from twisted.internet.defer import inlineCallbacks from twistedcaldav.caldavxml import caldav_namespace from twistedcaldav.config import config from txdav.caldav.datastore.scheduling import addressmapping from txdav.caldav.datastore.scheduling.cuaddress import LocalCalendarUser, OtherServerCalendarUser, InvalidCalendarUser, calendarUserFromCalendarUserAddress from txdav.caldav.datastore.scheduling.scheduler import Scheduler, ScheduleResponseQueue __all__ = [ "CalDAVScheduler", ] log = Logger() class CalDAVScheduler(Scheduler): scheduleResponse = ScheduleResponseQueue errorResponse = ErrorResponse errorElements = { "originator-missing": (caldav_namespace, "originator-specified"), "originator-invalid": (caldav_namespace, "originator-allowed"), "originator-denied": (caldav_namespace, "originator-allowed"), "recipient-missing": (caldav_namespace, "recipient-specified"), "recipient-invalid": (caldav_namespace, "recipient-exists"), "organizer-denied": (caldav_namespace, "organizer-allowed"), "attendee-denied": (caldav_namespace, "attendee-allowed"), "invalid-calendar-data-type": (caldav_namespace, "supported-calendar-data"), "invalid-calendar-data": (caldav_namespace, "valid-calendar-data"), "invalid-scheduling-message": (caldav_namespace, "valid-calendar-data"), "max-recipients": (caldav_namespace, "recipient-limit"), } def __init__(self, txn, originator_uid, **kwargs): super(CalDAVScheduler, self).__init__(txn, originator_uid, **kwargs) self.doingPOST = False def doSchedulingViaPOST(self, originator, recipients, calendar): self.doingPOST = True return super(CalDAVScheduler, self).doSchedulingViaPOST(originator, recipients, calendar) def checkAuthorization(self): if not self.internal_request and self.originator_uid == None: log.error( "Unauthenticated originators not allowed: {o}", o=self.originator, ) raise HTTPError(self.errorResponse( responsecode.FORBIDDEN, self.errorElements["originator-denied"], "Invalid originator", )) @inlineCallbacks def checkOriginator(self): originatorAddress = yield calendarUserFromCalendarUserAddress(self.originator, self.txn) if not originatorAddress.hosted(): log.error( "Could not find principal for originator: {o}", o=self.originator, ) raise HTTPError(self.errorResponse( responsecode.FORBIDDEN, self.errorElements["originator-denied"], "No principal for originator", )) else: if not originatorAddress.validOriginator() or isinstance(originatorAddress, OtherServerCalendarUser): log.error( "Originator not enabled or hosted on this server: {o}", o=self.originator, ) raise HTTPError(self.errorResponse( responsecode.FORBIDDEN, self.errorElements["originator-denied"], "Originator cannot be scheduled", )) self.originator = originatorAddress @inlineCallbacks def checkRecipients(self): results = [] for recipient in self.recipients: recipientAddress = yield calendarUserFromCalendarUserAddress(recipient, self.txn) if not recipientAddress.hosted(): if isinstance(recipientAddress, InvalidCalendarUser): log.error("Unknown calendar user address: {r}", r=recipient,) results.append(recipientAddress) else: inbox = None if recipientAddress.validRecipient(): if isinstance(recipientAddress, LocalCalendarUser): recipient_home = yield self.txn.calendarHomeWithUID(recipientAddress.record.uid, create=True) if recipient_home: inbox = (yield recipient_home.calendarWithName("inbox")) else: inbox = "dummy" recipientAddress.inbox = 
inbox if inbox: results.append(recipientAddress) else: log.error("No scheduling for calendar user: {r}", r=recipient,) results.append(InvalidCalendarUser(recipient)) self.recipients = results @inlineCallbacks
Apache License 2.0
gnosis/safe-transaction-service
safe_transaction_service/utils/tasks.py
only_one_running_task
python
def only_one_running_task(
    task: CeleryTask,
    lock_name_suffix: Optional[str] = None,
    blocking_timeout: int = 1,
    lock_timeout: Optional[int] = LOCK_TIMEOUT,
):
    if WORKER_STOPPED:
        raise LockError("Worker is stopping")
    redis = get_redis()
    lock_name = f"tasks:{task.name}"
    if lock_name_suffix:
        lock_name = f"{lock_name}:{lock_name_suffix}"
    with redis.lock(
        lock_name, blocking_timeout=blocking_timeout, timeout=lock_timeout
    ) as lock:
        ACTIVE_LOCKS.add(lock_name)
        yield lock
        ACTIVE_LOCKS.remove(lock_name)
        close_gevent_db_connection()
Ensures that only one instance of the task runs at the same time, using the `task` name as a unique key.

:param task: CeleryTask
:param lock_name_suffix: A suffix for the lock name, for the case where the same task can run at the same time with different arguments
:param blocking_timeout: Blocking wait timeout; it should be as small as possible so that the worker can release the task quickly
:param lock_timeout: How long the lock will be stored, in case the worker is halted, so the key is not stored forever in Redis
:return: Instance of redis `Lock`
:raises: LockError if the lock cannot be acquired
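A hedged usage sketch (the task name and body are hypothetical). With bind=True, Celery passes the task instance, whose .name keys the Redis lock:

from celery import shared_task
from redis.exceptions import LockError

from safe_transaction_service.utils.tasks import only_one_running_task

@shared_task(bind=True)
def reindex(self):
    try:
        with only_one_running_task(self):
            ...  # runs only while holding the Redis lock "tasks:reindex"
    except LockError:
        pass  # another worker already holds the lock; skip this run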
https://github.com/gnosis/safe-transaction-service/blob/b26efe58f1c4bf89c5461c38065bc3c51ba4af04/safe_transaction_service/utils/tasks.py#L55-L85
import contextlib
from typing import Optional, Set

import gevent
from celery.app.task import Task as CeleryTask
from celery.signals import celeryd_init, worker_shutting_down
from celery.utils.log import get_task_logger
from redis.exceptions import LockError

from .redis import get_redis
from .utils import close_gevent_db_connection

logger = get_task_logger(__name__)

LOCK_TIMEOUT = 60 * 15
SOFT_TIMEOUT = 60 * 10
ACTIVE_LOCKS: Set[str] = set()
WORKER_STOPPED = set()


@celeryd_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
    def patch_psycopg():
        try:
            from psycogreen.gevent import patch_psycopg
            logger.info("Patching psycopg for gevent")
            patch_psycopg()
        except ImportError:
            pass

    patch_psycopg()


@worker_shutting_down.connect
def worker_shutting_down_handler(sig, how, exitcode, **kwargs):
    logger.warning("Worker shutting down")
    gevent.spawn(shutdown_worker)


def shutdown_worker():
    WORKER_STOPPED.add(True)
    if ACTIVE_LOCKS:
        logger.warning("Force releasing of redis locks %s", ACTIVE_LOCKS)
        get_redis().delete(*ACTIVE_LOCKS)
        logger.warning("Released redis locks")
    else:
        logger.warning("No redis locks to release")


@contextlib.contextmanager
MIT License
lrq3000/pyfilefixity
pyFileFixity/lib/profilers/visual/profilehooks.py
TraceFuncCoverage.__call__
python
def __call__(self, *args, **kw):
    self.ncalls += 1
    if TraceFuncCoverage.tracing:
        return self.fn(*args, **kw)
    try:
        TraceFuncCoverage.tracing = True
        return self.tracer.runfunc(self.fn, *args, **kw)
    finally:
        TraceFuncCoverage.tracing = False
Profile a single call to the function.
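A hedged usage sketch of the coverage decorator defined earlier in this module (the function and the import path are illustrative; inside this repo the module lives under pyFileFixity/lib/profilers/visual/). The per-line hit counts are printed at interpreter exit via atexit:

from profilehooks import coverage

@coverage
def count_down(n):
    while n > 0:   # each executed line gets a hit count in the report
        n -= 1

count_down(3)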
https://github.com/lrq3000/pyfilefixity/blob/fd5ef23bb13835faf1e3baa773619b86a1cc9bdf/pyFileFixity/lib/profilers/visual/profilehooks.py#L558-L567
__author__ = "Marius Gedminas (marius@gedmin.as)" __copyright__ = "Copyright 2004-2012 Marius Gedminas" __license__ = "MIT" __version__ = "1.6" __date__ = "2012-05-05" import atexit import inspect import sys import re from profile import Profile import pstats try: import hotshot import hotshot.stats except ImportError: hotshot = None import trace if hotshot is not None: import _hotshot import hotshot.log try: import cProfile except ImportError: cProfile = None import time AVAILABLE_PROFILERS = {} def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False, sort=None, entries=40, profiler=('cProfile', 'profile', 'hotshot')): if fn is None: def decorator(fn): return profile(fn, skip=skip, filename=filename, immediate=immediate, dirs=dirs, sort=sort, entries=entries, profiler=profiler) return decorator if isinstance(profiler, str): profiler = [profiler] for p in profiler: if p in AVAILABLE_PROFILERS: profiler_class = AVAILABLE_PROFILERS[p] break else: raise ValueError('only these profilers are available: %s' % ', '.join(AVAILABLE_PROFILERS)) fp = profiler_class(fn, skip=skip, filename=filename, immediate=immediate, dirs=dirs, sort=sort, entries=entries) def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ new_fn.__name__ = fn.__name__ new_fn.__dict__ = fn.__dict__ new_fn.__module__ = fn.__module__ return new_fn def coverage(fn): fp = TraceFuncCoverage(fn) def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ new_fn.__name__ = fn.__name__ new_fn.__dict__ = fn.__dict__ new_fn.__module__ = fn.__module__ return new_fn def coverage_with_hotshot(fn): fp = HotShotFuncCoverage(fn) def new_fn(*args, **kw): return fp(*args, **kw) new_fn.__doc__ = fn.__doc__ new_fn.__name__ = fn.__name__ new_fn.__dict__ = fn.__dict__ new_fn.__module__ = fn.__module__ return new_fn class FuncProfile(object): in_profiler = False Profile = Profile def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False, sort=None, entries=40): self.fn = fn self.skip = skip self.filename = filename self.immediate = immediate self.dirs = dirs self.sort = sort or ('cumulative', 'time', 'calls') if isinstance(self.sort, str): self.sort = (self.sort, ) self.entries = entries self.reset_stats() atexit.register(self.atexit) def __call__(self, *args, **kw): self.ncalls += 1 if self.skip > 0: self.skip -= 1 self.skipped += 1 return self.fn(*args, **kw) if FuncProfile.in_profiler: return self.fn(*args, **kw) profiler = self.Profile() try: FuncProfile.in_profiler = True return profiler.runcall(self.fn, *args, **kw) finally: FuncProfile.in_profiler = False self.stats.add(profiler) if self.immediate: self.print_stats() self.reset_stats() def print_stats(self): funcname = self.fn.__name__ filename = self.fn.__code__.co_filename lineno = self.fn.__code__.co_firstlineno print("") print("*** PROFILER RESULTS ***") print("%s (%s:%s)" % (funcname, filename, lineno)) if self.skipped: skipped = "(%d calls not profiled)" % self.skipped else: skipped = "" print("function called %d times%s" % (self.ncalls, skipped)) print("") stats = self.stats if self.filename: stats.dump_stats(self.filename) if not self.dirs: stats.strip_dirs() stats.sort_stats(*self.sort) stats.print_stats(self.entries) def reset_stats(self): self.stats = pstats.Stats(Profile()) self.ncalls = 0 self.skipped = 0 def atexit(self): if not self.immediate: self.print_stats() AVAILABLE_PROFILERS['profile'] = FuncProfile if cProfile is not None: class CProfileFuncProfile(FuncProfile): Profile = cProfile.Profile 
AVAILABLE_PROFILERS['cProfile'] = CProfileFuncProfile if hotshot is not None: class HotShotFuncProfile(object): in_profiler = False def __init__(self, fn, skip=0, filename=None): self.fn = fn self.filename = filename if self.filename: self.logfilename = filename + ".raw" else: self.logfilename = fn.__name__ + ".prof" self.profiler = hotshot.Profile(self.logfilename) self.ncalls = 0 self.skip = skip self.skipped = 0 atexit.register(self.atexit) def __call__(self, *args, **kw): self.ncalls += 1 if self.skip > 0: self.skip -= 1 self.skipped += 1 return self.fn(*args, **kw) if HotShotFuncProfile.in_profiler: return self.fn(*args, **kw) try: HotShotFuncProfile.in_profiler = True return self.profiler.runcall(self.fn, *args, **kw) finally: HotShotFuncProfile.in_profiler = False def atexit(self): self.profiler.close() funcname = self.fn.__name__ filename = self.fn.__code__.co_filename lineno = self.fn.__code__.co_firstlineno print("") print("*** PROFILER RESULTS ***") print("%s (%s:%s)" % (funcname, filename, lineno)) if self.skipped: skipped = "(%d calls not profiled)" % self.skipped else: skipped = "" print("function called %d times%s" % (self.ncalls, skipped)) print("") stats = hotshot.stats.load(self.logfilename) if self.filename: stats.dump_stats(self.filename) stats.strip_dirs() stats.sort_stats('cumulative', 'time', 'calls') stats.print_stats(40) AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile class HotShotFuncCoverage: def __init__(self, fn): self.fn = fn self.logfilename = fn.__name__ + ".cprof" self.profiler = _hotshot.coverage(self.logfilename) self.ncalls = 0 atexit.register(self.atexit) def __call__(self, *args, **kw): self.ncalls += 1 return self.profiler.runcall(self.fn, args, kw) def atexit(self): self.profiler.close() funcname = self.fn.__name__ filename = self.fn.__code__.co_filename lineno = self.fn.__code__.co_firstlineno print("") print("*** COVERAGE RESULTS ***") print("%s (%s:%s)" % (funcname, filename, lineno)) print("function called %d times" % self.ncalls) print("") fs = FuncSource(self.fn) reader = hotshot.log.LogReader(self.logfilename) for what, (filename, lineno, funcname), tdelta in reader: if filename != fs.filename: continue if what == hotshot.log.LINE: fs.mark(lineno) if what == hotshot.log.ENTER: if lineno == fs.firstlineno: lineno = fs.firstcodelineno fs.mark(lineno) reader.close() print(fs) class TraceFuncCoverage: tracer = trace.Trace(count=True, trace=False, ignoredirs=[sys.prefix, sys.exec_prefix]) tracing = False def __init__(self, fn): self.fn = fn self.logfilename = fn.__name__ + ".cprof" self.ncalls = 0 atexit.register(self.atexit)
MIT License
olitheolix/aiokubernetes
aiokubernetes/models/v1_pod.py
V1Pod.api_version
python
def api_version(self):
    return self._api_version
Gets the api_version of this V1Pod.  # noqa: E501

APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources  # noqa: E501

:return: The api_version of this V1Pod.  # noqa: E501
:rtype: str
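A minimal construction sketch; the field values follow the usual Kubernetes conventions and are not taken from this file:

from aiokubernetes.models.v1_object_meta import V1ObjectMeta
from aiokubernetes.models.v1_pod import V1Pod

pod = V1Pod(api_version='v1', kind='Pod', metadata=V1ObjectMeta(name='demo'))
print(pod.api_version)  # 'v1'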
https://github.com/olitheolix/aiokubernetes/blob/266718b210dff2a9b2212183261ea89adf89115e/aiokubernetes/models/v1_pod.py#L74-L82
import pprint import re from aiokubernetes.models.v1_object_meta import V1ObjectMeta from aiokubernetes.models.v1_pod_spec import V1PodSpec from aiokubernetes.models.v1_pod_status import V1PodStatus class V1Pod(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'api_version': 'str', 'kind': 'str', 'metadata': 'V1ObjectMeta', 'spec': 'V1PodSpec', 'status': 'V1PodStatus' } attribute_map = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status' } def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None): self._api_version = None self._kind = None self._metadata = None self._spec = None self._status = None self.discriminator = None if api_version is not None: self.api_version = api_version if kind is not None: self.kind = kind if metadata is not None: self.metadata = metadata if spec is not None: self.spec = spec if status is not None: self.status = status @property
Apache License 2.0
tomplus/kubernetes_asyncio
kubernetes_asyncio/client/models/v1beta1_aggregation_rule.py
V1beta1AggregationRule.__ne__
python
def __ne__(self, other):
    if not isinstance(other, V1beta1AggregationRule):
        return True
    return self.to_dict() != other.to_dict()
Returns true if both objects are not equal
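A minimal sketch of the dict-based comparison defined above:

from kubernetes_asyncio.client.models.v1beta1_aggregation_rule import V1beta1AggregationRule

a = V1beta1AggregationRule(cluster_role_selectors=[])
b = V1beta1AggregationRule(cluster_role_selectors=[])
assert not (a != b)        # same to_dict() output -> not unequal
assert a != "not a rule"   # different type -> always unequal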
https://github.com/tomplus/kubernetes_asyncio/blob/22bf0f4ec775b920abc9cee86bb38abcfc57506d/kubernetes_asyncio/client/models/v1beta1_aggregation_rule.py#L117-L122
import pprint import re import six from kubernetes_asyncio.client.configuration import Configuration class V1beta1AggregationRule(object): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ openapi_types = { 'cluster_role_selectors': 'list[V1LabelSelector]' } attribute_map = { 'cluster_role_selectors': 'clusterRoleSelectors' } def __init__(self, cluster_role_selectors=None, local_vars_configuration=None): if local_vars_configuration is None: local_vars_configuration = Configuration() self.local_vars_configuration = local_vars_configuration self._cluster_role_selectors = None self.discriminator = None if cluster_role_selectors is not None: self.cluster_role_selectors = cluster_role_selectors @property def cluster_role_selectors(self): return self._cluster_role_selectors @cluster_role_selectors.setter def cluster_role_selectors(self, cluster_role_selectors): self._cluster_role_selectors = cluster_role_selectors def to_dict(self): result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, V1beta1AggregationRule): return False return self.to_dict() == other.to_dict()
Apache License 2.0
ibm-security/ibmsecurity
ibmsecurity/isam/base/service_agreement.py
get
python
def get(isamAppliance, check_mode=False, force=False):
    return isamAppliance.invoke_get("Retrieving the service agreement settings",
                                    "/setup_service_agreements/accepted")
Retrieving the service agreement settings
https://github.com/ibm-security/ibmsecurity/blob/da098f7d555e571a99a0d7cd47a51add483feb6f/ibmsecurity/isam/base/service_agreement.py#L7-L11
import logging

logger = logging.getLogger(__name__)

uri = "/setup_service_agreements"
Apache License 2.0
googlecloudplatform/django-cloud-deploy
django_cloud_deploy/tests/lib/test_base.py
ResourceCleanUp.clean_up_database
python
def clean_up_database(self, instance_name: str, database_name: str):
    try:
        yield
    finally:
        self._clean_up_database(instance_name, database_name)
A context manager to delete the given Cloud SQL database.

Args:
    instance_name: Name of the Cloud SQL instance the database belongs to.
    database_name: Name of the database to delete.

Yields:
    None
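A hedged usage sketch inside a test method (the instance and database names are hypothetical):

def test_database_lifecycle(self):
    with self.clean_up_database('my-sql-instance', 'my-db'):
        ...  # create and exercise the database; the cleanup in the
             # finally clause deletes it on exit, even if the body raises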
https://github.com/googlecloudplatform/django-cloud-deploy/blob/d316b1e45357761e2b124143e6e12ce34ef6f975/django_cloud_deploy/tests/lib/test_base.py#L546-L560
import contextlib import os import shutil import subprocess import tempfile from typing import Any, List, Dict, Optional import yaml from absl.testing import absltest from google.oauth2 import service_account import django_cloud_deploy.tests from django_cloud_deploy.tests.lib import utils from django_cloud_deploy.skeleton import source_generator import googleapiclient from googleapiclient import discovery from googleapiclient import errors def _load_test_config(): dirname, _ = os.path.split( os.path.abspath(django_cloud_deploy.tests.__file__)) config_path = os.path.join(dirname, 'integration', 'data', 'integration_test_config.yaml') with open(config_path) as config_file: config_file_content = config_file.read() return yaml.load(config_file_content, Loader=yaml.FullLoader) _TEST_CONFIG = _load_test_config() class BaseTest(absltest.TestCase): def setUp(self): self.service_account_key_path = os.environ.get( 'GOOGLE_APPLICATION_CREDENTIALS') self.credentials = ( service_account.Credentials.from_service_account_file( self.service_account_key_path, scopes=['https://www.googleapis.com/auth/cloud-platform'])) @property def zone(self): return _TEST_CONFIG['zone'] @property def project_id(self): return _TEST_CONFIG['project_id'] @property def project_name(self): return _TEST_CONFIG['project_name'] class DjangoFileGeneratorTest(BaseTest): @property def database_user(self): return _TEST_CONFIG['database_user'] @property def database_password(self): return _TEST_CONFIG['database_password'] def setUp(self): super().setUp() self.project_dir = tempfile.mkdtemp() image_name = utils.get_resource_name(resource_type='image') self.image_tag = '/'.join(['gcr.io', self.project_id, image_name]) self.instance_name = utils.get_resource_name( resource_type='sql-instance') self.database_name = utils.get_resource_name(resource_type='db') app_name = 'fake_app' generator = source_generator.DjangoSourceFileGenerator() generator.generate_new(project_id=self.project_id, project_name=self.project_name, app_name=app_name, project_dir=self.project_dir, database_user=self.database_user, database_password=self.database_password, instance_name=self.instance_name, database_name=self.database_name, image_tag=self.image_tag) def tearDown(self): shutil.rmtree(self.project_dir) class ResourceList(BaseTest): def list_service_accounts(self, service=None): service = service or discovery.build( 'iam', 'v1', credentials=self.credentials, cache_discovery=False) resource_name = '/'.join(['projects', self.project_id]) request = service.projects().serviceAccounts().list(name=resource_name) accounts = [] while request: response = request.execute() accounts += [ account['email'] for account in response.get('accounts', []) ] request = service.projects().serviceAccounts().list_next( previous_request=request, previous_response=response) return accounts def list_clusters(self, service=None): service = service or discovery.build('container', 'v1', credentials=self.credentials, cache_discovery=False) request = service.projects().zones().clusters().list( projectId=self.project_id, zone=self.zone) response = request.execute() return [ cluster.get('name', '') for cluster in response.get('clusters', []) ] def list_buckets(self, service=None): service = service or discovery.build('storage', 'v1', credentials=self.credentials, cache_discovery=False) request = service.buckets().list(project=self.project_id) response = request.execute() return [bucket.get('name', '') for bucket in response.get('items', [])] def list_enabled_services(self, 
service_usage_service=None): service_usage_service = service_usage_service or discovery.build( 'serviceusage', 'v1', credentials=self.credentials, cache_discovery=False) parent = '/'.join(['projects', self.project_id]) request = service_usage_service.services().list(parent=parent, filter='state:ENABLED') response = request.execute() return [ service['config']['name'] for service in response.get('services', []) ] def list_instances(self, service=None): service = service or discovery.build('sqladmin', 'v1beta4', cache_discovery=False, credentials=self.credentials) request = service.instances().list(project=self.project_id) response = request.execute() instances = [item['name'] for item in response.get('items', [])] return instances def list_databases(self, instance_name, service=None): service = service or discovery.build('sqladmin', 'v1beta4', cache_discovery=False, credentials=self.credentials) request = service.databases().list(project=self.project_id, instance=instance_name) response = request.execute() databases = [item['name'] for item in response.get('items', [])] return databases class ResourceCleanUp(BaseTest): def _delete_cluster( self, cluster_name: str, service: Optional[googleapiclient.discovery.Resource] = None): container_service = service or discovery.build( 'container', 'v1', credentials=self.credentials, cache_discovery=False) request = container_service.projects().zones().clusters().delete( projectId=self.project_id, zone=self.zone, clusterId=cluster_name) try: request.execute() except errors.HttpError: pass @contextlib.contextmanager def clean_up_appengine_service(self, service_id: str): try: yield finally: appengine_service = discovery.build('appengine', 'v1', credentials=self.credentials, cache_discovery=False) request = appengine_service.apps().services().delete( appsId=self.project_id, servicesId=service_id) try: request.execute(num_retries=5) except errors.HttpError: pass def _delete_objects(self, bucket_name: str, storage_service: googleapiclient.discovery.Resource): request = storage_service.objects().list(bucket=bucket_name) response = request.execute(num_retries=5) if 'items' in response: object_names = [item['name'] for item in response['items']] for object_name in object_names: request = storage_service.objects().delete(bucket=bucket_name, object=object_name) try: request.execute(num_retries=5) except errors.HttpError: pass def _delete_bucket( self, bucket_name: str, service: Optional[googleapiclient.discovery.Resource] = None): storage_service = service or discovery.build( 'storage', 'v1', credentials=self.credentials, cache_discovery=False) self._delete_objects(bucket_name, storage_service) request = storage_service.buckets().delete(bucket=bucket_name) try: request.execute(num_retries=5) except errors.HttpError: pass def _delete_service_account( self, service_account_email: str, service: Optional[googleapiclient.discovery.Resource] = None): iam_service = service or discovery.build( 'iam', 'v1', credentials=self.credentials, cache_discovery=False) resource_name = 'projects/{}/serviceAccounts/{}'.format( self.project_id, service_account_email) request = iam_service.projects().serviceAccounts().delete( name=resource_name) try: request.execute(num_retries=5) except errors.HttpError: pass def _reset_iam_policy( self, member: str, roles: List[str], service: Optional[googleapiclient.discovery.Resource] = None): cloudresourcemanager_service = service or discovery.build( 'cloudresourcemanager', 'v1', credentials=self.credentials, cache_discovery=False) request = 
cloudresourcemanager_service.projects().getIamPolicy( resource=self.project_id) policy = request.execute(num_retries=5) for role in roles: for binding in policy['bindings']: if binding['role'] == role and member in binding['members']: binding['members'].remove(member) break policy['bindings'] = [b for b in policy['bindings'] if b['members']] body = {'policy': policy} request = cloudresourcemanager_service.projects().setIamPolicy( resource=self.project_id, body=body) try: request.execute(num_retries=5) except errors.HttpError: pass def _clean_up_sql_instance( self, instance_name: str, service: Optional[googleapiclient.discovery.Resource] = None): sqladmin_service = service or discovery.build( 'sqladmin', 'v1beta4', credentials=self.credentials, cache_discovery=False) request = sqladmin_service.instances().delete(instance=instance_name, project=self.project_id) try: request.execute(num_retries=5) except errors.HttpError: pass def _clean_up_database( self, instance_name: str, database_name: str, service: Optional[googleapiclient.discovery.Resource] = None): sqladmin_service = service or discovery.build( 'sqladmin', 'v1beta4', credentials=self.credentials, cache_discovery=False) request = sqladmin_service.databases().delete(database=database_name, instance=instance_name, project=self.project_id) try: request.execute(num_retries=5) except errors.HttpError: pass @contextlib.contextmanager def clean_up_cluster(self, cluster_name: str): try: yield finally: self._delete_cluster(cluster_name) @contextlib.contextmanager def clean_up_bucket(self, bucket_name: str): try: yield finally: self._delete_bucket(bucket_name) @contextlib.contextmanager def clean_up_docker_image(self, image_name: str): try: yield finally: digests = subprocess.check_output( [ 'gcloud', 'container', 'images', 'list-tags', '--format=value(digest)', image_name ], universal_newlines=True).rstrip().split('\n') for digest in digests: full_image_name = '{}@sha256:{}'.format(image_name, digest) subprocess.check_call([ 'gcloud', '-q', 'container', 'images', 'delete', '--force-delete-tags', full_image_name ]) @contextlib.contextmanager def disable_services(self, services: List[Dict[str, Any]]): try: yield finally: service_usage_service = discovery.build( 'serviceusage', 'v1', credentials=self.credentials, cache_discovery=False) for service in services: service_name = '/'.join( ['projects', self.project_id, 'services', service['name']]) request = service_usage_service.services().disable( name=service_name, body={'disableDependentServices': False}) try: request.execute(num_retries=5) except errors.HttpError: pass @contextlib.contextmanager def delete_service_account(self, service_account_email: str): try: yield finally: self._delete_service_account(service_account_email) @contextlib.contextmanager def reset_iam_policy(self, member: str, roles: List[str]): try: yield finally: self._reset_iam_policy(member, roles) @contextlib.contextmanager def clean_up_sql_instance(self, instance_name: str): try: yield finally: self._clean_up_sql_instance(instance_name) @contextlib.contextmanager
Apache License 2.0
ambianic/peerjs-python
src/peerjs/api.py
API.__init__
python
def __init__(self, options: Any = None):
    self._options = options
    log.debug('API options: %s', options)
    self._http_session = aiohttp.ClientSession()
Create API instance.
https://github.com/ambianic/peerjs-python/blob/1f5844cf9a4701fb336f7336c1287a4059da0242/src/peerjs/api.py#L27-L31
import logging
import random
import time
from typing import Any

import aiohttp

from .util import util

log = logging.getLogger(__name__)


class HttpMethod:
    GET = "GET"
    POST = "POST"
    PUT = "PUT"
    DELETE = "DELETE"


class API:
Apache License 2.0
jlmadurga/permabots
permabots/views/api/handler.py
SourceStateList.get
python
def get(self, request, bot_id, id, format=None):
    return super(SourceStateList, self).get(request, bot_id, id, format)
Get the list of source states of a handler
---
serializer: StateSerializer
responseMessages:
    - code: 401
      message: Not authenticated
https://github.com/jlmadurga/permabots/blob/f1bfb3081ed0da2a4cb4b0537e625ccfbab58ee1/permabots/views/api/handler.py#L332-L341
from permabots.serializers import HandlerSerializer, AbsParamSerializer, StateSerializer, HandlerUpdateSerializer from permabots.models import Handler, Request, Hook, UrlParam, HeaderParam, State from permabots.models import Response as handlerResponse from rest_framework.response import Response from rest_framework import status import logging from django.http.response import Http404 from rest_framework import exceptions from permabots.views.api.base import ListBotAPIView, PermabotsAPIView, DetailBotAPIView, ObjectBotListView import json logger = logging.getLogger(__name__) class HandlerList(ListBotAPIView): serializer = HandlerSerializer def _query(self, bot): return bot.handlers.all() def _creator(self, bot, serializer): target_state = None request = None if 'target_state' in serializer.data: target_state, _ = State.objects.get_or_create(bot=bot, name=serializer.data['target_state']['name']) if 'request' in serializer.data: data = serializer.data['request'].get('data', None) if data: data = json.dumps(data) request = Request.objects.create(url_template=serializer.data['request']['url_template'], method=serializer.data['request']['method'], data=data) response = handlerResponse.objects.create(text_template=serializer.data['response']['text_template'], keyboard_template=serializer.data['response']['keyboard_template']) return Handler.objects.create(bot=bot, name=serializer.data['name'], pattern=serializer.data['pattern'], priority=serializer.data.get('priority', 0), response=response, enabled=serializer.data['enabled'], request=request, target_state=target_state) def get(self, request, bot_id, format=None): return super(HandlerList, self).get(request, bot_id, format) def post(self, request, bot_id, format=None): return super(HandlerList, self).post(request, bot_id, format) class HandlerDetail(DetailBotAPIView): model = Handler serializer = HandlerSerializer serializer_update = HandlerUpdateSerializer def get(self, request, bot_id, id, format=None): return super(HandlerDetail, self).get(request, bot_id, id, format) def put(self, request, bot_id, id, format=None): return super(HandlerDetail, self).put(request, bot_id, id, format) def delete(self, request, bot_id, id, format=None): return super(HandlerDetail, self).delete(request, bot_id, id, format) class UrlParameterList(ObjectBotListView): serializer = AbsParamSerializer obj_model = Handler def _query(self, bot, obj): return obj.request.url_parameters.all() def _creator(self, obj, serializer): return UrlParam.objects.create(key=serializer.data['key'], value_template=serializer.data['value_template'], request=obj.request) def get(self, request, bot_id, id, format=None): return super(UrlParameterList, self).get(request, bot_id, id, format) def post(self, request, bot_id, id, format=None): return super(UrlParameterList, self).post(request, bot_id, id, format) class HeaderParameterList(ObjectBotListView): serializer = AbsParamSerializer obj_model = Handler def _query(self, bot, obj): return obj.request.header_parameters.all() def _creator(self, obj, serializer): return HeaderParam.objects.create(key=serializer.data['key'], value_template=serializer.data['value_template'], request=obj.request) def get(self, request, bot_id, id, format=None): return super(HeaderParameterList, self).get(request, bot_id, id, format) def post(self, request, bot_id, id, format=None): return super(HeaderParameterList, self).post(request, bot_id, id, format) class RequestDetailView(PermabotsAPIView): model = None serializer = None def get_handler(self, id, bot, 
user): try: handler = Handler.objects.get(id=id, bot=bot) if handler.bot.owner != user: raise exceptions.AuthenticationFailed() return handler except Handler.DoesNotExist: raise Http404 def _user(self, handler): return handler.bot.owner def get_object(self, id, handler, user): try: obj = self.model.objects.get(id=id, request=handler.request) if self._user(handler) != user: raise exceptions.AuthenticationFailed() return obj except self.model.DoesNotExist: raise Http404 def get(self, request, bot_id, handler_id, id, format=None): bot = self.get_bot(bot_id, request.user) handler = self.get_handler(handler_id, bot, request.user) obj = self.get_object(id, handler, request.user) serializer = self.serializer(obj) return Response(serializer.data) def put(self, request, bot_id, handler_id, id, format=None): bot = self.get_bot(bot_id, request.user) handler = self.get_handler(handler_id, bot, request.user) obj = self.get_object(id, handler, request.user) serializer = self.serializer(obj, data=request.data) if serializer.is_valid(): obj = serializer.save() return Response(self.serializer(obj).data) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def delete(self, request, bot_id, handler_id, id, format=None): bot = self.get_bot(bot_id, request.user) handler = self.get_handler(handler_id, bot, request.user) obj = self.get_object(id, handler, request.user) obj.delete() return Response(status=status.HTTP_204_NO_CONTENT) class UrlParameterDetail(RequestDetailView): model = UrlParam serializer = AbsParamSerializer def get(self, request, bot_id, handler_id, id, format=None): return super(UrlParameterDetail, self).get(request, bot_id, handler_id, id, format) def put(self, request, bot_id, handler_id, id, format=None): return super(UrlParameterDetail, self).put(request, bot_id, handler_id, id, format) def delete(self, request, bot_id, handler_id, id, format=None): return super(UrlParameterDetail, self).delete(request, bot_id, handler_id, id, format) class HeaderParameterDetail(RequestDetailView): model = HeaderParam serializer = AbsParamSerializer def get(self, request, bot_id, handler_id, id, format=None): return super(HeaderParameterDetail, self).get(request, bot_id, handler_id, id, format) def put(self, request, bot_id, handler_id, id, format=None): return super(HeaderParameterDetail, self).put(request, bot_id, handler_id, id, format) def delete(self, request, bot_id, handler_id, id, format=None): return super(HeaderParameterDetail, self).delete(request, bot_id, handler_id, id, format) class FromHandlerViewMixin(object): def get_handler(self, id, bot, user): try: handler = Handler.objects.get(id=id, bot=bot) if handler.bot.owner != user: raise exceptions.AuthenticationFailed() return handler except Hook.DoesNotExist: raise Http404 class SourceStateList(ObjectBotListView): serializer = StateSerializer obj_model = Handler def _query(self, bot, obj): return obj.source_states.all() def _creator(self, obj, serializer): state, _ = State.objects.get_or_create(name=serializer.data['name'], bot=obj.bot) obj.source_states.add(state) return state
BSD 3-Clause New or Revised License