Dataset schema (column name: type, observed value range):

repository_name: string, lengths 7 to 107
function_path: string, lengths 4 to 190
function_identifier: string, lengths 1 to 236
language: string, 1 distinct value
function: string, lengths 9 to 647k
docstring: string, lengths 5 to 488k
function_url: string, lengths 71 to 285
context: string, lengths 0 to 2.51M
license: string, 5 distinct values
googlefonts/cu2qu
Lib/cu2qu/ufo.py
glyph_to_quadratic
python
def glyph_to_quadratic(glyph, **kwargs):
    return glyphs_to_quadratic([glyph], **kwargs)
Convenience wrapper around glyphs_to_quadratic, for just one glyph. Return True if the glyph was modified, else return False.
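A minimal usage sketch, assuming a defcon UFO font and an illustrative tolerance of 1.0 font units (neither the font path nor the max_err value comes from the source):

import defcon  # assumed UFO library; any glyph with a compatible pen API works
from cu2qu.ufo import glyph_to_quadratic

font = defcon.Font("MyFont.ufo")  # hypothetical path
glyph = font["a"]
if glyph_to_quadratic(glyph, max_err=1.0, reverse_direction=True):
    print("cubic curves converted to quadratic")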
https://github.com/googlefonts/cu2qu/blob/6812ae48c6b81aee95949d64e2a9489c333d825d/Lib/cu2qu/ufo.py#L311-L316
from __future__ import print_function, division, absolute_import import logging from fontTools.pens.basePen import AbstractPen from fontTools.pens.pointPen import PointToSegmentPen from fontTools.pens.reverseContourPen import ReverseContourPen from cu2qu import curves_to_quadratic from cu2qu.errors import ( UnequalZipLengthsError, IncompatibleSegmentNumberError, IncompatibleSegmentTypesError, IncompatibleGlyphsError, IncompatibleFontsError) __all__ = ['fonts_to_quadratic', 'font_to_quadratic'] DEFAULT_MAX_ERR = 0.001 CURVE_TYPE_LIB_KEY = "com.github.googlei18n.cu2qu.curve_type" logger = logging.getLogger(__name__) _zip = zip def zip(*args): if len(set(len(a) for a in args)) != 1: raise UnequalZipLengthsError(*args) return list(_zip(*args)) class GetSegmentsPen(AbstractPen): def __init__(self): self._last_pt = None self.segments = [] def _add_segment(self, tag, *args): if tag in ['move', 'line', 'qcurve', 'curve']: self._last_pt = args[-1] self.segments.append((tag, args)) def moveTo(self, pt): self._add_segment('move', pt) def lineTo(self, pt): self._add_segment('line', pt) def qCurveTo(self, *points): self._add_segment('qcurve', self._last_pt, *points) def curveTo(self, *points): self._add_segment('curve', self._last_pt, *points) def closePath(self): self._add_segment('close') def endPath(self): self._add_segment('end') def addComponent(self, glyphName, transformation): pass def _get_segments(glyph): pen = GetSegmentsPen() pointPen = PointToSegmentPen(pen, outputImpliedClosingLine=True) glyph.drawPoints(pointPen) return pen.segments def _set_segments(glyph, segments, reverse_direction): glyph.clearContours() pen = glyph.getPen() if reverse_direction: pen = ReverseContourPen(pen) for tag, args in segments: if tag == 'move': pen.moveTo(*args) elif tag == 'line': pen.lineTo(*args) elif tag == 'curve': pen.curveTo(*args[1:]) elif tag == 'qcurve': pen.qCurveTo(*args[1:]) elif tag == 'close': pen.closePath() elif tag == 'end': pen.endPath() else: raise AssertionError('Unhandled segment type "%s"' % tag) def _segments_to_quadratic(segments, max_err, stats): assert all(s[0] == 'curve' for s in segments), 'Non-cubic given to convert' new_points = curves_to_quadratic([s[1] for s in segments], max_err) n = len(new_points[0]) assert all(len(s) == n for s in new_points[1:]), 'Converted incompatibly' spline_length = str(n - 2) stats[spline_length] = stats.get(spline_length, 0) + 1 return [('qcurve', p) for p in new_points] def _glyphs_to_quadratic(glyphs, max_err, reverse_direction, stats): try: segments_by_location = zip(*[_get_segments(g) for g in glyphs]) except UnequalZipLengthsError: raise IncompatibleSegmentNumberError(glyphs) if not any(segments_by_location): return False glyphs_modified = reverse_direction new_segments_by_location = [] incompatible = {} for i, segments in enumerate(segments_by_location): tag = segments[0][0] if not all(s[0] == tag for s in segments[1:]): incompatible[i] = [s[0] for s in segments] elif tag == 'curve': segments = _segments_to_quadratic(segments, max_err, stats) glyphs_modified = True new_segments_by_location.append(segments) if glyphs_modified: new_segments_by_glyph = zip(*new_segments_by_location) for glyph, new_segments in zip(glyphs, new_segments_by_glyph): _set_segments(glyph, new_segments, reverse_direction) if incompatible: raise IncompatibleSegmentTypesError(glyphs, segments=incompatible) return glyphs_modified def glyphs_to_quadratic( glyphs, max_err=None, reverse_direction=False, stats=None): if stats is None: stats = {} if not max_err: max_err = 
DEFAULT_MAX_ERR * 1000 if isinstance(max_err, (list, tuple)): max_errors = max_err else: max_errors = [max_err] * len(glyphs) assert len(max_errors) == len(glyphs) return _glyphs_to_quadratic(glyphs, max_errors, reverse_direction, stats) def fonts_to_quadratic( fonts, max_err_em=None, max_err=None, reverse_direction=False, stats=None, dump_stats=False, remember_curve_type=True): if remember_curve_type: curve_types = {f.lib.get(CURVE_TYPE_LIB_KEY, "cubic") for f in fonts} if len(curve_types) == 1: curve_type = next(iter(curve_types)) if curve_type == "quadratic": logger.info("Curves already converted to quadratic") return False elif curve_type == "cubic": pass else: raise NotImplementedError(curve_type) elif len(curve_types) > 1: logger.warning("fonts may contain different curve types") if stats is None: stats = {} if max_err_em and max_err: raise TypeError('Only one of max_err and max_err_em can be specified.') if not (max_err_em or max_err): max_err_em = DEFAULT_MAX_ERR if isinstance(max_err, (list, tuple)): assert len(max_err) == len(fonts) max_errors = max_err elif max_err: max_errors = [max_err] * len(fonts) if isinstance(max_err_em, (list, tuple)): assert len(fonts) == len(max_err_em) max_errors = [f.info.unitsPerEm * e for f, e in zip(fonts, max_err_em)] elif max_err_em: max_errors = [f.info.unitsPerEm * max_err_em for f in fonts] modified = False glyph_errors = {} for name in set().union(*(f.keys() for f in fonts)): glyphs = [] cur_max_errors = [] for font, error in zip(fonts, max_errors): if name in font: glyphs.append(font[name]) cur_max_errors.append(error) try: modified |= _glyphs_to_quadratic( glyphs, cur_max_errors, reverse_direction, stats) except IncompatibleGlyphsError as exc: logger.error(exc) glyph_errors[name] = exc if glyph_errors: raise IncompatibleFontsError(glyph_errors) if modified and dump_stats: spline_lengths = sorted(stats.keys()) logger.info('New spline lengths: %s' % (', '.join( '%s: %d' % (l, stats[l]) for l in spline_lengths))) if remember_curve_type: for font in fonts: curve_type = font.lib.get(CURVE_TYPE_LIB_KEY, "cubic") if curve_type != "quadratic": font.lib[CURVE_TYPE_LIB_KEY] = "quadratic" modified = True return modified
Apache License 2.0
megvii-basedetection/dynamicrouting
dl_lib/utils/checkpoint.py
get_unexpected_parameters_message
python
def get_unexpected_parameters_message(keys: list):
    groups = _group_checkpoint_keys(keys)
    msg = "The checkpoint contains parameters not used by the model:\n"
    msg += "\n".join(" " + colored(k + _group_to_str(v), "magenta")
                     for k, v in groups.items())
    return msg
Get a logging-friendly message to report parameter names (keys) that are in the checkpoint but not found in the model. Args: keys (list[str]): List of keys that were not found in the model. Returns: str: message.
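The context code shows the intended call site: after a non-strict load_state_dict, leftover checkpoint keys are passed straight to this helper for logging.

incompatible = self.model.load_state_dict(checkpoint_state_dict, strict=False)
if incompatible.unexpected_keys:
    self.logger.info(
        get_unexpected_parameters_message(incompatible.unexpected_keys))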
https://github.com/megvii-basedetection/dynamicrouting/blob/2ad0a95139b1bf21878dd222854f98974ac4930a/dl_lib/utils/checkpoint.py#L313-L326
import collections import copy import logging import os from collections import defaultdict from typing import Any import numpy as np import torch import torch.nn as nn from termcolor import colored from torch.nn.parallel import DataParallel, DistributedDataParallel from dl_lib.utils.file_io import PathManager class Checkpointer(object): def __init__( self, model: nn.Module, save_dir: str = "", *, save_to_disk: bool = True, **checkpointables: object, ): if isinstance(model, (DistributedDataParallel, DataParallel)): model = model.module self.model = model self.checkpointables = copy.copy(checkpointables) self.logger = logging.getLogger(__name__) self.save_dir = save_dir self.save_to_disk = save_to_disk def save(self, name: str, **kwargs: dict): if not self.save_dir or not self.save_to_disk: return data = {} data["model"] = self.model.state_dict() for key, obj in self.checkpointables.items(): data[key] = obj.state_dict() data.update(kwargs) basename = "{}.pth".format(name) save_file = os.path.join(self.save_dir, basename) assert os.path.basename(save_file) == basename, basename self.logger.info("Saving checkpoint to {}".format(save_file)) with PathManager.open(save_file, "wb") as f: torch.save(data, f) self.tag_last_checkpoint(basename) def load(self, path: str): if not path: self.logger.info( "No checkpoint found. Initializing model from scratch") return {} self.logger.info("Loading checkpoint from {}".format(path)) if not os.path.isfile(path): path = PathManager.get_local_path(path) assert os.path.isfile(path), "Checkpoint {} not found!".format( path) checkpoint = self._load_file(path) self._load_model(checkpoint) for key, obj in self.checkpointables.items(): if key in checkpoint: self.logger.info("Loading {} from {}".format(key, path)) obj.load_state_dict(checkpoint.pop(key)) return checkpoint def has_checkpoint(self): save_file = os.path.join(self.save_dir, "last_checkpoint") return PathManager.exists(save_file) def get_checkpoint_file(self): save_file = os.path.join(self.save_dir, "last_checkpoint") try: with PathManager.open(save_file, "r") as f: last_saved = f.read().strip() except IOError: return "" return os.path.join(self.save_dir, last_saved) def get_all_checkpoint_files(self): all_model_checkpoints = [ os.path.join(self.save_dir, file) for file in PathManager.ls(self.save_dir) if PathManager.isfile(os.path.join(self.save_dir, file)) and file.endswith(".pth") ] return all_model_checkpoints def resume_or_load(self, path: str, *, resume: bool = True): if resume and self.has_checkpoint(): path = self.get_checkpoint_file() return self.load(path) def tag_last_checkpoint(self, last_filename_basename: str): save_file = os.path.join(self.save_dir, "last_checkpoint") with PathManager.open(save_file, "w") as f: f.write(last_filename_basename) def _load_file(self, f: str): return torch.load(f, map_location=torch.device("cpu")) def _load_model(self, checkpoint: Any): checkpoint_state_dict = checkpoint.pop("model") self._convert_ndarray_to_tensor(checkpoint_state_dict) _strip_prefix_if_present(checkpoint_state_dict, "module.") model_state_dict = self.model.state_dict() for k in list(checkpoint_state_dict.keys()): if k in model_state_dict: shape_model = tuple(model_state_dict[k].shape) shape_checkpoint = tuple(checkpoint_state_dict[k].shape) if shape_model != shape_checkpoint: self.logger.warning( "'{}' has shape {} in the checkpoint but {} in the " "model! 
Skipped.".format(k, shape_checkpoint, shape_model)) checkpoint_state_dict.pop(k) incompatible = self.model.load_state_dict(checkpoint_state_dict, strict=False) if incompatible.missing_keys: self.logger.info( get_missing_parameters_message(incompatible.missing_keys)) if incompatible.unexpected_keys: self.logger.info( get_unexpected_parameters_message( incompatible.unexpected_keys)) def _convert_ndarray_to_tensor(self, state_dict: dict): for k in list(state_dict.keys()): v = state_dict[k] if not isinstance(v, np.ndarray) and not isinstance( v, torch.Tensor): raise ValueError( "Unsupported type found in checkpoint! {}: {}".format( k, type(v))) if not isinstance(v, torch.Tensor): state_dict[k] = torch.from_numpy(v) class PeriodicCheckpointer: def __init__(self, checkpointer: Any, period: int, max_iter: int = None): self.checkpointer = checkpointer self.period = int(period) self.max_iter = max_iter def step(self, iteration: int, **kwargs: Any): iteration = int(iteration) additional_state = {"iteration": iteration} additional_state.update(kwargs) if (iteration + 1) % self.period == 0: self.checkpointer.save("model_{:07d}".format(iteration), **additional_state) if iteration >= self.max_iter - 1: self.checkpointer.save("model_final", **additional_state) def save(self, name: str, **kwargs: Any): self.checkpointer.save(name, **kwargs) def get_missing_parameters_message(keys: list): groups = _group_checkpoint_keys(keys) msg = "Some model parameters are not in the checkpoint:\n" msg += "\n".join(" " + colored(k + _group_to_str(v), "blue") for k, v in groups.items()) return msg
Apache License 2.0
apache/allura
Allura/allura/tasks/mail_tasks.py
sendmail
python
def sendmail(fromaddr, destinations, text, reply_to, subject,
             message_id, in_reply_to=None, sender=None,
             references=None, metalink=None):
    from allura import model as M
    addrs_plain = []
    addrs_multi = []
    if fromaddr is None:
        fromaddr = g.noreply
    elif not isinstance(fromaddr, six.string_types) or '@' not in fromaddr:
        log.warning('Looking up user with fromaddr: %s', fromaddr)
        user = M.User.query.get(_id=ObjectId(fromaddr), disabled=False,
                                pending=False)
        if not user:
            log.warning('Cannot find user with ID: %s', fromaddr)
            fromaddr = g.noreply
        else:
            fromaddr = user.email_address_header()
    for addr in destinations:
        if mail_util.isvalid(addr):
            addrs_plain.append(addr)
        else:
            try:
                user = M.User.query.get(_id=ObjectId(addr), disabled=False,
                                        pending=False)
                if not user:
                    log.warning('Cannot find user with ID: %s', addr)
                    continue
            except Exception:
                log.exception('Error looking up user with ID: %r' % addr)
                continue
            addr = user.email_address_header()
            if not addr and user.email_addresses:
                addr = user.email_addresses[0]
                log.warning(
                    'User %s has not set primary email address, using %s',
                    user._id, addr)
            if not addr:
                log.error(
                    "User %s (%s) has not set any email address, can't deliver",
                    user._id, user.username)
                continue
            if user.get_pref('email_format') == 'plain':
                addrs_plain.append(addr)
            else:
                addrs_multi.append(addr)
    multi_msg, plain_msg = create_multipart_msg(text, metalink)
    smtp_client.sendmail(
        addrs_multi, fromaddr, reply_to, subject, message_id,
        in_reply_to, multi_msg, sender=sender, references=references)
    smtp_client.sendmail(
        addrs_plain, fromaddr, reply_to, subject, message_id,
        in_reply_to, plain_msg, sender=sender, references=references)
Send an email to the specified list of destinations, honoring each recipient's preferred email format. Best suited to broadcast messages. :param fromaddr: ObjectId or str(ObjectId) of user, or email address str
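A hedged call sketch with invented addresses and text; since the context decorates sendmail as an Allura @task, production code would normally schedule it via the task framework rather than call it inline:

sendmail(
    fromaddr=None,  # falls back to g.noreply
    destinations=['alice@example.com', str(user_id)],  # emails or user ObjectIds
    text='Release 1.2 is out.',
    reply_to='noreply@example.com',
    subject='[news] Release 1.2',
    message_id='<release-1.2@example.com>',
)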
https://github.com/apache/allura/blob/04f14f15a9a9364e18c61f68acdaa241a470186b/Allura/allura/tasks/mail_tasks.py#L144-L201
from __future__ import unicode_literals from __future__ import absolute_import import logging import six.moves.html_parser import re from tg import tmpl_context as c, app_globals as g, config from bson import ObjectId import markupsafe from allura.lib import helpers as h from allura.lib.decorators import task from allura.lib import mail_util from allura.lib import exceptions as exc import six log = logging.getLogger(__name__) smtp_client = mail_util.SMTPClient() def mail_meta_content(metalink): return markupsafe.Markup("""\ <div itemscope itemtype="http://schema.org/EmailMessage"> <div itemprop="action" itemscope itemtype="http://schema.org/ViewAction"> <link itemprop="url" href="%s"></link> <meta itemprop="name" content="View"></meta> </div> <meta itemprop="description" content="View"></meta> </div>""" % metalink) @task def route_email( peer, mailfrom, rcpttos, data): try: msg = mail_util.parse_message(data) except Exception: log.exception('Parse Error: (%r,%r,%r)', peer, mailfrom, rcpttos) return if mail_util.is_autoreply(msg): log.info('Skipping autoreply message: %s', msg['headers']) return mail_user = mail_util.identify_sender(peer, mailfrom, msg['headers'], msg) with h.push_config(c, user=mail_user): log.info('Received email from %s', c.user.username) for addr in rcpttos: try: userpart, project, app = mail_util.parse_address(addr) with h.push_config(c, project=project, app=app): if not app.has_access(c.user, userpart): log.info('Access denied for %s to mailbox %s', c.user, userpart) elif not c.app.config.options.get('AllowEmailPosting', True): log.info("Posting from email is not enabled") else: if msg['multipart']: msg_hdrs = msg['headers'] for part in msg['parts']: if part.get('content_type', '').startswith('multipart/'): continue msg = dict( headers=dict(msg_hdrs, **part['headers']), message_id=part['message_id'], in_reply_to=part['in_reply_to'], references=part['references'], filename=part['filename'], content_type=part['content_type'], payload=part['payload']) c.app.handle_message(userpart, msg) else: c.app.handle_message(userpart, msg) except exc.MailError as e: log.error('Error routing email to %s: %s', addr, e) except Exception: log.exception('Error routing mail to %s', addr) def create_multipart_msg(text, metalink=None): def replace_html(matchobj): text_within_div = matchobj.group(1) text_within_div = text_within_div.replace('</p>', '\n') text_within_div = markupsafe._striptags_re.sub('', text_within_div) return text_within_div plain_text = text plain_text = re.sub(r'<div class="markdown_content">(.*)</div>', replace_html, plain_text, flags=re.DOTALL, ) plain_text = six.moves.html_parser.HTMLParser().unescape(plain_text) plain_msg = mail_util.encode_email_part(plain_text, 'plain') html_text = g.forge_markdown(email=True).convert(text) if metalink: html_text = html_text + mail_meta_content(metalink) html_msg = mail_util.encode_email_part(html_text, 'html') multi_msg = mail_util.make_multipart_message(plain_msg, html_msg) return multi_msg, plain_msg @task
Apache License 2.0
mhaisham/steruell
grid/manager.py
GridManager.tile
python
def tile(self, coord) -> Union[Tile, None]:
    position = Vector2D.tuple(coord)
    # Note the axis swap: grid x indexes rows, which advance along the
    # screen's y axis (matching the layout in remake_tiles).
    x = (position.y - self.position.y - self.padding.y) / (self.tilesize.y + self.tilepadding.y)
    y = (position.x - self.position.x - self.padding.x) / (self.tilesize.x + self.tilepadding.x)
    ix = math.floor(x)
    iy = math.floor(y)
    if ix < 0 or ix >= self.size.x or iy < 0 or iy >= self.size.y:
        return
    return self.tiles[ix][iy]
:param coord: pixel position (x, y), as a tuple or Vector2D :return: the Tile at that pixel position, or None if it falls outside the grid
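A worked example under assumed geometry (grid origin (0, 20), zero padding, 20x20 px tiles; none of these values are fixed by the source):

# click at pixel (45, 67):
#   x = (67 - 20 - 0) / (20 + 0) = 2.35 -> ix = 2 (row)
#   y = (45 -  0 - 0) / (20 + 0) = 2.25 -> iy = 2 (column)
# manager.tile((45, 67)) therefore returns manager.tiles[2][2]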
https://github.com/mhaisham/steruell/blob/01ac74b36b3290a38e815a39d6becf68ce11c221/grid/manager.py#L330-L350
import math from typing import Union, List import easygui import pygame from core import Vector2D, Switch from widgets import Text from .algorithm import AStarAlgorithm from .memory import AppDatabase, key_map from .tile import Tile class GridManager: def __init__(self, size: Vector2D, info_text: Text, position=Vector2D(0, 20), padding=Vector2D(0, 0)): self.size = size self.position = position self.padding = padding self.info_text = info_text self.info_text.text = 'Select start' self.tilepadding = Vector2D(0, 0) screen_size = Vector2D.tuple(pygame.display.get_surface().get_rect().size) space = Vector2D( screen_size.x - position.x - padding.x, screen_size.y - position.y - padding.y ) self.tilesize = Vector2D( int((space.x - (self.size.x * self.tilepadding.x)) / self.size.x), int((space.y - (self.size.y * self.tilepadding.y)) / self.size.y), ) self.grid = [] for x in range(size.x): l = [0] * size.y self.grid.append(l) self.tiles = [] for x in range(size.x): l = [None] * size.y self.tiles.append(l) self.remake_tiles(self.grid) self.drawable = Switch(True) self.mouse_left_down = Switch(False) self.mouse_left_down_type = None self.start = None self.end = None self.algorithm = None def onflip(val): if self.algorithm.solution_length == -1: self.info_text.text = 'No solution found' return self.info_text.text = 'Running' if val else f'Found solution of length {self.algorithm.solution_length}' self.running = Switch(False, onflip=onflip) self.misc = {} def remake_tiles(self, grid): size = Vector2D(len(grid), len(grid[0])) for x in range(size.x): for y in range(size.y): position = Vector2D( y * self.tilesize.x + y * self.tilepadding.x + self.position.x + self.padding.x, x * self.tilesize.y + x * self.tilepadding.y + self.position.y + self.padding.y, ) tile = Tile(Tile.int_to_state(self.grid[x][y]), gridpos=Vector2D(x, y), position=position, padding=Vector2D.zero()) tile.size = self.tilesize self.tiles[x][y] = tile def update_tiles(self, positions): for gridposition in positions: x, y = gridposition tile = self.tiles[x][y] tile.state = Tile.int_to_state(self.grid[x][y]) self.tiles[x][y] = tile def clean_grid(self, types, to=Tile.UNVISITED): if Tile.END in types: self.end = None self.info_text.text = 'Select end' if Tile.START in types: self.start = None self.info_text.text = 'Select start' to = Tile.state_to_int(to) size = Vector2D(len(self.grid), len(self.grid[0])) for x in range(size.x): for y in range(size.y): tile = self.grid[x][y] if Tile.int_to_state(tile) in types: self.grid[x][y] = to def update_grid(self): for tile in self.iter_tiles(): x, y = tile.gridpos self.grid[x][y] = Tile.state_to_int(tile.state) def event(self, event): if event.type == pygame.KEYUP: if event.key == pygame.K_SPACE: if not self.drawable.get(): easygui.msgbox('Press either Left Ctrl or Left Shift to clear the current grid' '\nLeft Ctrl: Everything excluding walls' '\nLeft Shift: Everything including walls', 'Clear grid', ok_button='CLOSE') return self.update_grid() if self.start is None or self.end is None: easygui.msgbox('Starting point or End point not specified', 'Missing inputs', 'CLOSE') return self.drawable.set(False) self.algorithm = AStarAlgorithm(self.grid) self.running.set(True) elif event.key == pygame.K_LSHIFT: if self.running.get(): return self.update_grid() self.clean_grid([Tile.VISITED, Tile.START, Tile.END, Tile.PATH, Tile.NEIGHBOURS, Tile.WALL]) self.remake_tiles(self.grid) self.drawable.set(True) elif event.key == pygame.K_LCTRL: if self.running.get(): return self.update_grid() 
self.clean_grid([Tile.VISITED, Tile.START, Tile.END, Tile.PATH, Tile.NEIGHBOURS]) self.remake_tiles(self.grid) self.drawable.set(True) elif event.key == pygame.K_s: if self.running.get(): return slot = easygui.buttonbox('Pick a slot to load', 'Load', list(key_map.values())) if slot is None: return self.update_grid() self.save(slot) elif event.key == pygame.K_l: if self.running.get(): return slot = easygui.buttonbox('Pick a slot to load', 'Load', list(AppDatabase.database().data.keys())) if slot is None: return self.load(slot) if event.type == pygame.MOUSEBUTTONDOWN: if event.button == 1: self.mouse_left_down.set(True) if self.drawable.get(): tile = self.tile(event.pos) if tile is None: return currentstate = tile.state if currentstate == Tile.WALL: self.mouse_left_down_type = Tile.UNVISITED elif currentstate == Tile.UNVISITED: self.mouse_left_down_type = Tile.WALL if self.mouse_left_down_type is not None: tile.state = self.mouse_left_down_type elif event.button == 3: if self.drawable.get(): tile = self.tile(event.pos) if self.start is None: self.start = tile tile.state = Tile.START self.info_text.text = 'Select end' elif tile.state != Tile.START: self.end = tile tile.state = Tile.END self.info_text.text = 'Ready' if event.type == pygame.MOUSEBUTTONUP: if event.button == 1: self.mouse_left_down.set(False) self.mouse_left_down_type = None if event.type == pygame.MOUSEMOTION: if self.mouse_left_down.get(): if self.drawable.get(): tile = self.tile(event.pos) if tile is None: return if self.mouse_left_down_type is not None: if tile.state in [Tile.START, Tile.END]: return tile.state = self.mouse_left_down_type def update(self): if self.running.get(): try: affected = self.algorithm.next() self.update_tiles(affected) except StopIteration: self.running.set(False) return else: mouse_pos = Vector2D.tuple(pygame.mouse.get_pos()) tile = self.tile(mouse_pos) if tile is None: return if not tile.hover: tile.enter() try: inbound = self.misc['over'] if tile.position != inbound.position: inbound.exit() except KeyError: pass self.misc['over'] = tile keys = pygame.key.get_pressed() if keys[pygame.K_LALT]: for key in key_map.keys(): if keys[key]: value = key_map[key] self.update_grid() self.save(value) break else: for key in key_map.keys(): if keys[key]: value = key_map[key] self.load(value) break def draw(self, surface): surface.blits([self.tiles[x][y].blit_sequence for x in range(self.size.x) for y in range(self.size.y)])
Apache License 2.0
lrq3000/pyfilefixity
pyFileFixity/lib/profilers/visual/pympler/util/bottle2.py
Route.group_re
python
def group_re(self):
    out = ''
    for token, data in self.tokens():
        if token == 'TXT':
            out += re.escape(data)
        elif token == 'VAR':
            out += '(?P<%s>%s)' % (data[1], data[0])
        elif token == 'ANON':
            out += '(?:%s)' % data
    return out
Return a regexp pattern with named groups
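For example, the route '/hello/:name' tokenises to ('TXT', '/hello/') followed by ('VAR', ('[^/]+', 'name')), so:

route = Route('/hello/:name', target=None)
print(route.group_re())
# '/hello/(?P<name>[^/]+)'   (on older Pythons re.escape also
# backslash-escapes the slashes; matching is unaffected)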
https://github.com/lrq3000/pyfilefixity/blob/fd5ef23bb13835faf1e3baa773619b86a1cc9bdf/pyFileFixity/lib/profilers/visual/pympler/util/bottle2.py#L225-L232
from __future__ import with_statement __author__ = 'Marcel Hellkamp' __version__ = '0.8.0' __license__ = 'MIT' import base64 import cgi import email.utils import functools import hmac import inspect import itertools import mimetypes import os import re import subprocess import sys import thread import threading import time from Cookie import SimpleCookie from tempfile import TemporaryFile from traceback import format_exc from urllib import quote as urlquote from urlparse import urlunsplit, urljoin try: from collections import MutableMapping as DictMixin except ImportError: from UserDict import DictMixin try: from urlparse import parse_qs except ImportError: from cgi import parse_qs try: import cPickle as pickle except ImportError: import pickle try: try: from json import dumps as json_dumps except ImportError: from simplejson import dumps as json_dumps except ImportError: json_dumps = None if sys.version_info >= (3,0,0): from io import BytesIO from io import TextIOWrapper StringType = bytes def touni(x, enc='utf8'): return str(x, encoding=enc) if isinstance(x, bytes) else str(x) else: from StringIO import StringIO as BytesIO from types import StringType TextIOWrapper = None def touni(x, enc='utf8'): return x if isinstance(x, unicode) else unicode(str(x), encoding=enc) def tob(data, enc='utf8'): return data.encode(enc) if isinstance(data, unicode) else data class BottleException(Exception): pass class HTTPResponse(BottleException): def __init__(self, output='', status=200, header=None): super(BottleException, self).__init__("HTTP Response %d" % status) self.status = int(status) self.output = output self.headers = HeaderDict(header) if header else None def apply(self, response): if self.headers: for key, value in self.headers.iterallitems(): response.headers[key] = value response.status = self.status class HTTPError(HTTPResponse): def __init__(self, code=500, output='Unknown Error', exception=None, traceback=None, header=None): super(HTTPError, self).__init__(output, code, header) self.exception = exception self.traceback = traceback def __repr__(self): return ''.join(ERROR_PAGE_TEMPLATE.render(e=self, DEBUG=DEBUG, HTTP_CODES=HTTP_CODES, request=request)) class RouteError(BottleException): class RouteSyntaxError(RouteError): class RouteBuildError(RouteError): class Route(object): syntax = re.compile(r'(.*?)(?<!\\):([a-zA-Z_]+)?(?:#(.*?)#)?') default = '[^/]+' def __init__(self, route, target, name=None, static=False): self.route = route self.target = target self.name = name self._static = static self._tokens = None def tokens(self): if not self._tokens: self._tokens = list(self.tokenise(self.route)) return self._tokens @classmethod def tokenise(cls, route): match = None for match in cls.syntax.finditer(route): pre, name, rex = match.groups() if pre: yield ('TXT', pre.replace('\\:',':')) if rex and name: yield ('VAR', (rex, name)) elif name: yield ('VAR', (cls.default, name)) elif rex: yield ('ANON', rex) if not match: yield ('TXT', route.replace('\\:',':')) elif match.end() < len(route): yield ('TXT', route[match.end():].replace('\\:',':'))
MIT License
easonnie/semanticretrievalmrs
src/build_rindex/document_analysis.py
get_ngrams
python
def get_ngrams(terms, poss=None, n=1, included_tags=None, as_strings=True):
    ngrams = [(s, e + 1)
              for s in range(len(terms))
              for e in range(s, min(s + n, len(terms)))]
    if poss is not None and included_tags is not None:
        filtered_ngram = []
        for (s, e) in ngrams:
            if any([poss[i] in included_tags for i in range(s, e)]):
                filtered_ngram.append((s, e))
        ngrams = filtered_ngram
    if as_strings:
        ngrams = ['{}'.format(' '.join(terms[s:e])) for (s, e) in ngrams]
    return ngrams
Returns a list of all ngrams from length 1 to n.
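A quick example of both modes (the POS tags are invented for illustration):

get_ngrams(['the', 'quick', 'fox'], n=2)
# ['the', 'the quick', 'quick', 'quick fox', 'fox']

get_ngrams(['the', 'quick', 'fox'], poss=['DT', 'JJ', 'NN'],
           n=2, included_tags={'NN'})
# ['quick fox', 'fox']   (only spans containing an NN survive)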
https://github.com/easonnie/semanticretrievalmrs/blob/149833a7ca9146ef4a89d90e0b6cedd0ab088e72/src/build_rindex/document_analysis.py#L20-L43
from build_rindex.build_rvindex import InvertedIndex, DocumentLengthTable from hotpot_doc_retri.hotpot_preliminary_doc_retri import STOPWORDS from inspect_wikidump.inspect_whole_file import get_first_paragraph_index from utils import common import config from sqlitedict import SqliteDict import json from nltk import ngrams import spacy from tqdm import tqdm from wiki_util import wiki_db_tool nlp = spacy.load('en') nlp.remove_pipe('tagger') nlp.remove_pipe('parser') nlp.remove_pipe('ner')
MIT License
rasahq/rasa
rasa/core/channels/channel.py
register
python
def register(
    input_channels: List["InputChannel"], app: Sanic, route: Optional[Text]
) -> None:
    async def handler(message: UserMessage) -> None:
        await app.agent.handle_message(message)

    for channel in input_channels:
        if route:
            p = urljoin(route, channel.url_prefix())
        else:
            p = None
        app.blueprint(channel.blueprint(handler), url_prefix=p)

    app.input_channels = input_channels
Registers input channel blueprints with Sanic.
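A hedged wiring sketch; RestInput is one of Rasa's built-in channels, and attaching app.agent is assumed to happen elsewhere (as in Rasa's own server startup):

from sanic import Sanic
from rasa.core.channels.channel import register
from rasa.core.channels.rest import RestInput

app = Sanic("bot")
# app.agent must be set before messages arrive (assumption)
register([RestInput()], app, route="/webhooks/")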
https://github.com/rasahq/rasa/blob/83eb56d257c2cd744183a2c5613a489f0a5b15d4/rasa/core/channels/channel.py#L83-L98
import json import logging import uuid import jwt from sanic import Sanic, Blueprint from sanic.request import Request from typing import ( Text, List, Dict, Any, Optional, Callable, Iterable, Awaitable, NoReturn, ) from rasa.cli import utils as cli_utils from rasa.shared.constants import DOCS_BASE_URL, DEFAULT_SENDER_ID from rasa.core.constants import BEARER_TOKEN_PREFIX from rasa.shared.exceptions import RasaException try: from urlparse import urljoin except ImportError: from urllib.parse import urljoin logger = logging.getLogger(__name__) class UserMessage: def __init__( self, text: Optional[Text] = None, output_channel: Optional["OutputChannel"] = None, sender_id: Optional[Text] = None, parse_data: Dict[Text, Any] = None, input_channel: Optional[Text] = None, message_id: Optional[Text] = None, metadata: Optional[Dict] = None, ) -> None: self.text = text.strip() if text else text if message_id is not None: self.message_id = str(message_id) else: self.message_id = uuid.uuid4().hex if output_channel is not None: self.output_channel = output_channel else: self.output_channel = CollectingOutputChannel() if sender_id is not None: self.sender_id = str(sender_id) else: self.sender_id = DEFAULT_SENDER_ID self.input_channel = input_channel self.parse_data = parse_data self.metadata = metadata
Apache License 2.0
googleapis/python-compute
google/cloud/compute_v1/services/firewalls/client.py
FirewallsClient.from_service_account_info
python
def from_service_account_info(cls, info: dict, *args, **kwargs):
    credentials = service_account.Credentials.from_service_account_info(info)
    kwargs["credentials"] = credentials
    return cls(*args, **kwargs)
Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. args: Additional positional arguments to pass to the constructor. kwargs: Additional keyword arguments to pass to the constructor. Returns: FirewallsClient: The constructed client.
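A short usage sketch; the key file path is illustrative:

import json

with open("service-account.json") as f:  # hypothetical key file
    info = json.load(f)
client = FirewallsClient.from_service_account_info(info)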
https://github.com/googleapis/python-compute/blob/703ac1703bc159dcd81e96759606ad896f125996/google/cloud/compute_v1/services/firewalls/client.py#L108-L122
from collections import OrderedDict from distutils import util import os import re from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.transport import mtls from google.auth.transport.grpc import SslCredentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.cloud.compute_v1.services.firewalls import pagers from google.cloud.compute_v1.types import compute from .transports.base import FirewallsTransport, DEFAULT_CLIENT_INFO from .transports.rest import FirewallsRestTransport class FirewallsClientMeta(type): _transport_registry = OrderedDict() _transport_registry["rest"] = FirewallsRestTransport def get_transport_class(cls, label: str = None,) -> Type[FirewallsTransport]: if label: return cls._transport_registry[label] return next(iter(cls._transport_registry.values())) class FirewallsClient(metaclass=FirewallsClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint mtls_endpoint_re = re.compile( r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" ) m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint if sandbox: return api_endpoint.replace( "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = "compute.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( DEFAULT_ENDPOINT ) @classmethod
Apache License 2.0
consensys/mythril
mythril/laser/ethereum/instructions.py
Instruction.addmod_
python
def addmod_(self, global_state: GlobalState) -> List[GlobalState]:
    s0, s1, s2 = (
        util.pop_bitvec(global_state.mstate),
        util.pop_bitvec(global_state.mstate),
        util.pop_bitvec(global_state.mstate),
    )
    global_state.mstate.stack.append(URem(URem(s0, s2) + URem(s1, s2), s2))
    return [global_state]
Implements the EVM ADDMOD opcode: pops s0, s1, s2 and pushes (s0 + s1) % s2. :param global_state: the current global state :return: the list of resulting global states
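EVM's ADDMOD takes the sum at arbitrary precision (it must not wrap at 2**256) before reducing. A plain-integer sketch of the semantics, with illustrative operands; the symbolic version above reduces each operand with URem before adding, which keeps the 256-bit intermediate sum from wrapping whenever the modulus is at most 2**255:

def addmod(s0: int, s1: int, s2: int) -> int:
    # the EVM defines ADDMOD with a zero modulus as yielding 0
    return (s0 + s1) % s2 if s2 != 0 else 0

M = 2**256
assert addmod(M - 1, 5, 7) == 6  # exceeds 256 bits mid-sum, no wraparound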
https://github.com/consensys/mythril/blob/df1d4dd0ebbb623054f4708717664dc6e27f76b9/mythril/laser/ethereum/instructions.py#L571-L583
import logging from copy import copy, deepcopy from typing import cast, Callable, List, Union from mythril.laser.smt import ( Extract, Expression, Function, UDiv, simplify, Concat, ULT, UGT, BitVec, is_false, is_true, URem, SRem, If, Bool, Not, LShR, UGE, ) from mythril.laser.smt import symbol_factory from mythril.disassembler.disassembly import Disassembly from mythril.laser.ethereum.state.calldata import ConcreteCalldata, SymbolicCalldata import mythril.laser.ethereum.util as helper from mythril.laser.ethereum import util from mythril.laser.ethereum.function_managers import ( keccak_function_manager, exponent_function_manager, ) from mythril.laser.ethereum.call import ( get_call_parameters, native_call, get_call_data, SYMBOLIC_CALLDATA_SIZE, ) from mythril.laser.ethereum.evm_exceptions import ( VmException, StackUnderflowException, InvalidJumpDestination, InvalidInstruction, OutOfGasException, WriteProtection, ) from mythril.laser.ethereum.instruction_data import get_opcode_gas, calculate_sha3_gas from mythril.laser.ethereum.state.global_state import GlobalState from mythril.laser.ethereum.transaction import ( MessageCallTransaction, TransactionStartSignal, ContractCreationTransaction, get_next_transaction_id, ) from mythril.support.support_utils import get_code_hash from mythril.support.loader import DynLoader log = logging.getLogger(__name__) TT256 = symbol_factory.BitVecVal(0, 256) TT256M1 = symbol_factory.BitVecVal(2 ** 256 - 1, 256) def transfer_ether( global_state: GlobalState, sender: BitVec, receiver: BitVec, value: Union[int, BitVec], ): value = value if isinstance(value, BitVec) else symbol_factory.BitVecVal(value, 256) global_state.world_state.constraints.append( UGE(global_state.world_state.balances[sender], value) ) global_state.world_state.balances[receiver] += value global_state.world_state.balances[sender] -= value class StateTransition(object): def __init__( self, increment_pc=True, enable_gas=True, is_state_mutation_instruction=False ): self.increment_pc = increment_pc self.enable_gas = enable_gas self.is_state_mutation_instruction = is_state_mutation_instruction @staticmethod def call_on_state_copy(func: Callable, func_obj: "Instruction", state: GlobalState): global_state_copy = copy(state) return func(func_obj, global_state_copy) def increment_states_pc(self, states: List[GlobalState]) -> List[GlobalState]: if self.increment_pc: for state in states: state.mstate.pc += 1 return states @staticmethod def check_gas_usage_limit(global_state: GlobalState): global_state.mstate.check_gas() if isinstance(global_state.current_transaction.gas_limit, BitVec): value = global_state.current_transaction.gas_limit.value if value is None: return global_state.current_transaction.gas_limit = value if ( global_state.mstate.min_gas_used >= global_state.current_transaction.gas_limit ): raise OutOfGasException() def accumulate_gas(self, global_state: GlobalState): if not self.enable_gas: return global_state opcode = global_state.instruction["opcode"] min_gas, max_gas = get_opcode_gas(opcode) global_state.mstate.min_gas_used += min_gas global_state.mstate.max_gas_used += max_gas self.check_gas_usage_limit(global_state) return global_state def __call__(self, func: Callable) -> Callable: def wrapper( func_obj: "Instruction", global_state: GlobalState ) -> List[GlobalState]: if self.is_state_mutation_instruction and global_state.environment.static: raise WriteProtection( "The function {} cannot be executed in a static call".format( func.__name__[:-1] ) ) new_global_states = 
self.call_on_state_copy(func, func_obj, global_state) new_global_states = [ self.accumulate_gas(state) for state in new_global_states ] return self.increment_states_pc(new_global_states) return wrapper class Instruction: def __init__( self, op_code: str, dynamic_loader: DynLoader, pre_hooks: List[Callable] = None, post_hooks: List[Callable] = None, ) -> None: self.dynamic_loader = dynamic_loader self.op_code = op_code.upper() self.pre_hook = pre_hooks if pre_hooks else [] self.post_hook = post_hooks if post_hooks else [] def _execute_pre_hooks(self, global_state: GlobalState): for hook in self.pre_hook: hook(global_state) def _execute_post_hooks(self, global_state: GlobalState): for hook in self.post_hook: hook(global_state) def evaluate(self, global_state: GlobalState, post=False) -> List[GlobalState]: log.debug("Evaluating %s at %i", self.op_code, global_state.mstate.pc) op = self.op_code.lower() if self.op_code.startswith("PUSH"): op = "push" elif self.op_code.startswith("DUP"): op = "dup" elif self.op_code.startswith("SWAP"): op = "swap" elif self.op_code.startswith("LOG"): op = "log" instruction_mutator = ( getattr(self, op + "_", None) if not post else getattr(self, op + "_" + "post", None) ) if instruction_mutator is None: raise NotImplementedError self._execute_pre_hooks(global_state) result = instruction_mutator(global_state) self._execute_post_hooks(global_state) return result @StateTransition() def jumpdest_(self, global_state: GlobalState) -> List[GlobalState]: return [global_state] @StateTransition() def push_(self, global_state: GlobalState) -> List[GlobalState]: push_instruction = global_state.get_current_instruction() push_value = push_instruction["argument"][2:] try: length_of_value = 2 * int(push_instruction["opcode"][4:]) except ValueError: raise VmException("Invalid Push instruction") push_value += "0" * max(length_of_value - len(push_value), 0) global_state.mstate.stack.append( symbol_factory.BitVecVal(int(push_value, 16), 256) ) return [global_state] @StateTransition() def dup_(self, global_state: GlobalState) -> List[GlobalState]: value = int(global_state.get_current_instruction()["opcode"][3:], 10) global_state.mstate.stack.append(global_state.mstate.stack[-value]) return [global_state] @StateTransition() def swap_(self, global_state: GlobalState) -> List[GlobalState]: depth = int(self.op_code[4:]) stack = global_state.mstate.stack stack[-depth - 1], stack[-1] = stack[-1], stack[-depth - 1] return [global_state] @StateTransition() def pop_(self, global_state: GlobalState) -> List[GlobalState]: global_state.mstate.stack.pop() return [global_state] @StateTransition() def and_(self, global_state: GlobalState) -> List[GlobalState]: stack = global_state.mstate.stack op1, op2 = stack.pop(), stack.pop() if isinstance(op1, Bool): op1 = If( op1, symbol_factory.BitVecVal(1, 256), symbol_factory.BitVecVal(0, 256) ) if isinstance(op2, Bool): op2 = If( op2, symbol_factory.BitVecVal(1, 256), symbol_factory.BitVecVal(0, 256) ) if not isinstance(op1, Expression): op1 = symbol_factory.BitVecVal(op1, 256) if not isinstance(op2, Expression): op2 = symbol_factory.BitVecVal(op2, 256) stack.append(op1 & op2) return [global_state] @StateTransition() def or_(self, global_state: GlobalState) -> List[GlobalState]: stack = global_state.mstate.stack op1, op2 = stack.pop(), stack.pop() if isinstance(op1, Bool): op1 = If( op1, symbol_factory.BitVecVal(1, 256), symbol_factory.BitVecVal(0, 256) ) if isinstance(op2, Bool): op2 = If( op2, symbol_factory.BitVecVal(1, 256), symbol_factory.BitVecVal(0, 
256) ) stack.append(op1 | op2) return [global_state] @StateTransition() def xor_(self, global_state: GlobalState) -> List[GlobalState]: mstate = global_state.mstate mstate.stack.append(mstate.stack.pop() ^ mstate.stack.pop()) return [global_state] @StateTransition() def not_(self, global_state: GlobalState): mstate = global_state.mstate mstate.stack.append(TT256M1 - mstate.stack.pop()) return [global_state] @StateTransition() def byte_(self, global_state: GlobalState) -> List[GlobalState]: mstate = global_state.mstate op0, op1 = mstate.stack.pop(), mstate.stack.pop() if not isinstance(op1, Expression): op1 = symbol_factory.BitVecVal(op1, 256) try: index = util.get_concrete_int(op0) offset = (31 - index) * 8 if offset >= 0: result = simplify( Concat( symbol_factory.BitVecVal(0, 248), Extract(offset + 7, offset, op1), ) ) else: result = 0 except TypeError: log.debug("BYTE: Unsupported symbolic byte offset") result = global_state.new_bitvec( str(simplify(op1)) + "[" + str(simplify(op0)) + "]", 256 ) mstate.stack.append(result) return [global_state] @StateTransition() def add_(self, global_state: GlobalState) -> List[GlobalState]: global_state.mstate.stack.append( ( helper.pop_bitvec(global_state.mstate) + helper.pop_bitvec(global_state.mstate) ) ) return [global_state] @StateTransition() def sub_(self, global_state: GlobalState) -> List[GlobalState]: global_state.mstate.stack.append( ( helper.pop_bitvec(global_state.mstate) - helper.pop_bitvec(global_state.mstate) ) ) return [global_state] @StateTransition() def mul_(self, global_state: GlobalState) -> List[GlobalState]: global_state.mstate.stack.append( ( helper.pop_bitvec(global_state.mstate) * helper.pop_bitvec(global_state.mstate) ) ) return [global_state] @StateTransition() def div_(self, global_state: GlobalState) -> List[GlobalState]: op0, op1 = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) if op1 == 0: global_state.mstate.stack.append(symbol_factory.BitVecVal(0, 256)) else: global_state.mstate.stack.append(UDiv(op0, op1)) return [global_state] @StateTransition() def sdiv_(self, global_state: GlobalState) -> List[GlobalState]: s0, s1 = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) if s1 == 0: global_state.mstate.stack.append(symbol_factory.BitVecVal(0, 256)) else: global_state.mstate.stack.append(s0 / s1) return [global_state] @StateTransition() def mod_(self, global_state: GlobalState) -> List[GlobalState]: s0, s1 = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) global_state.mstate.stack.append(0 if s1 == 0 else URem(s0, s1)) return [global_state] @StateTransition() def shl_(self, global_state: GlobalState) -> List[GlobalState]: shift, value = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) global_state.mstate.stack.append(value << shift) return [global_state] @StateTransition() def shr_(self, global_state: GlobalState) -> List[GlobalState]: shift, value = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) global_state.mstate.stack.append(LShR(value, shift)) return [global_state] @StateTransition() def sar_(self, global_state: GlobalState) -> List[GlobalState]: shift, value = ( util.pop_bitvec(global_state.mstate), util.pop_bitvec(global_state.mstate), ) global_state.mstate.stack.append(value >> shift) return [global_state] @StateTransition() def smod_(self, global_state: GlobalState) -> List[GlobalState]: s0, s1 = ( util.pop_bitvec(global_state.mstate), 
util.pop_bitvec(global_state.mstate), ) global_state.mstate.stack.append(0 if s1 == 0 else SRem(s0, s1)) return [global_state] @StateTransition()
MIT License
ibm/mi-prometheus
miprometheus/models/mental_model/memory.py
Memory.subset_similarity
python
def subset_similarity(self, key, subset_attention):
    key = torch.nn.functional.normalize(key, dim=-1)
    norm_mem = torch.nn.functional.normalize(self.memory + 1e-12, dim=-1)
    content_address = torch.sum(
        key.unsqueeze(1) * norm_mem * subset_attention.unsqueeze(1), -1)
    return content_address
Returns the similarity of a key to objects in memory. Only compares a subset of the full object vector. :param key: Key to compare similarity to. :type key: torch.Tensor :param subset_attention: Subset of the object and key vectors to compare. :type subset_attention: torch.Tensor
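A shape sketch under assumed dimensions (batch B, mem_slots S, object_size D; the [B, S, D] memory layout is inferred from how the unsqueeze calls broadcast):

# key:              [B, D] -> unsqueeze(1) -> [B, 1, D]
# subset_attention: [B, D] -> unsqueeze(1) -> [B, 1, D]
# self.memory:                                [B, S, D]
# elementwise product, then sum over D     -> [B, S]
# i.e. each slot gets a masked dot product (cosine-style, since the
# key and the memory rows are L2-normalized) against the key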
https://github.com/ibm/mi-prometheus/blob/a8e8a5b339598b0637a251834c560bc24d5a9500/miprometheus/models/mental_model/memory.py#L108-L133
import torch import torch.nn as nn class Memory(nn.Module): def __init__(self,mem_slots,object_size,controller_out_size,app_state): super(Memory,self).__init__() self.mem_slots = mem_slots self.object_size = object_size self.dtype = app_state.dtype self.read_keygen = torch.nn.Linear(controller_out_size, object_size) self.read_subset_gen= torch.nn.Linear(controller_out_size, object_size) self.read_mix_gen= torch.nn.Linear(controller_out_size, 2) self.read_location = torch.nn.Linear(controller_out_size, mem_slots) self.read_gate = torch.nn.Linear(controller_out_size,object_size) self.read_sharpen = torch.nn.Linear(controller_out_size,1) self.erase_keygen = torch.nn.Linear(controller_out_size, object_size) self.erase_subset_gen = torch.nn.Linear(controller_out_size, object_size) self.erase_mix_gen= torch.nn.Linear(controller_out_size, 2) self.erase_location = torch.nn.Linear(controller_out_size, mem_slots) self.erase_gate = torch.nn.Linear(controller_out_size,object_size) self.erase_sharpen = torch.nn.Linear(controller_out_size,1) self.write_keygen = torch.nn.Linear(controller_out_size, object_size) self.write_subset_gen = torch.nn.Linear(controller_out_size, object_size) self.write_mix_gen= torch.nn.Linear(controller_out_size, 2) self.write_location = torch.nn.Linear(controller_out_size, mem_slots) self.write_gate = torch.nn.Linear(controller_out_size,object_size) self.write_sharpen = torch.nn.Linear(controller_out_size,1) nn.init.xavier_uniform_(self.read_keygen.weight) nn.init.xavier_uniform_(self.read_subset_gen.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.read_mix_gen.weight) nn.init.xavier_uniform_(self.read_location.weight) nn.init.xavier_uniform_(self.read_gate.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.read_sharpen.weight) self.read_keygen.bias.data.fill_(0.01) self.read_subset_gen.bias.data.fill_(0.01) self.read_mix_gen.bias.data.fill_(0.01) self.read_location.bias.data.fill_(0.01) self.read_gate.bias.data.fill_(0.01) self.read_sharpen.bias.data.fill_(1.00) nn.init.xavier_uniform_(self.erase_keygen.weight) nn.init.xavier_uniform_(self.erase_subset_gen.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.erase_mix_gen.weight) nn.init.xavier_uniform_(self.erase_location.weight) nn.init.xavier_uniform_(self.erase_gate.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.erase_sharpen.weight) self.erase_keygen.bias.data.fill_(0.01) self.erase_subset_gen.bias.data.fill_(0.01) self.erase_mix_gen.bias.data.fill_(0.01) self.erase_location.bias.data.fill_(0.01) self.erase_gate.bias.data.fill_(0.01) self.erase_sharpen.bias.data.fill_(1.00) nn.init.xavier_uniform_(self.write_keygen.weight) nn.init.xavier_uniform_(self.write_subset_gen.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.write_mix_gen.weight) nn.init.xavier_uniform_(self.write_location.weight) nn.init.xavier_uniform_(self.write_gate.weight, gain=nn.init.calculate_gain('sigmoid')) nn.init.xavier_uniform_(self.write_sharpen.weight) self.write_keygen.bias.data.fill_(0.01) self.write_subset_gen.bias.data.fill_(0.01) self.write_mix_gen.bias.data.fill_(0.01) self.write_location.bias.data.fill_(0.01) self.write_gate.bias.data.fill_(0.01) self.write_sharpen.bias.data.fill_(1.00)
Apache License 2.0
chaffelson/whoville
whoville/cloudbreak/models/autoscale_cluster_response.py
AutoscaleClusterResponse.blueprint
python
def blueprint(self, blueprint):
    self._blueprint = blueprint
Sets the blueprint of this AutoscaleClusterResponse (the blueprint for the cluster). :param blueprint: The blueprint of this AutoscaleClusterResponse. :type: BlueprintResponse
https://github.com/chaffelson/whoville/blob/f71fda629c9fd50d0a482120165ea5abcc754522/whoville/cloudbreak/models/autoscale_cluster_response.py#L425-L434
from pprint import pformat from six import iteritems import re class AutoscaleClusterResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'id': 'int', 'name': 'str', 'status': 'str', 'hours_up': 'int', 'minutes_up': 'int', 'cluster': 'str', 'blueprint_id': 'int', 'blueprint': 'BlueprintResponse', 'description': 'str', 'status_reason': 'str', 'ambari_server_ip': 'str', 'ambari_server_url': 'str', 'user_name': 'str', 'secure': 'bool', 'host_groups': 'list[HostGroupResponse]', 'rds_config_ids': 'list[int]', 'rds_configs': 'list[RDSConfigResponse]', 'proxy_name': 'str', 'cluster_exposed_services_for_topologies': 'dict(str, list[ClusterExposedServiceResponse])', 'config_strategy': 'str', 'ldap_config_id': 'int', 'ldap_config': 'LdapConfigResponse', 'attributes': 'dict(str, object)', 'blueprint_inputs': 'list[BlueprintInput]', 'blueprint_custom_properties': 'str', 'executor_type': 'str', 'gateway': 'GatewayJson', 'custom_containers': 'CustomContainerResponse', 'ambari_stack_details': 'AmbariStackDetailsResponse', 'ambari_repo_details_json': 'AmbariRepoDetails', 'ambari_database_details': 'AmbariDatabaseDetails', 'custom_queue': 'str', 'creation_finished': 'int', 'kerberos_response': 'KerberosResponse', 'uptime': 'int', 'extended_blueprint_text': 'str', 'shared_service_response': 'SharedServiceResponse', 'file_system_response': 'FileSystemResponse', 'workspace': 'WorkspaceResourceResponse', 'password': 'str' } attribute_map = { 'id': 'id', 'name': 'name', 'status': 'status', 'hours_up': 'hoursUp', 'minutes_up': 'minutesUp', 'cluster': 'cluster', 'blueprint_id': 'blueprintId', 'blueprint': 'blueprint', 'description': 'description', 'status_reason': 'statusReason', 'ambari_server_ip': 'ambariServerIp', 'ambari_server_url': 'ambariServerUrl', 'user_name': 'userName', 'secure': 'secure', 'host_groups': 'hostGroups', 'rds_config_ids': 'rdsConfigIds', 'rds_configs': 'rdsConfigs', 'proxy_name': 'proxyName', 'cluster_exposed_services_for_topologies': 'clusterExposedServicesForTopologies', 'config_strategy': 'configStrategy', 'ldap_config_id': 'ldapConfigId', 'ldap_config': 'ldapConfig', 'attributes': 'attributes', 'blueprint_inputs': 'blueprintInputs', 'blueprint_custom_properties': 'blueprintCustomProperties', 'executor_type': 'executorType', 'gateway': 'gateway', 'custom_containers': 'customContainers', 'ambari_stack_details': 'ambariStackDetails', 'ambari_repo_details_json': 'ambariRepoDetailsJson', 'ambari_database_details': 'ambariDatabaseDetails', 'custom_queue': 'customQueue', 'creation_finished': 'creationFinished', 'kerberos_response': 'kerberosResponse', 'uptime': 'uptime', 'extended_blueprint_text': 'extendedBlueprintText', 'shared_service_response': 'sharedServiceResponse', 'file_system_response': 'fileSystemResponse', 'workspace': 'workspace', 'password': 'password' } def __init__(self, id=None, name=None, status=None, hours_up=None, minutes_up=None, cluster=None, blueprint_id=None, blueprint=None, description=None, status_reason=None, ambari_server_ip=None, ambari_server_url=None, user_name=None, secure=False, host_groups=None, rds_config_ids=None, rds_configs=None, proxy_name=None, cluster_exposed_services_for_topologies=None, config_strategy=None, ldap_config_id=None, ldap_config=None, attributes=None, blueprint_inputs=None, blueprint_custom_properties=None, executor_type=None, gateway=None, custom_containers=None, 
ambari_stack_details=None, ambari_repo_details_json=None, ambari_database_details=None, custom_queue=None, creation_finished=None, kerberos_response=None, uptime=None, extended_blueprint_text=None, shared_service_response=None, file_system_response=None, workspace=None, password=None): self._id = None self._name = None self._status = None self._hours_up = None self._minutes_up = None self._cluster = None self._blueprint_id = None self._blueprint = None self._description = None self._status_reason = None self._ambari_server_ip = None self._ambari_server_url = None self._user_name = None self._secure = None self._host_groups = None self._rds_config_ids = None self._rds_configs = None self._proxy_name = None self._cluster_exposed_services_for_topologies = None self._config_strategy = None self._ldap_config_id = None self._ldap_config = None self._attributes = None self._blueprint_inputs = None self._blueprint_custom_properties = None self._executor_type = None self._gateway = None self._custom_containers = None self._ambari_stack_details = None self._ambari_repo_details_json = None self._ambari_database_details = None self._custom_queue = None self._creation_finished = None self._kerberos_response = None self._uptime = None self._extended_blueprint_text = None self._shared_service_response = None self._file_system_response = None self._workspace = None self._password = None if id is not None: self.id = id if name is not None: self.name = name if status is not None: self.status = status if hours_up is not None: self.hours_up = hours_up if minutes_up is not None: self.minutes_up = minutes_up if cluster is not None: self.cluster = cluster if blueprint_id is not None: self.blueprint_id = blueprint_id if blueprint is not None: self.blueprint = blueprint if description is not None: self.description = description if status_reason is not None: self.status_reason = status_reason if ambari_server_ip is not None: self.ambari_server_ip = ambari_server_ip if ambari_server_url is not None: self.ambari_server_url = ambari_server_url if user_name is not None: self.user_name = user_name if secure is not None: self.secure = secure if host_groups is not None: self.host_groups = host_groups if rds_config_ids is not None: self.rds_config_ids = rds_config_ids if rds_configs is not None: self.rds_configs = rds_configs if proxy_name is not None: self.proxy_name = proxy_name if cluster_exposed_services_for_topologies is not None: self.cluster_exposed_services_for_topologies = cluster_exposed_services_for_topologies if config_strategy is not None: self.config_strategy = config_strategy if ldap_config_id is not None: self.ldap_config_id = ldap_config_id if ldap_config is not None: self.ldap_config = ldap_config if attributes is not None: self.attributes = attributes if blueprint_inputs is not None: self.blueprint_inputs = blueprint_inputs if blueprint_custom_properties is not None: self.blueprint_custom_properties = blueprint_custom_properties if executor_type is not None: self.executor_type = executor_type if gateway is not None: self.gateway = gateway if custom_containers is not None: self.custom_containers = custom_containers if ambari_stack_details is not None: self.ambari_stack_details = ambari_stack_details if ambari_repo_details_json is not None: self.ambari_repo_details_json = ambari_repo_details_json if ambari_database_details is not None: self.ambari_database_details = ambari_database_details if custom_queue is not None: self.custom_queue = custom_queue if creation_finished is not None: self.creation_finished 
= creation_finished if kerberos_response is not None: self.kerberos_response = kerberos_response if uptime is not None: self.uptime = uptime if extended_blueprint_text is not None: self.extended_blueprint_text = extended_blueprint_text if shared_service_response is not None: self.shared_service_response = shared_service_response if file_system_response is not None: self.file_system_response = file_system_response if workspace is not None: self.workspace = workspace if password is not None: self.password = password @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def status(self): return self._status @status.setter def status(self, status): allowed_values = ["REQUESTED", "CREATE_IN_PROGRESS", "AVAILABLE", "UPDATE_IN_PROGRESS", "UPDATE_REQUESTED", "UPDATE_FAILED", "CREATE_FAILED", "ENABLE_SECURITY_FAILED", "PRE_DELETE_IN_PROGRESS", "DELETE_IN_PROGRESS", "DELETE_FAILED", "DELETE_COMPLETED", "STOPPED", "STOP_REQUESTED", "START_REQUESTED", "STOP_IN_PROGRESS", "START_IN_PROGRESS", "START_FAILED", "STOP_FAILED", "WAIT_FOR_SYNC", "MAINTENANCE_MODE_ENABLED"] if status not in allowed_values: raise ValueError( "Invalid value for `status` ({0}), must be one of {1}" .format(status, allowed_values) ) self._status = status @property def hours_up(self): return self._hours_up @hours_up.setter def hours_up(self, hours_up): self._hours_up = hours_up @property def minutes_up(self): return self._minutes_up @minutes_up.setter def minutes_up(self, minutes_up): self._minutes_up = minutes_up @property def cluster(self): return self._cluster @cluster.setter def cluster(self, cluster): self._cluster = cluster @property def blueprint_id(self): return self._blueprint_id @blueprint_id.setter def blueprint_id(self, blueprint_id): self._blueprint_id = blueprint_id @property def blueprint(self): return self._blueprint @blueprint.setter
Apache License 2.0
federatedai/practicing-federated-learning
chapter15_Homomorphic_Encryption/paillier.py
EncryptedNumber._add_encoded
python
def _add_encoded(self, encoded): if self.public_key != encoded.public_key: raise ValueError("Attempted to add numbers encoded against " "different public keys!") a, b = self, encoded if a.exponent > b.exponent: a = self.decrease_exponent_to(b.exponent) elif a.exponent < b.exponent: b = b.decrease_exponent_to(a.exponent) encrypted_scalar = a.public_key.raw_encrypt(b.encoding, 1) sum_ciphertext = a._raw_add(a.ciphertext(False), encrypted_scalar) return EncryptedNumber(a.public_key, sum_ciphertext, a.exponent)
Returns E(a + b), given self=E(a) and b. Args: encoded (EncodedNumber): an :class:`EncodedNumber` to be added to `self`. Returns: EncryptedNumber: E(a + b), calculated by encrypting b and taking the product of E(a) and E(b) modulo :attr:`~PaillierPublicKey.n` ** 2. Raises: ValueError: if `encoded` is out of range or precision.
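A minimal usage sketch (hypothetical key size and values; assumes the paillier and encoding modules above are importable). Adding an EncodedNumber to an EncryptedNumber dispatches through __add__ into _add_encoded:

pub, priv = generate_paillier_keypair(n_length=1024)
enc_a = pub.encrypt(3.5)                      # E(a)
b = EncodedNumber.encode(pub, 2.5)            # encoded plaintext b
enc_sum = enc_a + b                           # dispatches to _add_encoded
assert priv.decrypt(enc_sum) == 6.0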
https://github.com/federatedai/practicing-federated-learning/blob/edc3c63fbd958abc39aaae97efa42dd0c44d98da/chapter15_Homomorphic_Encryption/paillier.py#L635-L666
import random try: from collections.abc import Mapping except ImportError: Mapping = dict from encoding import EncodedNumber from util import invert, powmod, getprimeover, isqrt DEFAULT_KEYSIZE = 2048 def generate_paillier_keypair(private_keyring=None, n_length=DEFAULT_KEYSIZE): p = q = n = None n_len = 0 while n_len != n_length: p = getprimeover(n_length // 2) q = p while q == p: q = getprimeover(n_length // 2) n = p * q n_len = n.bit_length() public_key = PaillierPublicKey(n) private_key = PaillierPrivateKey(public_key, p, q) if private_keyring is not None: private_keyring.add(private_key) return public_key, private_key class PaillierPublicKey(object): def __init__(self, n): self.g = n + 1 self.n = n self.nsquare = n * n self.max_int = n // 3 - 1 def __repr__(self): publicKeyHash = hex(hash(self))[2:] return "<PaillierPublicKey {}>".format(publicKeyHash[:10]) def __eq__(self, other): return self.n == other.n def __hash__(self): return hash(self.n) def raw_encrypt(self, plaintext, r_value=None): if not isinstance(plaintext, int): raise TypeError('Expected int type plaintext but got: %s' % type(plaintext)) if self.n - self.max_int <= plaintext < self.n: neg_plaintext = self.n - plaintext neg_ciphertext = (self.n * neg_plaintext + 1) % self.nsquare nude_ciphertext = invert(neg_ciphertext, self.nsquare) else: nude_ciphertext = (self.n * plaintext + 1) % self.nsquare r = r_value or self.get_random_lt_n() obfuscator = powmod(r, self.n, self.nsquare) return (nude_ciphertext * obfuscator) % self.nsquare def get_random_lt_n(self): return random.SystemRandom().randrange(1, self.n) def encrypt(self, value, precision=None, r_value=None): if isinstance(value, EncodedNumber): encoding = value else: encoding = EncodedNumber.encode(self, value, precision) return self.encrypt_encoded(encoding, r_value) def encrypt_encoded(self, encoding, r_value): obfuscator = r_value or 1 ciphertext = self.raw_encrypt(encoding.encoding, r_value=obfuscator) encrypted_number = EncryptedNumber(self, ciphertext, encoding.exponent) if r_value is None: encrypted_number.obfuscate() return encrypted_number class PaillierPrivateKey(object): def __init__(self, public_key, p, q): if not p*q == public_key.n: raise ValueError('given public key does not match the given p and q.') if p == q: raise ValueError('p and q have to be different') self.public_key = public_key if q < p: self.p = q self.q = p else: self.p = p self.q = q self.psquare = self.p * self.p self.qsquare = self.q * self.q self.p_inverse = invert(self.p, self.q) self.hp = self.h_function(self.p, self.psquare) self.hq = self.h_function(self.q, self.qsquare) @staticmethod def from_totient(public_key, totient): p_plus_q = public_key.n - totient + 1 p_minus_q = isqrt(p_plus_q * p_plus_q - public_key.n * 4) q = (p_plus_q - p_minus_q) // 2 p = p_plus_q - q if not p*q == public_key.n: raise ValueError('given public key and totient do not match.') return PaillierPrivateKey(public_key, p, q) def __repr__(self): pub_repr = repr(self.public_key) return "<PaillierPrivateKey for {}>".format(pub_repr) def decrypt(self, encrypted_number): encoded = self.decrypt_encoded(encrypted_number) return encoded.decode() def decrypt_encoded(self, encrypted_number, Encoding=None): if not isinstance(encrypted_number, EncryptedNumber): raise TypeError('Expected encrypted_number to be an EncryptedNumber' ' not: %s' % type(encrypted_number)) if self.public_key != encrypted_number.public_key: raise ValueError('encrypted_number was encrypted against a ' 'different key!') if Encoding is None: Encoding = 
EncodedNumber encoded = self.raw_decrypt(encrypted_number.ciphertext(be_secure=False)) return Encoding(self.public_key, encoded, encrypted_number.exponent) def raw_decrypt(self, ciphertext): if not isinstance(ciphertext, int): raise TypeError('Expected ciphertext to be an int, not: %s' % type(ciphertext)) decrypt_to_p = self.l_function(powmod(ciphertext, self.p-1, self.psquare), self.p) * self.hp % self.p decrypt_to_q = self.l_function(powmod(ciphertext, self.q-1, self.qsquare), self.q) * self.hq % self.q return self.crt(decrypt_to_p, decrypt_to_q) def h_function(self, x, xsquare): return invert(self.l_function(powmod(self.public_key.g, x - 1, xsquare),x), x) def l_function(self, x, p): return (x - 1) // p def crt(self, mp, mq): u = (mq - mp) * self.p_inverse % self.q return mp + (u * self.p) def __eq__(self, other): return self.p == other.p and self.q == other.q def __hash__(self): return hash((self.p, self.q)) class PaillierPrivateKeyring(Mapping): def __init__(self, private_keys=None): if private_keys is None: private_keys = [] public_keys = [k.public_key for k in private_keys] self.__keyring = dict(zip(public_keys, private_keys)) def __getitem__(self, key): return self.__keyring[key] def __len__(self): return len(self.__keyring) def __iter__(self): return iter(self.__keyring) def __delitem__(self, public_key): del self.__keyring[public_key] def add(self, private_key): if not isinstance(private_key, PaillierPrivateKey): raise TypeError("private_key should be of type PaillierPrivateKey, " "not %s" % type(private_key)) self.__keyring[private_key.public_key] = private_key def decrypt(self, encrypted_number): relevant_private_key = self.__keyring[encrypted_number.public_key] return relevant_private_key.decrypt(encrypted_number) class EncryptedNumber(object): def __init__(self, public_key, ciphertext, exponent=0): self.public_key = public_key self.__ciphertext = ciphertext self.exponent = exponent self.__is_obfuscated = False if isinstance(self.ciphertext, EncryptedNumber): raise TypeError('ciphertext should be an integer') if not isinstance(self.public_key, PaillierPublicKey): raise TypeError('public_key should be a PaillierPublicKey') def __add__(self, other): if isinstance(other, EncryptedNumber): return self._add_encrypted(other) elif isinstance(other, EncodedNumber): return self._add_encoded(other) else: return self._add_scalar(other) def __radd__(self, other): return self.__add__(other) def __mul__(self, other): if isinstance(other, EncryptedNumber): raise NotImplementedError('Good luck with that...') if isinstance(other, EncodedNumber): encoding = other else: encoding = EncodedNumber.encode(self.public_key, other) product = self._raw_mul(encoding.encoding) exponent = self.exponent + encoding.exponent return EncryptedNumber(self.public_key, product, exponent) def __rmul__(self, other): return self.__mul__(other) def __sub__(self, other): return self + (other * -1) def __rsub__(self, other): return other + (self * -1) def __truediv__(self, scalar): return self.__mul__(1 / scalar) def ciphertext(self, be_secure=True): if be_secure and not self.__is_obfuscated: self.obfuscate() return self.__ciphertext def decrease_exponent_to(self, new_exp): if new_exp > self.exponent: raise ValueError('New exponent %i should be more negative than ' 'old exponent %i' % (new_exp, self.exponent)) multiplied = self * pow(EncodedNumber.BASE, self.exponent - new_exp) multiplied.exponent = new_exp return multiplied def obfuscate(self): r = self.public_key.get_random_lt_n() r_pow_n = powmod(r, 
self.public_key.n, self.public_key.nsquare) self.__ciphertext = self.__ciphertext * r_pow_n % self.public_key.nsquare self.__is_obfuscated = True def _add_scalar(self, scalar): encoded = EncodedNumber.encode(self.public_key, scalar, max_exponent=self.exponent) return self._add_encoded(encoded)
Apache License 2.0
eric3911/mini_ssd
object_detection/core/batcher.py
BatchQueue.__init__
python
def __init__(self, tensor_dict, batch_size, batch_queue_capacity, num_batch_queue_threads, prefetch_queue_capacity): static_shapes = collections.OrderedDict( {key: tensor.get_shape() for key, tensor in tensor_dict.items()}) runtime_shapes = collections.OrderedDict( {(key + rt_shape_str): tf.shape(tensor) for key, tensor in tensor_dict.items()}) all_tensors = tensor_dict all_tensors.update(runtime_shapes) batched_tensors = tf.train.batch( all_tensors, capacity=batch_queue_capacity, batch_size=batch_size, dynamic_pad=True, num_threads=num_batch_queue_threads) self._queue = prefetcher.prefetch(batched_tensors, prefetch_queue_capacity) self._static_shapes = static_shapes self._batch_size = batch_size
Constructs a batch queue holding tensor_dict. Args: tensor_dict: dictionary of tensors to batch. batch_size: batch size. batch_queue_capacity: max capacity of the queue from which the tensors are batched. num_batch_queue_threads: number of threads to use for batching. prefetch_queue_capacity: max capacity of the queue used to prefetch assembled batches.
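A hedged construction sketch (TF1-style graph mode; the random tensor stands in for a real decoder output, and all capacity numbers are hypothetical):

import tensorflow as tf

tensor_dict = {'image': tf.random_uniform([300, 300, 3])}  # hypothetical input tensor
batch_queue = BatchQueue(tensor_dict,
                         batch_size=32,
                         batch_queue_capacity=150,
                         num_batch_queue_threads=8,
                         prefetch_queue_capacity=10)
# batched = batch_queue.dequeue()  # inside a tf.Session with queue runners started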
https://github.com/eric3911/mini_ssd/blob/6fb6e1bce3ab6e4adb832b37e78325803c7424b6/object_detection/core/batcher.py#L68-L101
import collections import tensorflow as tf from object_detection.core import prefetcher rt_shape_str = '_runtime_shapes' class BatchQueue(object):
MIT License
jamesbornholt/plotty
debug_toolbar/views.py
sql_explain
python
def sql_explain(request): from debug_toolbar.panels.sql import reformat_sql sql = request.GET.get('sql', '') params = request.GET.get('params', '') hash = sha_constructor(settings.SECRET_KEY + sql + params).hexdigest() if hash != request.GET.get('hash', ''): return HttpResponseBadRequest('Tamper alert') if sql.lower().strip().startswith('select'): params = simplejson.loads(params) cursor = connection.cursor() if settings.DATABASE_ENGINE == "sqlite3": cursor.execute("EXPLAIN QUERY PLAN %s" % (sql,), params) else: cursor.execute("EXPLAIN %s" % (sql,), params) headers = [d[0] for d in cursor.description] result = cursor.fetchall() cursor.close() context = { 'result': result, 'sql': reformat_sql(cursor.db.ops.last_executed_query(cursor, sql, params)), 'duration': request.GET.get('duration', 0.0), 'headers': headers, } return render_to_response('debug_toolbar/panels/sql_explain.html', context) raise InvalidSQLError("Only 'select' queries are allowed.")
Returns the output of the SQL EXPLAIN on the given query. Expected GET variables: sql: urlencoded sql with positional arguments params: JSON encoded parameter values duration: time for SQL to execute passed in from toolbar just for redisplay hash: the hash of (secret + sql + params) for tamper checking
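A hedged sketch of how a client would build a valid (non-tampered) request for this view, mirroring the hash computation above (the URL path is hypothetical):

from django.conf import settings
from django.utils.hashcompat import sha_constructor
from django.utils import simplejson
import urllib

sql = 'SELECT * FROM auth_user WHERE id = %s'
params = simplejson.dumps([1])
hash = sha_constructor(settings.SECRET_KEY + sql + params).hexdigest()
url = '/__debug__/sql_explain/?' + urllib.urlencode(
    {'sql': sql, 'params': params, 'hash': hash, 'duration': 0.0})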
https://github.com/jamesbornholt/plotty/blob/1c7a535d25f6c7779ec7c85af41608f7c51d9c40/debug_toolbar/views.py#L61-L99
import os import django.views.static from django.conf import settings from django.db import connection from django.http import HttpResponseBadRequest from django.shortcuts import render_to_response from django.utils import simplejson from django.utils.hashcompat import sha_constructor class InvalidSQLError(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def debug_media(request, path): root = getattr(settings, 'DEBUG_TOOLBAR_MEDIA_ROOT', None) if root is None: parent = os.path.abspath(os.path.dirname(__file__)) root = os.path.join(parent, 'media', 'debug_toolbar') return django.views.static.serve(request, path, root) def sql_select(request): from debug_toolbar.panels.sql import reformat_sql sql = request.GET.get('sql', '') params = request.GET.get('params', '') hash = sha_constructor(settings.SECRET_KEY + sql + params).hexdigest() if hash != request.GET.get('hash', ''): return HttpResponseBadRequest('Tamper alert') if sql.lower().strip().startswith('select'): params = simplejson.loads(params) cursor = connection.cursor() cursor.execute(sql, params) headers = [d[0] for d in cursor.description] result = cursor.fetchall() cursor.close() context = { 'result': result, 'sql': reformat_sql(cursor.db.ops.last_executed_query(cursor, sql, params)), 'duration': request.GET.get('duration', 0.0), 'headers': headers, } return render_to_response('debug_toolbar/panels/sql_select.html', context) raise InvalidSQLError("Only 'select' queries are allowed.")
MIT License
zzzeek/sqlalchemy
lib/sqlalchemy/pool/events.py
PoolEvents.close
python
def close(self, dbapi_connection, connection_record):
Called when a DBAPI connection is closed. The event is emitted before the close occurs. The close of a connection can fail; typically this is because the connection is already closed. If the close operation fails, the connection is discarded. The :meth:`.close` event corresponds to a connection that's still associated with the pool. To intercept close events for detached connections use :meth:`.close_detached`. .. versionadded:: 1.1 :param dbapi_connection: a DBAPI connection. The :attr:`._ConnectionRecord.dbapi_connection` attribute. :param connection_record: the :class:`._ConnectionRecord` managing the DBAPI connection.
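A short listener sketch using standard SQLAlchemy event registration (the engine URL is hypothetical):

from sqlalchemy import create_engine, event

engine = create_engine("sqlite://")

@event.listens_for(engine, "close")
def receive_close(dbapi_connection, connection_record):
    # runs just before the DBAPI connection is closed
    print("closing", dbapi_connection)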
https://github.com/zzzeek/sqlalchemy/blob/979ea6b21f71605314dc0ac1231dd385eced98c4/lib/sqlalchemy/pool/events.py#L225-L246
from .base import Pool from .. import event from ..engine.base import Engine class PoolEvents(event.Events): _target_class_doc = "SomeEngineOrPool" _dispatch_target = Pool @classmethod def _accept_with(cls, target): if isinstance(target, type): if issubclass(target, Engine): return Pool elif issubclass(target, Pool): return target elif isinstance(target, Engine): return target.pool else: return target @classmethod def _listen(cls, event_key, **kw): target = event_key.dispatch_target kw.setdefault("asyncio", target._is_asyncio) event_key.base_listen(**kw) def connect(self, dbapi_connection, connection_record): def first_connect(self, dbapi_connection, connection_record): def checkout(self, dbapi_connection, connection_record, connection_proxy): def checkin(self, dbapi_connection, connection_record): def reset(self, dbapi_connection, connection_record): def invalidate(self, dbapi_connection, connection_record, exception): def soft_invalidate(self, dbapi_connection, connection_record, exception):
MIT License
jcmgray/autoray
autoray/lazy/core.py
LazyArray.from_data
python
def from_data(cls, data): obj = cls.__new__(cls) obj._backend = infer_backend(data) obj._fn = obj._args = obj._kwargs = None obj._shape = tuple(map(int, data.shape)) obj._dtype = get_dtype_name(data) obj._data = data obj._deps = () return obj
Create a new ``LazyArray`` directly from a concrete array.
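A minimal sketch, assuming numpy is available as a backend and that the class exposes the public .shape and .backend accessors over its private slots (defined later in the full source):

import numpy as np

la = LazyArray.from_data(np.ones((2, 3)))
assert la.shape == (2, 3)      # taken from the concrete array
assert la.backend == 'numpy'   # inferred via infer_backend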
https://github.com/jcmgray/autoray/blob/35677037863d7d0d25ff025998d9fda75dce3b44/autoray/lazy/core.py#L63-L73
import operator import threading import functools import itertools import contextlib import collections import numpy as np from ..autoray import ( get_lib_fn, infer_backend, get_dtype_name, register_function, astype, ) _EMPTY_DICT = {} class LazyArray: __slots__ = ( "_backend", "_fn", "_args", "_kwargs", "_shape", "_dtype", "_data", "_deps", ) def __init__( self, backend, fn, args, kwargs, shape, dtype, deps=None, ): self._backend = backend self._fn = fn self._args = args if kwargs is None: self._kwargs = _EMPTY_DICT else: self._kwargs = kwargs self._shape = shape self._dtype = dtype self._data = None if deps is None: self._deps = (*find_lazy(self._args), *find_lazy(self._kwargs)) else: self._deps = deps @classmethod
Apache License 2.0
glenjarvis/dmarc-lambda
task2_extract_attachment/file_extraction.py
extract_files
python
def extract_files(source_filepath, target_directory): result = [] def file_content_consumer(file_content): result.append(file_content) file_consumer = FileConsumer( file_content_consumer ) tee_consumer1 = TeeConsumer( file_consumer ) file_archive_consumer = FileArchiveConsumer( tee_consumer1, target_directory ) file_archive_patch_consumer = FileArchivePatchConsumer( file_archive_consumer ) file_attchmnt_content_consumer = FileAttachmentContentConsumer( file_archive_patch_consumer, target_directory ) tee_consumer2 = TeeConsumer( file_attchmnt_content_consumer ) workflow = MailFileConsumer( tee_consumer2 ) workflow( source_filepath ) return result
The signature of this function is the API of this module; the implementation of this function is the agent of dependency injection. source_filepath is the full path to the input file. target_directory is the full path of the directory to be used for processing. The return value is a collection of utf-8 strings, each of which contains the contents of an extracted and unarchived file.
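A hedged usage sketch (paths are hypothetical):

contents = extract_files('/tmp/inbox/report.eml', '/tmp/dmarc-work')
for text in contents:
    print(len(text), text[:60])  # each item is the utf-8 text of one unarchived file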
https://github.com/glenjarvis/dmarc-lambda/blob/a0efea40adfbf2ced9a998b8a938ad23198b5485/task2_extract_attachment/file_extraction.py#L20-L67
import os import zipfile from collections import namedtuple from email import policy from email.parser import BytesParser __author__ = 'sweetjonnie' __license__ = 'Apache License 2.0' __email__ = 'jsavell@gmail.com' __status__ = 'Prototype' FileAttributes = namedtuple('FileAttributes', ['file_name', 'file_content'])
Apache License 2.0
zbrookle/sql_to_ibis
scripts/validate_docstrings.py
error
python
def error(code, **kwargs): return (code, ERROR_MSGS[code].format(**kwargs))
Return a tuple with the error code and the message with variables replaced. This is syntactic sugar so instead of: - `('EX02', ERROR_MSGS['EX02'].format(doctest_log=log))` We can simply use: - `error('EX02', doctest_log=log)` Parameters ---------- code : str Error code. **kwargs Values for the variables in the error messages Returns ------- code : str Error code. message : str Error message with variables replaced.
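A quick illustration using one ERROR_MSGS entry that takes no variables and one that does:

code, msg = error('GL08')
assert code == 'GL08'
assert msg == 'The object does not have a docstring'

code, msg = error('GL05', line_with_tabs='\tdef f():')
# -> ('GL05', 'Tabs found at the start of line "\tdef f():", please use whitespace only')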
https://github.com/zbrookle/sql_to_ibis/blob/5d29ff903fd61f7c652f7763f5cd58b76f9a083f/scripts/validate_docstrings.py#L144-L168
import argparse import ast import collections import doctest import functools import glob import importlib import inspect import json import os import pydoc import re import sys import tempfile import textwrap import flake8.main.application try: from io import StringIO except ImportError: from cStringIO import StringIO os.environ["MPLBACKEND"] = "Template" import matplotlib matplotlib.rc("figure", max_open_warning=10000) import numpy BASE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, os.path.join(BASE_PATH)) import pandas sys.path.insert(1, os.path.join(BASE_PATH, "doc", "sphinxext")) from numpydoc.docscrape import NumpyDocString from pandas.io.formats.printing import pprint_thing PRIVATE_CLASSES = ["NDFrame", "IndexOpsMixin"] DIRECTIVES = ["versionadded", "versionchanged", "deprecated"] DIRECTIVE_PATTERN = re.compile(rf"^\s*\.\. ({'|'.join(DIRECTIVES)})(?!::)", re.I | re.M) ALLOWED_SECTIONS = [ "Parameters", "Attributes", "Methods", "Returns", "Yields", "Other Parameters", "Raises", "Warns", "See Also", "Notes", "References", "Examples", ] ERROR_MSGS = { "GL01": "Docstring text (summary) should start in the line immediately " "after the opening quotes (not in the same line, or leaving a " "blank line in between)", "GL02": "Closing quotes should be placed in the line after the last text " "in the docstring (do not close the quotes in the same line as " "the text, or leave a blank line between the last text and the " "quotes)", "GL03": "Double line break found; please use only one blank line to " "separate sections or paragraphs, and do not leave blank lines " "at the end of docstrings", "GL04": "Private classes ({mentioned_private_classes}) should not be " "mentioned in public docstrings", "GL05": 'Tabs found at the start of line "{line_with_tabs}", please use ' "whitespace only", "GL06": 'Found unknown section "{section}". Allowed sections are: ' "{allowed_sections}", "GL07": "Sections are in the wrong order. Correct order is: {correct_sections}", "GL08": "The object does not have a docstring", "GL09": "Deprecation warning should precede extended summary", "GL10": "reST directives {directives} must be followed by two colons", "SS01": "No summary found (a short summary in a single line should be " "present at the beginning of the docstring)", "SS02": "Summary does not start with a capital letter", "SS03": "Summary does not end with a period", "SS04": "Summary contains heading whitespaces", "SS05": "Summary must start with infinitive verb, not third person " '(e.g. use "Generate" instead of "Generates")', "SS06": "Summary should fit in a single line", "ES01": "No extended summary found", "PR01": "Parameters {missing_params} not documented", "PR02": "Unknown parameters {unknown_params}", "PR03": "Wrong parameters order. Actual: {actual_params}. 
" "Documented: {documented_params}", "PR04": 'Parameter "{param_name}" has no type', "PR05": 'Parameter "{param_name}" type should not finish with "."', "PR06": 'Parameter "{param_name}" type should use "{right_type}" instead ' 'of "{wrong_type}"', "PR07": 'Parameter "{param_name}" has no description', "PR08": 'Parameter "{param_name}" description should start with a ' "capital letter", "PR09": 'Parameter "{param_name}" description should finish with "."', "PR10": 'Parameter "{param_name}" requires a space before the colon ' "separating the parameter name and type", "RT01": "No Returns section found", "RT02": "The first line of the Returns section should contain only the " "type, unless multiple values are being returned", "RT03": "Return value has no description", "RT04": "Return value description should start with a capital letter", "RT05": 'Return value description should finish with "."', "YD01": "No Yields section found", "SA01": "See Also section not found", "SA02": "Missing period at end of description for See Also " '"{reference_name}" reference', "SA03": "Description should be capitalized for See Also " '"{reference_name}" reference', "SA04": 'Missing description for See Also "{reference_name}" reference', "SA05": "{reference_name} in `See Also` section does not need `pandas` " "prefix, use {right_reference} instead.", "EX01": "No examples section found", "EX02": "Examples do not pass tests:\n{doctest_log}", "EX03": "flake8 error: {error_code} {error_message}{times_happening}", "EX04": "Do not import {imported_library}, as it is imported " "automatically for the examples (numpy as np, pandas as pd)", }
BSD 3-Clause New or Revised License
django/channels
channels/layers.py
InMemoryChannelLayer.send
python
async def send(self, channel, message): assert isinstance(message, dict), "message is not a dict" assert self.valid_channel_name(channel), "Channel name not valid" assert "__asgi_channel__" not in message queue = self.channels.setdefault(channel, asyncio.Queue()) if queue.qsize() >= self.capacity: raise ChannelFull(channel) await queue.put((time.time() + self.expiry, deepcopy(message)))
Send a message onto a (general or specific) channel.
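A minimal async sketch (channel name and message are hypothetical):

import asyncio

layer = InMemoryChannelLayer()

async def demo():
    await layer.send("chat", {"type": "chat.message", "text": "hi"})
    # a paired receive("chat") coroutine (not shown above) would pop this message

asyncio.run(demo())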
https://github.com/django/channels/blob/76a3e7a6016ed95d02319122121904f6c0a12b9d/channels/layers.py#L219-L236
import asyncio import fnmatch import random import re import string import time from copy import deepcopy from django.conf import settings from django.core.signals import setting_changed from django.utils.module_loading import import_string from channels import DEFAULT_CHANNEL_LAYER from .exceptions import ChannelFull, InvalidChannelLayerError class ChannelLayerManager: def __init__(self): self.backends = {} setting_changed.connect(self._reset_backends) def _reset_backends(self, setting, **kwargs): if setting == "CHANNEL_LAYERS": self.backends = {} @property def configs(self): return getattr(settings, "CHANNEL_LAYERS", {}) def make_backend(self, name): config = self.configs[name].get("CONFIG", {}) return self._make_backend(name, config) def make_test_backend(self, name): try: config = self.configs[name]["TEST_CONFIG"] except KeyError: raise InvalidChannelLayerError("No TEST_CONFIG specified for %s" % name) return self._make_backend(name, config) def _make_backend(self, name, config): if "ROUTING" in self.configs[name]: raise InvalidChannelLayerError( "ROUTING key found for %s - this is no longer needed in Channels 2." % name ) try: backend_class = import_string(self.configs[name]["BACKEND"]) except KeyError: raise InvalidChannelLayerError("No BACKEND specified for %s" % name) except ImportError: raise InvalidChannelLayerError( "Cannot import BACKEND %r specified for %s" % (self.configs[name]["BACKEND"], name) ) return backend_class(**config) def __getitem__(self, key): if key not in self.backends: self.backends[key] = self.make_backend(key) return self.backends[key] def __contains__(self, key): return key in self.configs def set(self, key, layer): old = self.backends.get(key, None) self.backends[key] = layer return old class BaseChannelLayer: def __init__(self, expiry=60, capacity=100, channel_capacity=None): self.expiry = expiry self.capacity = capacity self.channel_capacity = channel_capacity or {} def compile_capacities(self, channel_capacity): result = [] for pattern, value in channel_capacity.items(): if hasattr(pattern, "match"): result.append((pattern, value)) else: result.append((re.compile(fnmatch.translate(pattern)), value)) return result def get_capacity(self, channel): for pattern, capacity in self.channel_capacity: if pattern.match(channel): return capacity return self.capacity def match_type_and_length(self, name): if isinstance(name, str) and (len(name) < 100): return True return False channel_name_regex = re.compile(r"^[a-zA-Z\d\-_.]+(\![\d\w\-_.]*)?$") group_name_regex = re.compile(r"^[a-zA-Z\d\-_.]+$") invalid_name_error = ( "{} name must be a valid unicode string containing only ASCII " + "alphanumerics, hyphens, underscores, or periods." ) def valid_channel_name(self, name, receive=False): if self.match_type_and_length(name): if bool(self.channel_name_regex.match(name)): if "!" in name and not name.endswith("!") and receive: raise TypeError( "Specific channel names in receive() must end at the !" ) return True raise TypeError( "Channel name must be a valid unicode string containing only ASCII " + "alphanumerics, hyphens, or periods, not '{}'.".format(name) ) def valid_group_name(self, name): if self.match_type_and_length(name): if bool(self.group_name_regex.match(name)): return True raise TypeError( "Group name must be a valid unicode string containing only ASCII " + "alphanumerics, hyphens, or periods." 
) def valid_channel_names(self, names, receive=False): _non_empty_list = True if names else False _names_type = isinstance(names, list) assert _non_empty_list and _names_type, "names must be a non-empty list" assert all( self.valid_channel_name(channel, receive=receive) for channel in names ) return True def non_local_name(self, name): if "!" in name: return name[: name.find("!") + 1] else: return name class InMemoryChannelLayer(BaseChannelLayer): def __init__( self, expiry=60, group_expiry=86400, capacity=100, channel_capacity=None, **kwargs ): super().__init__( expiry=expiry, capacity=capacity, channel_capacity=channel_capacity, **kwargs ) self.channels = {} self.groups = {} self.group_expiry = group_expiry extensions = ["groups", "flush"]
BSD 3-Clause New or Revised License
alpacahq/pylivetrader
pylivetrader/misc/functional.py
getattrs
python
def getattrs(value, attrs, default=_no_default): try: for attr in attrs: value = getattr(value, attr) except AttributeError: if default is _no_default: raise value = default return value
Perform a chained application of ``getattr`` on ``value`` with the values in ``attrs``. If ``default`` is supplied, return it if any of the attribute lookups fail. Parameters ---------- value : object Root of the lookup chain. attrs : iterable[str] Sequence of attributes to look up. default : object, optional Value to return if any of the lookups fail. Returns ------- result : object Result of the lookup sequence. Example ------- >>> class EmptyObject(object): ... pass ... >>> obj = EmptyObject() >>> obj.foo = EmptyObject() >>> obj.foo.bar = "value" >>> getattrs(obj, ('foo', 'bar')) 'value' >>> getattrs(obj, ('foo', 'buzz')) Traceback (most recent call last): ... AttributeError: 'EmptyObject' object has no attribute 'buzz' >>> getattrs(obj, ('foo', 'buzz'), 'default') 'default'
https://github.com/alpacahq/pylivetrader/blob/2d9bf97103814409ba8b56a4291f2655c59514ee/pylivetrader/misc/functional.py#L269-L316
from functools import reduce from pprint import pformat from six import viewkeys from six.moves import map, zip from toolz import curry, flip from .sentinel import sentinel @curry def apply(f, *args, **kwargs): return f(*args, **kwargs) instance = apply def mapall(funcs, seq): for func in funcs: for elem in seq: yield func(elem) def same(*values): if not values: return True first, rest = values[0], values[1:] return all(value == first for value in rest) def _format_unequal_keys(dicts): return pformat([sorted(d.keys()) for d in dicts]) def dzip_exact(*dicts): if not same(*map(viewkeys, dicts)): raise ValueError( "dict keys not all equal:\n\n%s" % _format_unequal_keys(dicts) ) return {k: tuple(d[k] for d in dicts) for k in dicts[0]} def _gen_unzip(it, elem_len): elem = next(it) first_elem_len = len(elem) if elem_len is not None and elem_len != first_elem_len: raise ValueError( 'element at index 0 was length %d, expected %d' % ( first_elem_len, elem_len, ) ) else: elem_len = first_elem_len yield elem for n, elem in enumerate(it, 1): if len(elem) != elem_len: raise ValueError( 'element at index %d was length %d, expected %d' % ( n, len(elem), elem_len, ), ) yield elem def unzip(seq, elem_len=None): ret = tuple(zip(*_gen_unzip(map(tuple, seq), elem_len))) if ret: return ret if elem_len is None: raise ValueError("cannot unzip empty sequence without 'elem_len'") return ((),) * elem_len _no_default = sentinel('_no_default')
Apache License 2.0
erdc/proteus
proteus/Gauges.py
Gauges.getLocalElement
python
def getLocalElement(self, femSpace, location, node): patchBoundaryNodes=set() checkedElements=[] for eOffset in range(femSpace.mesh.nodeElementOffsets[node], femSpace.mesh.nodeElementOffsets[node + 1]): eN = femSpace.mesh.nodeElementsArray[eOffset] checkedElements.append(eN) patchBoundaryNodes|=set(femSpace.mesh.elementNodesArray[eN]) xi = femSpace.elementMaps.getInverseValue(eN, location) if femSpace.elementMaps.referenceElement.onElement(xi): return eN for node in patchBoundaryNodes: for eOffset in range(femSpace.mesh.nodeElementOffsets[node], femSpace.mesh.nodeElementOffsets[node + 1]): eN = femSpace.mesh.nodeElementsArray[eOffset] if eN not in checkedElements: checkedElements.append(eN) xi = femSpace.elementMaps.getInverseValue(eN, location) if femSpace.elementMaps.referenceElement.onElement(xi): return eN return None
Given a location and its nearest node, determine if it is on a local element. Returns None if location is not on any elements owned by this process
https://github.com/erdc/proteus/blob/fe4872257aae10b5a686394e78259582e93a39cb/proteus/Gauges.py#L216-L248
from __future__ import print_function from __future__ import division from builtins import zip from builtins import str from builtins import range from past.utils import old_div import os from collections import defaultdict, OrderedDict from itertools import product from mpi4py import MPI from petsc4py import PETSc import numpy as np from numpy.linalg import norm from . import Comm from .AuxiliaryVariables import AV_base from .Profiling import logEvent from proteus.MeshTools import triangleVerticesToNormals, tetrahedronVerticesToNormals, getMeshIntersections from proteus import Profiling def PointGauges(gauges, activeTime=None, sampleRate=0, fileName='point_gauges.csv'): points = OrderedDict() fields = list() for gauge in gauges: gauge_fields, gauge_points = gauge for field in gauge_fields: if field not in fields: fields.append(field) for point in gauge_points: if point not in points: l_d = {'fields': set()} points[point] = l_d points[point]['fields'].update(gauge_fields) return Gauges(fields, activeTime, sampleRate, fileName, points=points) def LineGauges(gauges, activeTime=None, sampleRate=0, fileName='line_gauges.csv'): lines = list() fields = list() for gauge in gauges: gauge_fields, gauge_lines = gauge for field in gauge_fields: if field not in fields: fields.append(field) lines.extend(product(gauge_fields, gauge_lines)) return Gauges(fields, activeTime, sampleRate, fileName, lines=lines) def LineIntegralGauges(gauges, activeTime=None, sampleRate=0, fileName='line_integral_gauges.csv'): lines = list() fields = list() for gauge in gauges: gauge_fields, gauge_lines = gauge for field in gauge_fields: if field not in fields: fields.append(field) lines.extend(product(gauge_fields, gauge_lines)) return Gauges(fields, activeTime, sampleRate, fileName, lines=lines, integrate=True) class Gauges(AV_base): def __init__(self, fields, activeTime=None, sampleRate=0, fileName='gauges.csv', points=None, lines=None, integrate=False): AV_base.__init__(self) self.fields = fields self.activeTime = activeTime self.sampleRate = sampleRate self.fileName = fileName self.points = points if points else OrderedDict() self.lines = lines if lines else [] self.file = None self.flags = {} self.files = {} self.outputWriterReady = False self.last_output = None self.pointGaugeMats = [] self.field_ids = [] self.dofsVecs = [] self.pointGaugeVecs = [] self.segments = [] self.adapted = False self.isPointGauge = bool(points) self.isLineGauge = bool(lines) and not integrate self.isLineIntegralGauge = bool(lines) and integrate if not (self.isPointGauge or self.isLineGauge or self.isLineIntegralGauge): raise ValueError("Need to provide points or lines") if sum((self.isPointGauge, self.isLineGauge, self.isLineIntegralGauge)) > 1: raise ValueError("Must be one of point or line gauge but not both") def getLocalNearestNode(self, location): nearest_node_distance_kdtree, nearest_node_kdtree = self.nodes_kdtree.query(location) comm = Comm.get().comm.tompi4py() return comm.rank, nearest_node_kdtree, nearest_node_distance_kdtree
MIT License
elastic/elasticsearch-dsl-py
elasticsearch_dsl/index.py
Index.search
python
def search(self, using=None): return Search( using=using or self._using, index=self._name, doc_type=self._doc_types )
Return a :class:`~elasticsearch_dsl.Search` object searching over the index (or all the indices belonging to this template) and its ``Document``\\s.
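A short usage sketch chaining into the returned Search object (index name and query are hypothetical; .query is the standard Search builder from this library):

idx = Index('blog-posts')
s = idx.search().query('match', title='python')
# response = s.execute()  # runs against the configured connection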
https://github.com/elastic/elasticsearch-dsl-py/blob/7874040b4f1346552a77b6f2a16321f2e1a6c722/elasticsearch_dsl/index.py#L248-L256
from . import analysis from .connections import get_connection from .exceptions import IllegalOperation from .mapping import Mapping from .search import Search from .update_by_query import UpdateByQuery from .utils import merge class IndexTemplate(object): def __init__(self, name, template, index=None, order=None, **kwargs): if index is None: self._index = Index(template, **kwargs) else: if kwargs: raise ValueError( "You cannot specify options for Index when" " passing an Index instance." ) self._index = index.clone() self._index._name = template self._template_name = name self.order = order def __getattr__(self, attr_name): return getattr(self._index, attr_name) def to_dict(self): d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d def save(self, using=None): es = get_connection(using or self._index._using) return es.indices.put_template(name=self._template_name, body=self.to_dict()) class Index(object): def __init__(self, name, using="default"): self._name = name self._doc_types = [] self._using = using self._settings = {} self._aliases = {} self._analysis = {} self._mapping = None def get_or_create_mapping(self): if self._mapping is None: self._mapping = Mapping() return self._mapping def as_template(self, template_name, pattern=None, order=None): return IndexTemplate( template_name, pattern or self._name, index=self, order=order ) def resolve_nested(self, field_path): for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: return nested, field if self._mapping: return self._mapping.resolve_nested(field_path) return (), None def resolve_field(self, field_path): for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: return field if self._mapping: return self._mapping.resolve_field(field_path) return None def load_mappings(self, using=None): self.get_or_create_mapping().update_from_es( self._name, using=using or self._using ) def clone(self, name=None, using=None): i = Index(name or self._name, using=using or self._using) i._settings = self._settings.copy() i._aliases = self._aliases.copy() i._analysis = self._analysis.copy() i._doc_types = self._doc_types[:] if self._mapping is not None: i._mapping = self._mapping._clone() return i def _get_connection(self, using=None): if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return get_connection(using or self._using) connection = property(_get_connection) def mapping(self, mapping): self.get_or_create_mapping().update(mapping) def document(self, document): self._doc_types.append(document) if document._index._name is None: document._index = self return document def settings(self, **kwargs): self._settings.update(kwargs) return self def aliases(self, **kwargs): self._aliases.update(kwargs) return self def analyzer(self, *args, **kwargs): analyzer = analysis.analyzer(*args, **kwargs) d = analyzer.get_analysis_definition() if not d: return merge(self._analysis, d, True) def to_dict(self): out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases mappings = self._mapping.to_dict() if self._mapping else {} analysis = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) merge(analysis, mapping._collect_analysis(), True) if mappings: out["mappings"] = mappings if analysis 
or self._analysis: merge(analysis, self._analysis) out.setdefault("settings", {})["analysis"] = analysis return out
Apache License 2.0
centerforopenscience/share
share/transformers/org_datacite.py
get_contributors
python
def get_contributors(options, contrib_type): contribs = [] for value in options: val = try_contributor_type(value, contrib_type) if val: contribs.append(val) return contribs
Return the list of contributor values whose contributor type is one of the given `contrib_type` types.
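A small illustration with DataCite-style contributor dicts (values are hypothetical):

options = [
    {'contributorName': 'Doe, Jane', '@contributorType': 'Editor'},
    {'contributorName': 'Example Org', '@contributorType': 'Distributor'},
]
people = get_contributors(options, PEOPLE_TYPES)
# -> [{'contributorName': 'Doe, Jane', '@contributorType': 'Editor'}]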
https://github.com/centerforopenscience/share/blob/c7715af2881f6fa23197d4e7c381d90169a90ed1/share/transformers/org_datacite.py#L50-L59
import logging from share.exceptions import TransformError from share.transform.chain import ctx, links as tools, ChainTransformer from share.transform.chain.exceptions import InvalidIRI, InvalidPath from share.transform.chain.parsers import Parser from share.transform.chain.utils import force_text logger = logging.getLogger(__name__) PEOPLE_TYPES = ( 'ContactPerson', 'DataCurator', 'Editor', 'ProjectLeader', 'ProjectManager', 'ProjectMember', 'RelatedPerson', 'Researcher', 'Supervisor', 'WorkPackageLeader' ) NOT_PEOPLE_TYPES = ( 'Distributor', 'HostingInstitution', 'RegistrationAgency', 'RegistrationAuthority', 'ResearchGroup' ) def try_contributor_type(value, target_list_types): try: contrib_type_item = value['@contributorType'] if contrib_type_item in target_list_types: return value return None except KeyError: return None
Apache License 2.0
sk1712/gcn_metric_learning
lib/abide_utils.py
load_all_networks
python
def load_all_networks(subject_list, kind, atlas_name="aal"): all_networks = [] for subject in subject_list: fl = os.path.join(root_folder, subject, subject + "_" + atlas_name + "_" + kind + ".mat") matrix = sio.loadmat(fl)['connectivity'] if atlas_name == 'ho': matrix = np.delete(matrix, 82, axis=0) matrix = np.delete(matrix, 82, axis=1) all_networks.append(matrix) return all_networks
subject_list : the subject short IDs list kind : the kind of connectivity to be used, e.g. lasso, partial correlation, correlation atlas_name : name of the atlas used returns: all_networks : list of connectivity matrices (regions x regions)
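A hedged call sketch (subject IDs are hypothetical, and the precomputed .mat files must already exist under root_folder):

subject_list = ['50003', '50004']
networks = load_all_networks(subject_list, 'correlation', atlas_name='ho')
print(networks[0].shape)  # (regions, regions), with HO region 82 removed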
https://github.com/sk1712/gcn_metric_learning/blob/50abd52bcd29d2d1ccfc27cbffe71b91c0629c7e/lib/abide_utils.py#L283-L307
import os import csv import numpy as np import scipy.io as sio from sklearn.covariance import GraphLassoCV import nilearn from nilearn import connectome save_path = '/vol/dhcp-hcp-data/ABIDE' num_subjects = 1000 pipeline = 'cpac' derivatives = ['rois_ho'] root_folder = os.path.join(save_path, 'ABIDE_pcp/cpac/filt_noglobal') def get_ids(num_subjects=None, short=True): if short: subject_IDs = np.loadtxt(os.path.join(root_folder, 'subject_IDs.txt'), dtype=int) subject_IDs = subject_IDs.astype(str) else: subject_IDs = np.loadtxt(os.path.join(root_folder, 'full_IDs.txt'), dtype=str) if num_subjects is not None: subject_IDs = subject_IDs[:num_subjects] return subject_IDs def fetch_filenames(subject_list, file_type): filemapping = {'func_preproc': '_func_preproc.nii.gz', 'rois_aal': '_rois_aal.1D', 'rois_cc200': '_rois_cc200.1D', 'rois_ho': '_rois_ho.1D'} filenames = [] subject_IDs = get_ids(short=True) subject_IDs = subject_IDs.tolist() full_IDs = get_ids(short=False) for s in subject_list: try: if file_type in filemapping: idx = subject_IDs.index(s) pattern = full_IDs[idx] + filemapping[file_type] else: pattern = s + file_type filenames.append(os.path.join(root_folder, s, pattern)) except ValueError: filenames.append('N/A') return filenames def fetch_subject_files(subjectID): subject_IDs = get_ids(short=True) subject_IDs = subject_IDs.tolist() full_IDs = get_ids(short=False) try: idx = subject_IDs.index(subjectID) subject_folder = os.path.join(root_folder, subjectID) onlyfiles = [os.path.join(subject_folder, f) for f in os.listdir(subject_folder) if os.path.isfile(os.path.join(subject_folder, f))] except ValueError: onlyfiles = [] return onlyfiles def fetch_conn_matrices(subject_list, atlas_name, kind): conn_files = fetch_filenames(subject_list, '_' + atlas_name + '_' + kind.replace(' ', '_') + '.mat') conn_matrices = [] for fl in conn_files: print("Reading connectivity file %s" % fl) try: mat = sio.loadmat(fl)['connectivity'] conn_matrices.append(mat) except IOError: print("File %s does not exist" % fl) return conn_matrices def get_timeseries(subject_list, atlas_name): ts_files = fetch_filenames(subject_list, 'rois_' + atlas_name) ts = [] for fl in ts_files: print("Reading timeseries file %s" % fl) ts.append(np.loadtxt(fl, skiprows=0)) return ts def norm_timeseries(ts_list): norm_ts = [] for ts in ts_list: norm_ts.append(nilearn.signal.clean(ts, detrend=False)) return norm_ts def subject_connectivity(timeseries, subject, atlas_name, kind, save=True, save_path=root_folder): print("Estimating %s matrix for subject %s" % (kind, subject)) if kind == 'lasso': covariance_estimator = GraphLassoCV(verbose=1) covariance_estimator.fit(timeseries) connectivity = covariance_estimator.covariance_ print('Covariance matrix has shape {0}.'.format(connectivity.shape)) elif kind in ['tangent', 'partial correlation', 'correlation']: conn_measure = connectome.ConnectivityMeasure(kind=kind) connectivity = conn_measure.fit_transform([timeseries])[0] if save: subject_file = os.path.join(save_path, subject, subject + '_' + atlas_name + '_' + kind.replace(' ', '_') + '.mat') sio.savemat(subject_file, {'connectivity': connectivity}) return connectivity def group_connectivity(timeseries, subject_list, atlas_name, kind, save=True, save_path=root_folder): if kind == 'lasso': covariance_estimator = GraphLassoCV(verbose=1) connectivity_matrices = [] for i, ts in enumerate(timeseries): covariance_estimator.fit(ts) connectivity = covariance_estimator.covariance_ connectivity_matrices.append(connectivity) print('Covariance 
matrix has shape {0}.'.format(connectivity.shape)) elif kind in ['tangent', 'partial correlation', 'correlation']: conn_measure = connectome.ConnectivityMeasure(kind=kind) connectivity_matrices = conn_measure.fit_transform(timeseries) if save: for i, subject in enumerate(subject_list): subject_file = os.path.join(save_path, subject_list[i], subject_list[i] + '_' + atlas_name + '_' + kind.replace(' ', '_') + '.mat') sio.savemat(subject_file, {'connectivity': connectivity_matrices[i]}) print("Saving connectivity matrix to %s" % subject_file) return connectivity_matrices def get_subject_label(subject_list, label_name): label = {} with open(os.path.join(save_path, 'ABIDE_pcp/Phenotypic_V1_0b_preprocessed1.csv')) as csvfile: reader = csv.DictReader(csvfile) for row in reader: if row['subject'] in subject_list: label[row['subject']] = row[label_name] return label
MIT License
covid19tracking/quality-control
run_quality_cli.py
load_args_parser
python
def load_args_parser(config) -> ArgumentParser: parser = ArgumentParser( description=__doc__, formatter_class=RawDescriptionHelpFormatter) parser.add_argument('state', metavar='state', type=str, nargs='*', help='states to check') parser.add_argument( '-w', '--working', dest='check_working', action='store_true', default=False, help='check the working results (only)') parser.add_argument( '-d', '--current', dest='check_current', action='store_true', default=False, help='check the current (only)') parser.add_argument( '-x', '--history', dest='check_history', action='store_true', default=False, help='check the history (only)') save_results = config["CHECKS"]["save_results"] == "True" enable_experimental = config["CHECKS"]["enable_experimental"] == "True" enable_debug = config["CHECKS"]["enable_debug"] == "True" plot_models = config["MODEL"]["plot_models"] == "True" parser.add_argument( '--save', dest='save_results', action='store_true', default=save_results, help='save results to file') parser.add_argument( '-exp', '--experimental', dest='enable_experimental', action='store_true', default=enable_experimental, help='enable experimental checks') parser.add_argument( '--debug', dest='enable_debug', action='store_true', default=enable_debug, help='enable debug traces') parser.add_argument( '--plot', dest='plot_models', action='store_true', default=plot_models, help='plot the model curves') parser.add_argument( '--results_dir', default=config["CHECKS"]["results_dir"], help='directory for results files') parser.add_argument( '--images_dir', default=config["MODEL"]["images_dir"], help='directory for model curves') return parser
Load the command-line argument parser, with defaults taken from the config.
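A hedged sketch of wiring the parser (the config file name is hypothetical; read_config_file is imported in the module context above):

config = read_config_file('quality_control.ini')
parser = load_args_parser(config)
args = parser.parse_args(['CA', 'WA', '--working', '--save'])
print(args.state, args.check_working, args.save_results)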
https://github.com/covid19tracking/quality-control/blob/a4395d98c29f534ee0ec2d49fb88fec77ffc6dd9/run_quality_cli.py#L13-L66
import sys from loguru import logger from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter from app.util import read_config_file from app.qc_config import QCConfig from app.data.data_source import DataSource from app.check_dataset import check_current, check_working, check_history
Apache License 2.0
dingmyu/hr-nas
models/mobilenet_base.py
output_network
python
def output_network(model): model_kwargs = { key: getattr(model, key) for key in [ 'input_channel', 'last_channel', 'active_fn', ] } blocks = list(model.get_named_block_list().values()) res = [] for block in blocks: res.append([ block.output_dim, 1, block.stride, block.kernel_sizes, block.channels, block.expand ]) model_kwargs['inverted_residual_setting'] = res return model_kwargs
Output network kwargs in `searched_network` style.
https://github.com/dingmyu/hr-nas/blob/003c3b6bd0168751c884b6999ffc8c13b36a39e2/models/mobilenet_base.py#L604-L621
import abc import collections import logging import functools import math import numpy as np import torch from torch import nn from torch.nn import functional as F import models.compress_utils as cu from utils.common import add_prefix from utils.common import get_device from models.transformer import Transformer def _make_divisible(v, divisor, min_value=None): if min_value is None: min_value = divisor new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) if new_v < 0.9 * v: new_v += divisor return new_v class CheckpointModule(nn.Module, metaclass=abc.ABCMeta): def __init__(self, use_checkpoint=True): super(CheckpointModule, self).__init__() self._use_checkpoint = use_checkpoint def forward(self, *args, **kwargs): from torch.utils.checkpoint import checkpoint if self._use_checkpoint: return checkpoint(self._forward, *args, **kwargs) return self._forward(*args, **kwargs) @abc.abstractmethod def _forward(self, *args, **kwargs): pass class Identity(nn.Module): def forward(self, x): return x class Narrow(nn.Module): def __init__(self, dimension, start, length): super(Narrow, self).__init__() self.dimension = dimension self.start = start self.length = length def forward(self, x): return x.narrow(self.dimension, self.start, self.length) class Swish(nn.Module): def forward(self, x): return x * torch.sigmoid(x) class HSwish(object): def forward(self, x): return x * F.relu6(x + 3, True).div_(6) class SqueezeAndExcitation(nn.Module): def __init__(self, n_feature, n_hidden, spatial_dims=[2, 3], active_fn=None): super(SqueezeAndExcitation, self).__init__() self.n_feature = n_feature self.n_hidden = n_hidden self.spatial_dims = spatial_dims self.se_reduce = nn.Conv2d(n_feature, n_hidden, 1, bias=True) self.se_expand = nn.Conv2d(n_hidden, n_feature, 1, bias=True) self.active_fn = active_fn() def forward(self, x): se_tensor = x.mean(self.spatial_dims, keepdim=True) se_tensor = self.se_expand(self.active_fn(self.se_reduce(se_tensor))) return torch.sigmoid(se_tensor) * x def __repr__(self): return '{}({}, {}, spatial_dims={}, active_fn={})'.format( self._get_name(), self.n_feature, self.n_hidden, self.spatial_dims, self.active_fn) class ConvBNReLU(nn.Sequential): def __init__(self, in_planes, out_planes, kernel_size=3, stride=1, groups=1, active_fn=None, batch_norm_kwargs=None, dilation=1, padding=None, **kwargs): if batch_norm_kwargs is None: batch_norm_kwargs = {} if not padding: padding = (kernel_size - 1) // 2 super(ConvBNReLU, self).__init__( nn.Conv2d(in_planes, out_planes, kernel_size, stride, padding, dilation=dilation, groups=groups, bias=False), nn.BatchNorm2d(out_planes, **batch_norm_kwargs), active_fn() if active_fn is not None else Identity()) class InvertedResidualChannelsFused(nn.Module): def __init__(self, inp, oup, stride, channels, kernel_sizes, expand, active_fn=None, batch_norm_kwargs=None, se_ratio=0.5, use_transformer=False, downsampling_transformer=False): super(InvertedResidualChannelsFused, self).__init__() assert stride in [1, 2] assert len(channels) == len(kernel_sizes) self.input_dim = inp self.output_dim = oup self.expand = expand self.stride = stride self.kernel_sizes = kernel_sizes self.channels = channels self.use_res_connect = self.stride == 1 and inp == oup self.batch_norm_kwargs = batch_norm_kwargs self.active_fn = active_fn self.se_ratio = se_ratio self.use_transformer = use_transformer self.downsampling_transformer = downsampling_transformer (self.expand_conv, self.depth_ops, self.project_conv, self.se_op) = self._build(channels, kernel_sizes, expand, 
se_ratio) if not self.use_res_connect: group = [x for x in range(1, self.input_dim + 1) if self.input_dim % x == 0 and self.output_dim % x == 0][-1] self.residual = nn.Conv2d(self.input_dim, self.output_dim, kernel_size=1, stride=self.stride, padding=0, groups=group, bias=False) if self.use_transformer and self.use_res_connect: self.transformer = Transformer(8, inp) if self.use_transformer and self.downsampling_transformer and not self.use_res_connect: self.transformer = Transformer(8, inp, oup, downsampling=(stride == 2)) def _build(self, hidden_dims, kernel_sizes, expand, se_ratio): _batch_norm_kwargs = self.batch_norm_kwargs if self.batch_norm_kwargs is not None else {} hidden_dim_total = sum(hidden_dims) if self.expand and hidden_dim_total: expand_conv = ConvBNReLU(self.input_dim, hidden_dim_total, kernel_size=1, batch_norm_kwargs=_batch_norm_kwargs, active_fn=self.active_fn) else: expand_conv = Identity() narrow_start = 0 depth_ops = nn.ModuleList() for k, hidden_dim in zip(kernel_sizes, hidden_dims): layers = [] if expand: layers.append(Narrow(1, narrow_start, hidden_dim)) narrow_start += hidden_dim else: if hidden_dim != self.input_dim: raise RuntimeError('uncomment this for search_first model') logging.warning( 'uncomment this for previous trained search_first model') layers.extend([ ConvBNReLU(hidden_dim, hidden_dim, kernel_size=k, stride=self.stride, groups=hidden_dim, batch_norm_kwargs=_batch_norm_kwargs, active_fn=self.active_fn), ]) depth_ops.append(nn.Sequential(*layers)) if hidden_dim_total: project_conv = nn.Sequential( nn.Conv2d(hidden_dim_total, self.output_dim, 1, 1, 0, bias=False), nn.BatchNorm2d(self.output_dim, **_batch_norm_kwargs)) else: project_conv = Identity() if expand and narrow_start != hidden_dim_total: raise ValueError('Part of expanded are not used') if hidden_dim_total and se_ratio is not None: se_op = SqueezeAndExcitation(hidden_dim_total, int(round(self.input_dim * se_ratio)), active_fn=self.active_fn) else: se_op = Identity() return expand_conv, depth_ops, project_conv, se_op def get_depthwise_bn(self): return list(self.get_named_depthwise_bn().values()) def get_named_depthwise_bn(self, prefix=None): res = collections.OrderedDict() for i, op in enumerate(self.depth_ops): children = list(op.children()) if self.expand: idx_op = 1 else: raise RuntimeError('Not search_first') conv_bn_relu = children[idx_op] assert isinstance(conv_bn_relu, ConvBNReLU) conv_bn_relu = list(conv_bn_relu.children()) _, bn, _ = conv_bn_relu assert isinstance(bn, nn.BatchNorm2d) name = 'depth_ops.{}.{}.1'.format(i, idx_op) name = add_prefix(name, prefix) res[name] = bn return res def forward(self, x): if len(self.depth_ops) == 0: if not self.use_res_connect: if self.use_transformer and self.downsampling_transformer: return self.residual(x) + self.transformer(x) return self.residual(x) else: if self.use_transformer and self.transformer is not None: x = self.transformer(x) return x res = self.expand_conv(x) res = [op(res) for op in self.depth_ops] if len(res) != 1: res = torch.cat(res, dim=1) else: res = res[0] res = self.se_op(res) res = self.project_conv(res) if self.use_res_connect: if self.use_transformer and self.transformer is not None: x = self.transformer(x) return x + res else: if self.use_transformer and self.downsampling_transformer: return self.residual(x) + self.transformer(x) + res return self.residual(x) + res return res def __repr__(self): return ('{}({}, {}, channels={}, kernel_sizes={}, expand={}, stride={},' ' se_ratio={})').format(self._get_name(), 
self.input_dim, self.output_dim, self.channels, self.kernel_sizes, self.expand, self.stride, self.se_ratio) transformer_dict = None class InvertedResidualChannels(nn.Module): def __init__(self, inp, oup, stride, channels, kernel_sizes, expand, active_fn=None, batch_norm_kwargs=None, use_transformer=False, downsampling_transformer=False, se_ratio=None): super(InvertedResidualChannels, self).__init__() assert len(channels) == len(kernel_sizes) self.input_dim = inp self.output_dim = oup self.expand = expand self.stride = stride self.kernel_sizes = kernel_sizes self.channels = channels self.use_res_connect = self.stride == 1 and inp == oup self.batch_norm_kwargs = batch_norm_kwargs self.active_fn = active_fn self.use_transformer = use_transformer self.downsampling_transformer = downsampling_transformer self.ops, self.pw_bn = self._build(channels, kernel_sizes, expand) if len(self.ops) > 0 and se_ratio: self.se_op = SqueezeAndExcitation(self.input_dim, int(round(self.input_dim * se_ratio)), active_fn=self.active_fn) else: self.se_op = Identity() if self.use_transformer and self.use_res_connect: if transformer_dict: hidden_dims = transformer_dict[0] transformer_dict.pop(0) if hidden_dims: self.transformer = Transformer(hidden_dims, inp) else: self.transformer = None else: self.transformer = Transformer(64, inp) if self.use_transformer and self.downsampling_transformer and not self.use_res_connect: if transformer_dict: hidden_dims = transformer_dict[0] transformer_dict.pop(0) if hidden_dims: self.transformer = Transformer(hidden_dims, inp, oup, downsampling=(stride == 2)) else: self.transformer = None else: self.transformer = Transformer(64, inp, oup, downsampling=(stride == 2)) if not self.use_res_connect: group = [x for x in range(1, self.input_dim + 1) if self.input_dim % x == 0 and self.output_dim % x == 0][-1] self.residual = nn.Conv2d(self.input_dim, self.output_dim, kernel_size=1, stride=self.stride, padding=0, groups=group, bias=False) self.bns = nn.ModuleList() for hidden_dim in channels: self.bns.append(nn.BatchNorm2d(hidden_dim, **batch_norm_kwargs)) self.index = 0 def _build(self, hidden_dims, kernel_sizes, expand): _batch_norm_kwargs = self.batch_norm_kwargs if self.batch_norm_kwargs is not None else {} narrow_start = 0 ops = nn.ModuleList() for k, hidden_dim in zip(kernel_sizes, hidden_dims): layers = [] if expand: layers.append( ConvBNReLU(self.input_dim, hidden_dim, kernel_size=1, batch_norm_kwargs=_batch_norm_kwargs, active_fn=self.active_fn)) else: if hidden_dim != self.input_dim: raise RuntimeError('uncomment this for search_first model') logging.warning( 'uncomment this for previous trained search_first model') narrow_start += hidden_dim layers.extend([ ConvBNReLU(hidden_dim, hidden_dim, kernel_size=k, stride=self.stride, groups=hidden_dim, batch_norm_kwargs=_batch_norm_kwargs, active_fn=self.active_fn), nn.Conv2d(hidden_dim, self.output_dim, 1, 1, 0, bias=False), ]) ops.append(nn.Sequential(*layers)) pw_bn = nn.BatchNorm2d(self.output_dim, **_batch_norm_kwargs) if not expand and narrow_start != self.input_dim: raise ValueError('Part of input are not used') return ops, pw_bn def get_depthwise_bn(self): return list(self.get_named_depthwise_bn().values()) def get_named_depthwise_bn(self, prefix=None): res = collections.OrderedDict() for i, op in enumerate(self.ops): children = list(op.children()) if self.expand: idx_op = 1 else: idx_op = 0 conv_bn_relu = children[idx_op] assert isinstance(conv_bn_relu, ConvBNReLU) conv_bn_relu = list(conv_bn_relu.children()) _, bn, _ = 
conv_bn_relu assert isinstance(bn, nn.BatchNorm2d) name = 'ops.{}.{}.1'.format(i, idx_op) name = add_prefix(name, prefix) res[name] = bn return res def forward(self, x): if self.index: tmps = [] for index, op in enumerate(self.ops): tmp = op[0](x) tmp = op[1][0](tmp) tmp = self.bns[index](tmp) tmp = op[1][2](tmp) tmp = op[2](tmp) tmps.append(tmp) tmp = sum(tmps) tmp = self.pw_bn(tmp) if self.use_res_connect: if self.use_transformer and self.transformer is not None: x = self.transformer(x) return x + tmp else: if self.use_transformer and self.downsampling_transformer and self.transformer is not None: return self.residual(x) + self.transformer(x) + tmp return self.residual(x) + tmp if len(self.ops) == 0: if not self.use_res_connect: if self.use_transformer and self.downsampling_transformer and self.transformer is not None: return self.residual(x) + self.transformer(x) return self.residual(x) else: if self.use_transformer and self.transformer is not None: x = self.transformer(x) return x tmp = sum([op(x) for op in self.ops]) tmp = self.pw_bn(tmp) x = self.se_op(x) if self.use_res_connect: if self.use_transformer and self.transformer is not None: x = self.transformer(x) return x + tmp else: if self.use_transformer and self.downsampling_transformer and self.transformer is not None: return self.residual(x) + self.transformer(x) + tmp return self.residual(x) + tmp return tmp def __repr__(self): return ('{}({}, {}, channels={}, kernel_sizes={}, expand={},' ' stride={})').format(self._get_name(), self.input_dim, self.output_dim, self.channels, self.kernel_sizes, self.expand, self.stride) def compress_by_mask(self, masks, **kwargs): device = get_device(self.pw_bn) cu.copmress_inverted_residual_channels(self, masks, **kwargs) self.to(device) def compress_by_threshold(self, threshold, **kwargs): masks = [ bn.weight.detach().abs() > threshold for bn in self.get_depthwise_bn() ] self.compress_by_mask(masks, **kwargs) def get_active_fn(name): active_fn = { 'nn.ReLU6': functools.partial(nn.ReLU6, inplace=True), 'nn.ReLU': functools.partial(nn.ReLU, inplace=True), 'nn.Swish': Swish, 'nn.HSwish': HSwish, }[name] return active_fn def get_block(name): return { 'InvertedResidualChannels': InvertedResidualChannels, 'InvertedResidualChannelsFused': InvertedResidualChannelsFused }[name] def init_weights_slimmable(m): if isinstance(m, nn.Conv2d): nn.init.kaiming_normal_(m.weight, mode='fan_out') if m.bias is not None: nn.init.zeros_(m.bias) elif isinstance(m, nn.BatchNorm2d): nn.init.ones_(m.weight) nn.init.zeros_(m.bias) elif isinstance(m, nn.Linear): nn.init.normal_(m.weight, 0, 0.01) if m.bias is not None: nn.init.zeros_(m.bias) def init_weights_mnas(m): if isinstance(m, nn.Conv2d): if m.groups == m.in_channels: fan_out = m.weight[0][0].numel() else: _, fan_out = nn.init._calculate_fan_in_and_fan_out(m.weight) gain = nn.init.calculate_gain('relu') std = gain / math.sqrt(fan_out) nn.init.normal_(m.weight, 0.0, std) if m.bias is not None: nn.init.zeros_(m.bias) elif isinstance(m, nn.BatchNorm2d): nn.init.ones_(m.weight) nn.init.zeros_(m.bias) elif isinstance(m, nn.Linear): _, fan_out = nn.init._calculate_fan_in_and_fan_out(m.weight) init_range = 1.0 / np.sqrt(fan_out) nn.init.uniform_(m.weight, -init_range, init_range) if m.bias is not None: nn.init.zeros_(m.bias)
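The compress_by_threshold method above prunes a block by masking depthwise BatchNorm channels whose scale falls below a threshold. A minimal, self-contained sketch of just that thresholding rule (the BN and values here are illustrative, not from the repository):

import torch
import torch.nn as nn

bn = nn.BatchNorm2d(8)                      # stands in for one BN from get_depthwise_bn()
nn.init.uniform_(bn.weight, -0.01, 0.01)    # pretend-trained scales (gamma)
mask = bn.weight.detach().abs() > 1e-3      # the same per-channel test compress_by_threshold builds
print(int(mask.sum()), "of", mask.numel(), "channels kept")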
MIT License
typeerror/secure
secure/headers.py
StrictTransportSecurity.max_age
python
def max_age(self, seconds: int) -> "StrictTransportSecurity":
    self._build("max-age={}".format(seconds))
    return self
Instruct the browser to remember HTTPS preference until time (seconds) expires.

:param seconds: time in seconds
:type seconds: int
:return: StrictTransportSecurity class
:rtype: StrictTransportSecurity
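A short usage sketch (not from the repository's docs): max_age chains with include_subdomains from the same class, since both append to the shared policy list.

hsts = StrictTransportSecurity().max_age(31536000).include_subdomains()
print(hsts.header)  # Strict-Transport-Security
print(hsts.value)   # max-age=31536000; includeSubDomains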
https://github.com/typeerror/secure/blob/04dd035e560583baeba06f5714f273b542c07767/secure/headers.py#L674-L684
import json from typing import Dict, List, Optional, Union import warnings class Server: def __init__(self) -> None: self.header = "Server" self.value = "NULL" def set(self, value: str) -> "Server": self.value = value return self class XContentTypeOptions: def __init__(self) -> None: self.header = "X-Content-Type-Options" self.value = "nosniff" def set(self, value: str) -> "XContentTypeOptions": self.value = value return self class ReportTo: def __init__( self, max_age: int, include_subdomains: bool = False, group: Optional[str] = None, *endpoints: List[Dict[str, Union[str, int]]], ) -> None: self.header = "Report-To" report_to_endpoints = json.dumps(endpoints) report_to_object: Dict[str, Union[str, int]] = { "max_age": max_age, "endpoints": report_to_endpoints, } if group: report_to_object["group"] = group if include_subdomains: report_to_object["include_subdomains"] = include_subdomains self.value = json.dumps(report_to_object) def set(self, value: str) -> "ReportTo": self.value = value return self class ContentSecurityPolicy: def __init__(self) -> None: self.__policy: List[str] = [] self.header = "Content-Security-Policy" self.value = "script-src 'self'; object-src 'self'" def _build(self, directive: str, *sources: str) -> None: if len(sources) == 0: self.__policy.append(directive) else: self.__policy.append(f"{directive} {' '.join(sources)}") self.value = "; ".join(self.__policy) def set(self, value: str) -> "ContentSecurityPolicy": self._build(value) return self def custom_directive( self, directive: str, *sources: str ) -> "ContentSecurityPolicy": self._build(directive, *sources) return self def report_only(self) -> None: self.header = "Content-Security-Policy-Report-Only" def base_uri(self, *sources: str) -> "ContentSecurityPolicy": self._build("base-uri", *sources) return self def child_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("child-src", *sources) return self def connect_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("connect-src", *sources) return self def default_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("default-src", *sources) return self def font_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("font-src", *sources) return self def form_action(self, *sources: str) -> "ContentSecurityPolicy": self._build("form-action", *sources) return self def frame_ancestors(self, *sources: str) -> "ContentSecurityPolicy": self._build("frame-ancestors", *sources) return self def frame_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("frame-src", *sources) return self def img_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("img-src", *sources) return self def manifest_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("manifest-src", *sources) return self def media_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("media-src", *sources) return self def object_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("object-src", *sources) return self def prefetch_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("prefetch-src", *sources) return self def report_to(self, report_to: ReportTo) -> "ContentSecurityPolicy": self._build("report-to", report_to.value) return self def report_uri(self, *values: str) -> "ContentSecurityPolicy": self._build("report-uri", *values) return self def sandbox(self, *values: str) -> "ContentSecurityPolicy": self._build("sandbox", *values) return self def script_src(self, *sources: str) -> 
"ContentSecurityPolicy": self._build("script-src", *sources) return self def style_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("style-src", *sources) return self def upgrade_insecure_requests(self) -> "ContentSecurityPolicy": self._build("upgrade-insecure-requests") return self def worker_src(self, *sources: str) -> "ContentSecurityPolicy": self._build("worker-src", *sources) return self @staticmethod def nonce(value: str) -> str: value = "'nonce-<{}>'".format(value) return value class XFrameOptions: def __init__(self) -> None: self.header = "X-Frame-Options" self.value = "SAMEORIGIN" def set(self, value: str) -> "XFrameOptions": self.value = value return self def deny(self) -> "XFrameOptions": self.value = "deny" return self def sameorigin(self) -> "XFrameOptions": self.value = "sameorigin" return self class XXSSProtection: def __init__(self) -> None: self.header = "X-XSS-Protection" self.value = "0" def set(self, value: str) -> "XXSSProtection": warnings.warn( "Recommended to utilize Content-Security-Policy", DeprecationWarning, ) self.value = value return self class ReferrerPolicy: def __init__(self) -> None: self.__policy: List[str] = [] self.header = "Referrer-Policy" self.value = "no-referrer, strict-origin-when-cross-origin" def _build(self, directive: str) -> None: self.__policy.append(directive) self.value = ", ".join(self.__policy) def set(self, value: str) -> "ReferrerPolicy": self._build(value) return self def no_referrer(self) -> "ReferrerPolicy": self._build("no-referrer") return self def no_referrer_when_downgrade(self) -> "ReferrerPolicy": self._build("no-referrer-when-downgrade") return self def origin(self) -> "ReferrerPolicy": self._build("origin") return self def origin_when_cross_origin(self) -> "ReferrerPolicy": self._build("origin-when-cross-origin") return self def same_origin(self) -> "ReferrerPolicy": self._build("same-origin") return self def strict_origin(self) -> "ReferrerPolicy": self._build("strict-origin") return self def strict_origin_when_cross_origin(self) -> "ReferrerPolicy": self._build("strict-origin-when-cross-origin") return self def unsafe_url(self) -> "ReferrerPolicy": self._build("unsafe-url") return self class StrictTransportSecurity: def __init__(self) -> None: self.__policy: List[str] = [] self.header = "Strict-Transport-Security" self.value = "max-age=63072000; includeSubdomains" def _build(self, directive: str) -> None: self.__policy.append(directive) self.value = "; ".join(self.__policy) def set(self, value: str) -> "StrictTransportSecurity": self._build(value) return self def include_subdomains(self) -> "StrictTransportSecurity": self._build("includeSubDomains") return self
MIT License
lostdragonist/steam-library-setup-tool
vdf/__init__.py
dump
python
def dump(obj, fp, pretty=False, escaped=True):
    if not isinstance(obj, Mapping):
        raise TypeError("Expected data to be an instance of ``dict``")
    if not hasattr(fp, 'write'):
        raise TypeError("Expected fp to have write() method")
    if not isinstance(pretty, bool):
        raise TypeError("Expected pretty to be of type bool")
    if not isinstance(escaped, bool):
        raise TypeError("Expected escaped to be of type bool")
    for chunk in _dump_gen(obj, pretty, escaped):
        fp.write(chunk)
Serialize ``obj`` as a VDF formatted stream to ``fp`` (a ``.write()``-supporting file-like object).
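A minimal usage sketch, assuming the vdf module is importable: dump a nested dict as VDF text into an in-memory buffer (the dict contents are illustrative only).

import io
import vdf

buf = io.StringIO()
vdf.dump({"LibraryFolders": {"1": "D:\\Games"}}, buf, pretty=True)
print(buf.getvalue())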
https://github.com/lostdragonist/steam-library-setup-tool/blob/84312dbd467f99c558d0578c2679c7245968cf26/vdf/__init__.py#L228-L243
__version__ = "3.4" __author__ = "Rossen Georgiev" import re import sys import struct from binascii import crc32 from io import BytesIO from io import StringIO as unicodeIO try: from collections.abc import Mapping except: from collections import Mapping from vdf.vdict import VDFDict if sys.version_info[0] >= 3: string_type = str int_type = int BOMS = '\ufffe\ufeff' def strip_bom(line): return line.lstrip(BOMS) else: from StringIO import StringIO as strIO string_type = basestring int_type = long BOMS = '\xef\xbb\xbf\xff\xfe\xfe\xff' BOMS_UNICODE = '\\ufffe\\ufeff'.decode('unicode-escape') def strip_bom(line): return line.lstrip(BOMS if isinstance(line, str) else BOMS_UNICODE) _unescape_char_map = { r"\n": "\n", r"\t": "\t", r"\v": "\v", r"\b": "\b", r"\r": "\r", r"\f": "\f", r"\a": "\a", r"\\": "\\", r"\?": "?", r"\"": "\"", r"\'": "\'", } _escape_char_map = {v: k for k, v in _unescape_char_map.items()} def _re_escape_match(m): return _escape_char_map[m.group()] def _re_unescape_match(m): return _unescape_char_map[m.group()] def _escape(text): return re.sub(r"[\n\t\v\b\r\f\a\\\?\"']", _re_escape_match, text) def _unescape(text): return re.sub(r"(\\n|\\t|\\v|\\b|\\r|\\f|\\a|\\\\|\\\?|\\\"|\\')", _re_unescape_match, text) def parse(fp, mapper=dict, merge_duplicate_keys=True, escaped=True): if not issubclass(mapper, Mapping): raise TypeError("Expected mapper to be subclass of dict, got %s" % type(mapper)) if not hasattr(fp, 'readline'): raise TypeError("Expected fp to be a file-like object supporting line iteration") stack = [mapper()] expect_bracket = False re_keyvalue = re.compile(r'^("(?P<qkey>(?:\\.|[^\\"])*)"|(?P<key>#?[a-z0-9\-\_\\\?$%<>]+))' r'([ \t]*(' r'"(?P<qval>(?:\\.|[^\\"])*)(?P<vq_end>")?' r'|(?P<val>(?:(?<!/)/(?!/)|[a-z0-9\-\_\\\?\*\.$<> ])+)' r'|(?P<sblock>{[ \t]*)(?P<eblock>})?' 
r'))?', flags=re.I) for lineno, line in enumerate(fp, 1): if lineno == 1: line = strip_bom(line) line = line.lstrip() if line == "" or line[0] == '/': continue if line[0] == "{": expect_bracket = False continue if expect_bracket: raise SyntaxError("vdf.parse: expected openning bracket", (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 1, line)) if line[0] == "}": if len(stack) > 1: stack.pop() continue raise SyntaxError("vdf.parse: one too many closing parenthasis", (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line)) while True: match = re_keyvalue.match(line) if not match: try: line += next(fp) continue except StopIteration: raise SyntaxError("vdf.parse: unexpected EOF (open key quote?)", (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line)) key = match.group('key') if match.group('qkey') is None else match.group('qkey') val = match.group('qval') if val is None: val = match.group('val') if val is not None: val = val.rstrip() if val == "": val = None if escaped: key = _unescape(key) if val is None: if merge_duplicate_keys and key in stack[-1]: _m = stack[-1][key] if not isinstance(_m, mapper): _m = stack[-1][key] = mapper() else: _m = mapper() stack[-1][key] = _m if match.group('eblock') is None: stack.append(_m) if match.group('sblock') is None: expect_bracket = True else: if match.group('vq_end') is None and match.group('qval') is not None: try: line += next(fp) continue except StopIteration: raise SyntaxError("vdf.parse: unexpected EOF (open quote for value?)", (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line)) stack[-1][key] = _unescape(val) if escaped else val break if len(stack) != 1: raise SyntaxError("vdf.parse: unclosed parenthasis or quotes (EOF)", (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line)) return stack.pop() def loads(s, **kwargs): if not isinstance(s, string_type): raise TypeError("Expected s to be a str, got %s" % type(s)) try: fp = unicodeIO(s) except TypeError: fp = strIO(s) return parse(fp, **kwargs) def load(fp, **kwargs): return parse(fp, **kwargs) def dumps(obj, pretty=False, escaped=True): if not isinstance(obj, Mapping): raise TypeError("Expected data to be an instance of``dict``") if not isinstance(pretty, bool): raise TypeError("Expected pretty to be of type bool") if not isinstance(escaped, bool): raise TypeError("Expected escaped to be of type bool") return ''.join(_dump_gen(obj, pretty, escaped))
MIT License
grafeasgroup/tor
test/validation/helpers.py
load_valid_transcription_from_file
python
def load_valid_transcription_from_file(name: str) -> str:
    # Use a context manager so the file handle is closed after reading.
    with open(os.path.join(VALID_TRANSCRIPTION_PATH, name), encoding="utf-8") as file:
        return file.read()
Load a transcription from the transcriptions/valid folder.
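Usage is a one-liner in a test; a hypothetical call (the file name is illustrative):

text = load_valid_transcription_from_file("table.md")  # reads test/validation/transcriptions/valid/table.md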
https://github.com/grafeasgroup/tor/blob/ebd27b6a6809257239ec439784f5469ebe322ab0/test/validation/helpers.py#L17-L20
import os
from typing import List

VALID_TRANSCRIPTION_PATH = os.path.join("test", "validation", "transcriptions", "valid")
INVALID_TRANSCRIPTION_PATH = os.path.join(
    "test", "validation", "transcriptions", "invalid"
)


def load_invalid_transcription_from_file(name: str) -> str:
    # Mirror of the valid loader; the context manager closes the handle after reading.
    with open(os.path.join(INVALID_TRANSCRIPTION_PATH, name), encoding="utf-8") as file:
        return file.read()
MIT License
ucas-vg/tov_mmdetection
mmdet/datasets/pipelines/scale_match.py
ReAspect.__init__
python
def __init__(self, aspects: tuple):
    self.aspects = aspects
:param aspects: (h/w, ...)
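A construction sketch, with the aspects given as h/w ratios per the docstring (the values are illustrative):

re_aspect = ReAspect((0.5, 1.0, 2.0))  # wide, square, and tall target aspect ratios (h/w)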
https://github.com/ucas-vg/tov_mmdetection/blob/119515939057350d154f077b749ebc9693eb9886/mmdet/datasets/pipelines/scale_match.py#L355-L359
import numpy as np from PIL import Image from torchvision.transforms import functional as F import json import warnings from copy import deepcopy from math import inf import random from mmdet.core.bbox.coder.bouding_box import BoxList from ..builder import PIPELINES from .transforms import Resize PIL_RESIZE_MODE = {'bilinear': Image.BILINEAR, 'nearest': Image.NEAREST} @PIPELINES.register_module() class ScaleMatchResize(Resize): def __init__(self, scale_match_type, backend='cv2', filter_box_size_th=2, *args, **kwargs): super(ScaleMatchResize, self).__init__(keep_ratio=True, backend=backend) if scale_match_type == 'ScaleMatch': self.scale_match = ScaleMatch(*args, **kwargs) elif scale_match_type == 'MonotonicityScaleMatch': self.scale_match = MonotonicityScaleMatch(*args, **kwargs) elif scale_match_type == 'GaussianScaleMatch': self.scale_match = GaussianScaleMatch(*args, **kwargs) else: raise ValueError("type must be chose in ['ScaleMatch', 'MonotonicityScaleMatch'" "'GaussianScaleMatch'], but {} got".format(type)) self.filter_box_size_th = filter_box_size_th def filter_small_bbox(self, results): if 'gt_bboxes' in results: bbox = results['gt_bboxes'] keep = np.logical_and((bbox[:, 2] - bbox[:, 0] + 1) >= self.filter_box_size_th, (bbox[:, 3] - bbox[:, 1] + 1) >= self.filter_box_size_th) for key in ['gt_bboxes', 'gt_labels']: results[key] = results[key][keep] gt_bboxes_ignore = results['gt_bboxes_ignore'] if len(gt_bboxes_ignore) > 0: bbox = gt_bboxes_ignore keep = np.logical_and((bbox[:, 2] - bbox[:, 0] + 1) >= self.filter_box_size_th, (bbox[:, 3] - bbox[:, 1] + 1) >= self.filter_box_size_th) results['gt_bboxes_ignore'] = results['gt_bboxes_ignore'][keep] elif len(results['mask_fields']) > 0: raise NotImplementedError('not implement for mask') def __call__(self, results): assert 'scale_factor' not in results and 'scale' not in results, 'scale_factor or scale can not been specified in results for ScaleMatch' new_image_HW = self.scale_match.get_new_size(results['img_shape'][:2], results['gt_bboxes'], 'xyxy') results['scale'] = new_image_HW self._resize_img(results) self._resize_bboxes(results) self._resize_masks(results) self._resize_seg(results) self.filter_small_bbox(results) return results class ScaleMatch(object): def __init__(self, distribute=None, sizes=None, anno_file=None, bins=100, except_rate=-1., scale_range=(0., 2.), default_scale=1.0, max_sample_try=5, out_scale_deal='clip', use_log_bins=False, mode='bilinear', debug_no_image_resize=False, debug_close_record=True): assert anno_file is not None or (distribute is not None and sizes is not None) if anno_file is not None: if except_rate < 0: except_rate = 1. 
/ bins * 2 distribute, sizes = ScaleMatch._get_distribute(json.load(open(anno_file))['annotations'], bins, except_rate, use_log_bins) self.distri_cumsum = np.cumsum(distribute) self.sizes = sizes self.mode = PIL_RESIZE_MODE[mode] self.scale_range = scale_range self.out_scale_deal = out_scale_deal assert out_scale_deal in ['clip', 'use_default_scale'] self.max_sample_try = max_sample_try self.default_scale = default_scale self.fail_time = 0 self.debug_no_image_resize = debug_no_image_resize self.debug_record = DebugScaleRecord(debug_close_record) @staticmethod def _get_distribute(annotations, bins=100, except_rate=0.1, use_log_bins=False, mu_sigma=(-1, -1)): annos = [anno for anno in annotations if not anno['iscrowd']] if len(annos) > 0 and 'ignore' in annos[0]: annos = [anno for anno in annos if not anno['ignore']] sizes = np.sqrt(np.array([anno['bbox'][2] * anno['bbox'][3] for anno in annos])) sizes = sizes[sizes > 0] if mu_sigma[0] > 0 and mu_sigma[1] > 0: print('distribute(mu, sigma): ', np.mean(sizes), np.std(sizes), end='->') sizes = (sizes - np.mean(sizes)) / np.std(sizes) sizes = sizes * mu_sigma[1] + mu_sigma[0] print(np.mean(sizes), np.std(sizes)) sizes = sizes.clip(1) if use_log_bins: sizes = np.log(sizes) sizes = np.sort(sizes) N = len(sizes) hist_sizes = sizes[int(N * except_rate / 2): int(N * (1 - except_rate / 2))] if except_rate > 0: c, s = np.histogram(hist_sizes, bins=bins - 2) c = np.array([int(N * except_rate / 2)] + c.tolist() + [N - int(N * (1 - except_rate / 2))]) s = [sizes[0]] + s.tolist() + [sizes[-1]] s = np.array(s) else: c, s = np.histogram(hist_sizes, bins=bins) c = c / len(sizes) if use_log_bins: s = np.exp(s) return c, s def _sample_by_distribute(self): r = np.random.uniform() idx = np.nonzero(r <= self.distri_cumsum + 1e-6)[0][0] mins, maxs = self.sizes[idx], self.sizes[idx + 1] ir = np.random.uniform() return (maxs - mins) * ir + mins def default_scale_deal(self, image_hw): scale = self.default_scale size = int(round(scale * image_hw[0])), int(round(scale * image_hw[1])) return size def get_new_size(self, image_hw, bbox, mode='xyxy'): target = BoxList(deepcopy(bbox), image_hw[::-1], mode) if len(target.bbox) == 0: return self.default_scale_deal(image_hw) old_mode = target.mode boxes = target.convert('xywh').bbox.cpu().numpy() sizes = np.sqrt(boxes[:, 2] * boxes[:, 3]) sizes = sizes[sizes > 0] src_size = np.exp(np.log(sizes).mean()) scale = self.default_scale for try_i in range(self.max_sample_try): dst_size = self._sample_by_distribute() _scale = dst_size / src_size if self.scale_range[1] > _scale > self.scale_range[0]: scale = _scale break self.debug_record(_scale) if self.out_scale_deal == 'clip': if _scale >= self.scale_range[1]: scale = self.scale_range[1] elif _scale <= self.scale_range[0]: scale = self.scale_range[0] size = int(round(scale * image_hw[0])), int(round(scale * image_hw[1])) target = target.convert(old_mode) target = target.resize((size[1], size[0])) if len(target.bbox) > 0: target = target[(target.bbox[:, 2] - target.bbox[:, 0] + 1) >= 2] target = target[(target.bbox[:, 3] - target.bbox[:, 1] + 1) >= 2] if len(target.bbox) == 0: self.fail_time += 1 if self.fail_time % 1 == 0: warnings.warn("Scale Matching failed for {} times, you may need to change the mean to min. 
" "dst_size is {}, src_size is {}, sizes".format(self.fail_time, dst_size, src_size, sizes)) return self.default_scale_deal(image_hw) return size def __call__(self, image: Image, target: BoxList): size = self.get_new_size([image.height, image.width], target.bbox, target.mode) target = target.resize((size[1], size[0])) if len(target.bbox) > 0: target = target[(target.bbox[:, 2] - target.bbox[:, 0] + 1) >= 2] target = target[(target.bbox[:, 3] - target.bbox[:, 1] + 1) >= 2] if not self.debug_no_image_resize: image = F.resize(image, size, self.mode) return image, target class MonotonicityScaleMatch(object): def __init__(self, src_anno_file, dst_anno_file, bins=100, except_rate=-1., scale_range=(0., 2.), default_scale=1.0, out_scale_deal='clip', use_log_bins=False, mode='bilinear', mu_sigma=(-1, -1), debug_no_image_resize=False, debug_close_record=False): if except_rate < 0: except_rate = 1. / bins * 2 dst_distri, dst_sizes = ScaleMatch._get_distribute(json.load(open(dst_anno_file))['annotations'], bins, except_rate, use_log_bins, mu_sigma) dst_distri_cumsum = np.cumsum(dst_distri) src_sizes = MonotonicityScaleMatch.match_distribute(json.load(open(src_anno_file))['annotations'], dst_distri_cumsum) self.src_sizes = src_sizes self.dst_sizes = dst_sizes self.default_scale = default_scale self.mode = PIL_RESIZE_MODE[mode] self.fail_time = 0 self.scale_range = scale_range self.out_scale_deal = out_scale_deal assert out_scale_deal in ['clip', 'use_default_scale'] self.debug_no_image_resize = debug_no_image_resize self.debug_record = DebugScaleRecord(debug_close_record) @staticmethod def match_distribute(src_annotations, dst_distri_cumsum): annos = [anno for anno in src_annotations if not anno['iscrowd']] if len(annos) > 0 and 'ignore' in annos[0]: annos = [anno for anno in annos if not anno['ignore']] sizes = np.sqrt(np.array([anno['bbox'][2] * anno['bbox'][3] for anno in annos])) sizes = sizes[sizes > 0] sizes = np.sort(sizes) N = len(sizes) src_sizes = [sizes[0]] for p_sum in dst_distri_cumsum: src_sizes.append(sizes[min(int(p_sum * N), N - 1)]) if src_sizes[-1] < sizes[-1]: src_sizes[-1] = sizes[-1] return np.array(src_sizes) def _sample_by_distribute(self, src_size): bin_i = np.nonzero(src_size <= self.src_sizes[1:] + 1e-6)[0][0] dst_bin_d = self.dst_sizes[bin_i + 1] - self.dst_sizes[bin_i] src_bin_d = self.src_sizes[bin_i + 1] - self.src_sizes[bin_i] dst_size = (src_size - self.src_sizes[bin_i]) / src_bin_d * dst_bin_d + self.dst_sizes[bin_i] return dst_size def default_scale_deal(self, image_hw): scale = self.default_scale size = int(round(scale * image_hw[0])), int(round(scale * image_hw[1])) return size def get_new_size(self, image_hw, bbox, mode='xyxy'): target = BoxList(deepcopy(bbox), image_hw[::-1], mode) if len(target.bbox) == 0: return self.default_scale_deal(image_hw) old_mode = target.mode boxes = target.convert('xywh').bbox.cpu().numpy() sizes = np.sqrt(boxes[:, 2] * boxes[:, 3]) sizes = sizes[sizes > 0] src_size = np.exp(np.log(sizes).mean()) dst_size = self._sample_by_distribute(src_size) scale = dst_size / src_size self.debug_record(scale) if self.out_scale_deal == 'clip': if scale >= self.scale_range[1]: scale = self.scale_range[1] elif scale <= self.scale_range[0]: scale = self.scale_range[0] else: if scale >= self.scale_range[1] or scale <= self.scale_range[0]: scale = self.default_scale size = int(round(scale * image_hw[0])), int(round(scale * image_hw[1])) target = target.convert(old_mode) target = target.resize((size[1], size[0])) if len(target.bbox) > 0: target = 
target[(target.bbox[:, 2] - target.bbox[:, 0] + 1) >= 2] target = target[(target.bbox[:, 3] - target.bbox[:, 1] + 1) >= 2] if len(target.bbox) == 0: self.fail_time += 1 if self.fail_time % 1 == 0: warnings.warn("Scale Matching failed for {} times, you may need to change the mean to min. " "dst_size is {}, src_size is {}, sizes".format(self.fail_time, dst_size, src_size, sizes)) return self.default_scale_deal(image_hw) return size def __call__(self, image: Image, target: BoxList): size = self.get_new_size([image.height, image.width], target.bbox, target.mode) target = target.resize((size[1], size[0])) if len(target.bbox) > 0: target = target[(target.bbox[:, 2] - target.bbox[:, 0] + 1) >= 2] target = target[(target.bbox[:, 3] - target.bbox[:, 1] + 1) >= 2] if not self.debug_no_image_resize: image = F.resize(image, size, self.mode) return image, target class ReAspect(object):
Apache License 2.0
pimoroni/pantilt-hat
library/pantilthat/pantilt.py
PanTilt._servo_range
python
def _servo_range(self, servo_index):
    return (self._servo_min[servo_index], self._servo_max[servo_index])
Get the min and max range values for a servo
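A hypothetical sketch of how this range feeds the pulse-width/degree conversions in the same class; servo_index 0 addresses servo 1, and the bounds come from the constructor defaults:

pt = PanTilt(servo1_min=575, servo1_max=2325)
us_min, us_max = pt._servo_range(0)                    # (575, 2325)
angle = pt._servo_us_to_degrees(1450, us_min, us_max)  # mid-range pulse -> 0 degrees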
https://github.com/pimoroni/pantilt-hat/blob/7a81e90032c6cfb97b1e9bac982901910c8469e3/library/pantilthat/pantilt.py#L168-L171
from threading import Timer import time import atexit from sys import version_info PWM = 0 WS2812 = 1 RGB = 0 GRB = 1 RGBW = 2 GRBW = 3 class PanTilt: REG_CONFIG = 0x00 REG_SERVO1 = 0x01 REG_SERVO2 = 0x03 REG_WS2812 = 0x05 REG_UPDATE = 0x4E UPDATE_WAIT = 0.03 NUM_LEDS = 24 def __init__(self, enable_lights=True, idle_timeout=2, light_mode=WS2812, light_type=RGB, servo1_min=575, servo1_max=2325, servo2_min=575, servo2_max=2325, address=0x15, i2c_bus=None): self._is_setup = False self._idle_timeout = idle_timeout self._servo1_timeout = None self._servo2_timeout = None self._i2c_retries = 10 self._i2c_retry_time = 0.01 self._enable_servo1 = False self._enable_servo2 = False self._enable_lights = enable_lights self._light_on = 0 self._servo_min = [servo1_min, servo2_min] self._servo_max = [servo1_max, servo2_max] self._light_mode = light_mode self._light_type = light_type self._i2c_address = address self._i2c = i2c_bus def setup(self): if self._is_setup: return True if self._i2c is None: try: from smbus import SMBus self._i2c = SMBus(1) except ImportError: if version_info[0] < 3: raise ImportError("This library requires python-smbus\nInstall with: sudo apt-get install python-smbus") elif version_info[0] == 3: raise ImportError("This library requires python3-smbus\nInstall with: sudo apt-get install python3-smbus") self.clear() self._set_config() atexit.register(self._atexit) self._is_setup = True def _atexit(self): if self._servo1_timeout is not None: self._servo1_timeout.cancel() if self._servo2_timeout is not None: self._servo2_timeout.cancel() self._enable_servo1 = False self._enable_servo2 = False self._set_config() def idle_timeout(self, value): self._idle_timeout = value def _set_config(self): config = 0 config |= self._enable_servo1 config |= self._enable_servo2 << 1 config |= self._enable_lights << 2 config |= self._light_mode << 3 config |= self._light_on << 4 self._i2c_write_byte(self.REG_CONFIG, config) def _check_int_range(self, value, value_min, value_max): if type(value) is not int: raise TypeError("Value should be an integer") if value < value_min or value > value_max: raise ValueError("Value {value} should be between {min} and {max}".format( value=value, min=value_min, max=value_max)) def _check_range(self, value, value_min, value_max): if value < value_min or value > value_max: raise ValueError("Value {value} should be between {min} and {max}".format( value=value, min=value_min, max=value_max)) def _servo_us_to_degrees(self, us, us_min, us_max): self._check_range(us, us_min, us_max) servo_range = us_max - us_min angle = (float(us - us_min) / float(servo_range)) * 180.0 return int(round(angle, 0)) - 90 def _servo_degrees_to_us(self, angle, us_min, us_max): self._check_range(angle, -90, 90) angle += 90 servo_range = us_max - us_min us = (servo_range / 180.0) * angle return us_min + int(us)
MIT License
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/light/xiaomi_miio.py
XiaomiPhilipsAbstractLight.async_turn_off
python
async def async_turn_off(self, **kwargs):
    await self._try_command(
        "Turning the light off failed.", self._light.off)
Turn the light off.
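Being a coroutine, it must be awaited from an event loop; a hypothetical sketch (a real call needs a reachable miio device, and my_light stands for a configured XiaomiPhilipsAbstractLight subclass):

import asyncio

async def demo(light):
    await light.async_turn_off()   # sends the miio "off" command via _try_command

# asyncio.run(demo(my_light))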
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/light/xiaomi_miio.py#L292-L295
import asyncio from functools import partial import logging from math import ceil from datetime import timedelta import datetime import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.light import ( PLATFORM_SCHEMA, ATTR_BRIGHTNESS, SUPPORT_BRIGHTNESS, ATTR_COLOR_TEMP, SUPPORT_COLOR_TEMP, Light, ATTR_ENTITY_ID, DOMAIN, ) from homeassistant.const import (CONF_NAME, CONF_HOST, CONF_TOKEN, ) from homeassistant.exceptions import PlatformNotReady from homeassistant.util import dt _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Xiaomi Philips Light' DATA_KEY = 'light.xiaomi_miio' CONF_MODEL = 'model' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_MODEL): vol.In( ['philips.light.sread1', 'philips.light.ceiling', 'philips.light.zyceiling', 'philips.light.bulb', 'philips.light.candle', 'philips.light.candle2']), }) REQUIREMENTS = ['python-miio==0.3.9', 'construct==2.9.41'] CCT_MIN = 1 CCT_MAX = 100 DELAYED_TURN_OFF_MAX_DEVIATION_SECONDS = 4 DELAYED_TURN_OFF_MAX_DEVIATION_MINUTES = 1 SUCCESS = ['ok'] ATTR_MODEL = 'model' ATTR_SCENE = 'scene' ATTR_DELAYED_TURN_OFF = 'delayed_turn_off' ATTR_TIME_PERIOD = 'time_period' ATTR_NIGHT_LIGHT_MODE = 'night_light_mode' ATTR_AUTOMATIC_COLOR_TEMPERATURE = 'automatic_color_temperature' ATTR_REMINDER = 'reminder' ATTR_EYECARE_MODE = 'eyecare_mode' SERVICE_SET_SCENE = 'xiaomi_miio_set_scene' SERVICE_SET_DELAYED_TURN_OFF = 'xiaomi_miio_set_delayed_turn_off' SERVICE_REMINDER_ON = 'xiaomi_miio_reminder_on' SERVICE_REMINDER_OFF = 'xiaomi_miio_reminder_off' SERVICE_NIGHT_LIGHT_MODE_ON = 'xiaomi_miio_night_light_mode_on' SERVICE_NIGHT_LIGHT_MODE_OFF = 'xiaomi_miio_night_light_mode_off' SERVICE_EYECARE_MODE_ON = 'xiaomi_miio_eyecare_mode_on' SERVICE_EYECARE_MODE_OFF = 'xiaomi_miio_eyecare_mode_off' XIAOMI_MIIO_SERVICE_SCHEMA = vol.Schema({ vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, }) SERVICE_SCHEMA_SET_SCENE = XIAOMI_MIIO_SERVICE_SCHEMA.extend({ vol.Required(ATTR_SCENE): vol.All(vol.Coerce(int), vol.Clamp(min=1, max=4)) }) SERVICE_SCHEMA_SET_DELAYED_TURN_OFF = XIAOMI_MIIO_SERVICE_SCHEMA.extend({ vol.Required(ATTR_TIME_PERIOD): vol.All(cv.time_period, cv.positive_timedelta) }) SERVICE_TO_METHOD = { SERVICE_SET_DELAYED_TURN_OFF: { 'method': 'async_set_delayed_turn_off', 'schema': SERVICE_SCHEMA_SET_DELAYED_TURN_OFF}, SERVICE_SET_SCENE: { 'method': 'async_set_scene', 'schema': SERVICE_SCHEMA_SET_SCENE}, SERVICE_REMINDER_ON: {'method': 'async_reminder_on'}, SERVICE_REMINDER_OFF: {'method': 'async_reminder_off'}, SERVICE_NIGHT_LIGHT_MODE_ON: {'method': 'async_night_light_mode_on'}, SERVICE_NIGHT_LIGHT_MODE_OFF: {'method': 'async_night_light_mode_off'}, SERVICE_EYECARE_MODE_ON: {'method': 'async_eyecare_mode_on'}, SERVICE_EYECARE_MODE_OFF: {'method': 'async_eyecare_mode_off'}, } async def async_setup_platform(hass, config, async_add_devices, discovery_info=None): from miio import Device, DeviceException if DATA_KEY not in hass.data: hass.data[DATA_KEY] = {} host = config.get(CONF_HOST) name = config.get(CONF_NAME) token = config.get(CONF_TOKEN) model = config.get(CONF_MODEL) _LOGGER.info("Initializing with host %s (token %s...)", host, token[:5]) devices = [] unique_id = None if model is None: try: miio_device = Device(host, token) device_info = miio_device.info() model = device_info.model unique_id = "{}-{}".format(model, 
device_info.mac_address) _LOGGER.info("%s %s %s detected", model, device_info.firmware_version, device_info.hardware_version) except DeviceException: raise PlatformNotReady if model == 'philips.light.sread1': from miio import PhilipsEyecare light = PhilipsEyecare(host, token) primary_device = XiaomiPhilipsEyecareLamp( name, light, model, unique_id) devices.append(primary_device) hass.data[DATA_KEY][host] = primary_device secondary_device = XiaomiPhilipsEyecareLampAmbientLight( name, light, model, unique_id) devices.append(secondary_device) elif model in ['philips.light.ceiling', 'philips.light.zyceiling']: from miio import Ceil light = Ceil(host, token) device = XiaomiPhilipsCeilingLamp(name, light, model, unique_id) devices.append(device) hass.data[DATA_KEY][host] = device elif model in ['philips.light.bulb', 'philips.light.candle', 'philips.light.candle2']: from miio import PhilipsBulb light = PhilipsBulb(host, token) device = XiaomiPhilipsBulb(name, light, model, unique_id) devices.append(device) hass.data[DATA_KEY][host] = device else: _LOGGER.error( 'Unsupported device found! Please create an issue at ' 'https://github.com/syssi/philipslight/issues ' 'and provide the following data: %s', model) return False async_add_devices(devices, update_before_add=True) async def async_service_handler(service): method = SERVICE_TO_METHOD.get(service.service) params = {key: value for key, value in service.data.items() if key != ATTR_ENTITY_ID} entity_ids = service.data.get(ATTR_ENTITY_ID) if entity_ids: target_devices = [dev for dev in hass.data[DATA_KEY].values() if dev.entity_id in entity_ids] else: target_devices = hass.data[DATA_KEY].values() update_tasks = [] for target_device in target_devices: if not hasattr(target_device, method['method']): continue await getattr(target_device, method['method'])(**params) update_tasks.append(target_device.async_update_ha_state(True)) if update_tasks: await asyncio.wait(update_tasks, loop=hass.loop) for xiaomi_miio_service in SERVICE_TO_METHOD: schema = SERVICE_TO_METHOD[xiaomi_miio_service].get( 'schema', XIAOMI_MIIO_SERVICE_SCHEMA) hass.services.async_register( DOMAIN, xiaomi_miio_service, async_service_handler, schema=schema) class XiaomiPhilipsAbstractLight(Light): def __init__(self, name, light, model, unique_id): self._name = name self._light = light self._model = model self._unique_id = unique_id self._brightness = None self._available = False self._state = None self._state_attrs = { ATTR_MODEL: self._model, } @property def should_poll(self): return True @property def unique_id(self): return self._unique_id @property def name(self): return self._name @property def available(self): return self._available @property def device_state_attributes(self): return self._state_attrs @property def is_on(self): return self._state @property def brightness(self): return self._brightness @property def supported_features(self): return SUPPORT_BRIGHTNESS async def _try_command(self, mask_error, func, *args, **kwargs): from miio import DeviceException try: result = await self.hass.async_add_job( partial(func, *args, **kwargs)) _LOGGER.debug("Response received from light: %s", result) return result == SUCCESS except DeviceException as exc: _LOGGER.error(mask_error, exc) self._available = False return False async def async_turn_on(self, **kwargs): if ATTR_BRIGHTNESS in kwargs: brightness = kwargs[ATTR_BRIGHTNESS] percent_brightness = ceil(100 * brightness / 255.0) _LOGGER.debug( "Setting brightness: %s %s%%", brightness, percent_brightness) result = await 
self._try_command( "Setting brightness failed: %s", self._light.set_brightness, percent_brightness) if result: self._brightness = brightness else: await self._try_command( "Turning the light on failed.", self._light.on)
MIT License
myriadrf/pylms7002soapy
pyLMS7002Soapy/LMS7002_LimeLight.py
LMS7002_LimeLight.LML2_AQP
python
def LML2_AQP(self):
    return self._readReg('POS2', 'LML2_AQP<1:0>')
Get the value of LML2_AQP<1:0>
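A read/write sketch; the handle name is hypothetical, and the setter is not shown here but would follow the module's range-checked pattern for the neighbouring POS2 fields:

lml = lms7002.LimeLight      # hypothetical handle to the LimeLight register block
current = lml.LML2_AQP       # reads POS2 field LML2_AQP<1:0>
lml.LML2_AQP = 2             # valid values are 0..3, as for the other 2-bit fields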
https://github.com/myriadrf/pylms7002soapy/blob/4f828eb9282c302dc6b187d91df5e77c8a6f2d61/pyLMS7002Soapy/LMS7002_LimeLight.py#L604-L608
from pyLMS7002Soapy.LMS7002_base import LMS7002_base class LMS7002_LimeLight(LMS7002_base): __slots__ = [] def __init__(self, chip): self.chip = chip self.channel = None self.prefix = "LimeLight_" @property def DIQDIRCTR2(self): return self._readReg('IOCFG', 'DIQDIRCTR2') @DIQDIRCTR2.setter def DIQDIRCTR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIRCTR2', value) @property def DIQDIR2(self): return self._readReg('IOCFG', 'DIQDIR2') @DIQDIR2.setter def DIQDIR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIR2', value) @property def DIQDIRCTR1(self): return self._readReg('IOCFG', 'DIQDIRCTR1') @DIQDIRCTR1.setter def DIQDIRCTR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIRCTR1', value) @property def DIQDIR1(self): return self._readReg('IOCFG', 'DIQDIR1') @DIQDIR1.setter def DIQDIR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIR1', value) @property def ENABLEDIRCTR2(self): return self._readReg('IOCFG', 'ENABLEDIRCTR2') @ENABLEDIRCTR2.setter def ENABLEDIRCTR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIRCTR2', value) @property def ENABLEDIR2(self): return self._readReg('IOCFG', 'ENABLEDIR2') @ENABLEDIR2.setter def ENABLEDIR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIR2', value) @property def ENABLEDIRCTR1(self): return self._readReg('IOCFG', 'ENABLEDIRCTR1') @ENABLEDIRCTR1.setter def ENABLEDIRCTR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIRCTR1', value) @property def ENABLEDIR1(self): return self._readReg('IOCFG', 'ENABLEDIR1') @ENABLEDIR1.setter def ENABLEDIR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIR1', value) @property def MOD_EN(self): return self._readReg('IOCFG', 'MOD_EN') @MOD_EN.setter def MOD_EN(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'MOD_EN', value) @property def LML2_FIDM(self): return self._readReg('IOCFG', 'LML2_FIDM') @LML2_FIDM.setter def LML2_FIDM(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML2_FIDM', value) @property def LML2_TXNRXIQ(self): return self._readReg('IOCFG', 'LML2_TXNRXIQ') @LML2_TXNRXIQ.setter def LML2_TXNRXIQ(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML2_TXNRXIQ', value) @property def LML2_MODE(self): return self._readReg('IOCFG', 'LML2_MODE') @LML2_MODE.setter def LML2_MODE(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML2_MODE', value) @property def LML1_FIDM(self): return self._readReg('IOCFG', 'LML1_FIDM') @LML1_FIDM.setter def LML1_FIDM(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML1_FIDM', value) @property def LML1_TXNRXIQ(self): return self._readReg('IOCFG', 'LML1_TXNRXIQ') @LML1_TXNRXIQ.setter def LML1_TXNRXIQ(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML1_TXNRXIQ', value) @property def LML1_MODE(self): return self._readReg('IOCFG', 'LML1_MODE') @LML1_MODE.setter def 
LML1_MODE(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'LML1_MODE', value) @property def LML1_S3S(self): return self._readReg('POS1', 'LML1_S3S<1:0>') @LML1_S3S.setter def LML1_S3S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_S3S<1:0>', value) @property def LML1_S2S(self): return self._readReg('POS1', 'LML1_S2S<1:0>') @LML1_S2S.setter def LML1_S2S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_S2S<1:0>', value) @property def LML1_S1S(self): return self._readReg('POS1', 'LML1_S1S<1:0>') @LML1_S1S.setter def LML1_S1S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_S1S<1:0>', value) @property def LML1_S0S(self): return self._readReg('POS1', 'LML1_S0S<1:0>') @LML1_S0S.setter def LML1_S0S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_S0S<1:0>', value) @property def LML1_BQP(self): return self._readReg('POS1', 'LML1_BQP<1:0>') @LML1_BQP.setter def LML1_BQP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_BQP<1:0>', value) @property def LML1_BIP(self): return self._readReg('POS1', 'LML1_BIP<1:0>') @LML1_BIP.setter def LML1_BIP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_BIP<1:0>', value) @property def LML1_AQP(self): return self._readReg('POS1', 'LML1_AQP<1:0>') @LML1_AQP.setter def LML1_AQP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_AQP<1:0>', value) @property def LML1_AIP(self): return self._readReg('POS1', 'LML1_AIP<1:0>') @LML1_AIP.setter def LML1_AIP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS1', 'LML1_AIP<1:0>', value) @property def LML1_BB2RF_PST(self): return self._readReg('BBRF1', 'LML1_BB2RF_PST<4:0>') @LML1_BB2RF_PST.setter def LML1_BB2RF_PST(self, value): if not (0 <= value <= 31): raise ValueError("Value must be [0..31]") self._writeReg('BBRF1', 'LML1_BB2RF_PST<4:0>', value) @property def LML1_BB2RF_PRE(self): return self._readReg('BBRF1', 'LML1_BB2RF_PRE<4:0>') @LML1_BB2RF_PRE.setter def LML1_BB2RF_PRE(self, value): if not (0 <= value <= 31): raise ValueError("Value must be [0..31]") self._writeReg('BBRF1', 'LML1_BB2RF_PRE<4:0>', value) @property def LML1_RF2BB_PST(self): return self._readReg('RFBB1', 'LML1_RF2BB_PST<4:0>') @LML1_RF2BB_PST.setter def LML1_RF2BB_PST(self, value): if not (0 <= value <= 31): raise ValueError("Value must be [0..31]") self._writeReg('RFBB1', 'LML1_RF2BB_PST<4:0>', value) @property def LML1_RF2BB_PRE(self): return self._readReg('RFBB1', 'LML1_RF2BB_PRE<4:0>') @LML1_RF2BB_PRE.setter def LML1_RF2BB_PRE(self, value): if not (0 <= value <= 31): raise ValueError("Value must be [0..31]") self._writeReg('RFBB1', 'LML1_RF2BB_PRE<4:0>', value) @property def LML2_S3S(self): return self._readReg('POS2', 'LML2_S3S<1:0>') @LML2_S3S.setter def LML2_S3S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_S3S<1:0>', value) @property def LML2_S2S(self): return self._readReg('POS2', 'LML2_S2S<1:0>') @LML2_S2S.setter def LML2_S2S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_S2S<1:0>', value) @property 
def LML2_S1S(self): return self._readReg('POS2', 'LML2_S1S<1:0>') @LML2_S1S.setter def LML2_S1S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_S1S<1:0>', value) @property def LML2_S0S(self): return self._readReg('POS2', 'LML2_S0S<1:0>') @LML2_S0S.setter def LML2_S0S(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_S0S<1:0>', value) @property def LML2_BQP(self): return self._readReg('POS2', 'LML2_BQP<1:0>') @LML2_BQP.setter def LML2_BQP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_BQP<1:0>', value) @property def LML2_BIP(self): return self._readReg('POS2', 'LML2_BIP<1:0>') @LML2_BIP.setter def LML2_BIP(self, value): if not (0 <= value <= 3): raise ValueError("Value must be [0..3]") self._writeReg('POS2', 'LML2_BIP<1:0>', value) @property
Apache License 2.0
docusign/docusign-python-client
docusign_esign/models/account_information.py
AccountInformation.billing_period_envelopes_allowed
python
def billing_period_envelopes_allowed(self):
    return self._billing_period_envelopes_allowed
Gets the billing_period_envelopes_allowed of this AccountInformation.

Reserved: TBD

:return: The billing_period_envelopes_allowed of this AccountInformation.
:rtype: str
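These generated models accept their fields as keyword arguments, so a round-trip through the property looks like this (the value is illustrative):

info = AccountInformation(billing_period_envelopes_allowed="100")
print(info.billing_period_envelopes_allowed)  # "100"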
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/account_information.py#L335-L343
import pprint import re import six from docusign_esign.client.configuration import Configuration class AccountInformation(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'account_id_guid': 'str', 'account_name': 'str', 'account_settings': 'AccountSettingsInformation', 'allow_transaction_rooms': 'str', 'billing_period_days_remaining': 'str', 'billing_period_end_date': 'str', 'billing_period_envelopes_allowed': 'str', 'billing_period_envelopes_sent': 'str', 'billing_period_start_date': 'str', 'billing_profile': 'str', 'can_upgrade': 'str', 'connect_permission': 'str', 'created_date': 'str', 'currency_code': 'str', 'current_plan_id': 'str', 'display_appliance_start_url': 'str', 'display_appliance_url': 'str', 'distributor_code': 'str', 'docu_sign_landing_url': 'str', 'dss_values': 'dict(str, str)', 'envelope_sending_blocked': 'str', 'envelope_unit_price': 'str', 'external_account_id': 'str', 'forgotten_password_questions_count': 'str', 'is_downgrade': 'str', 'payment_method': 'str', 'plan_classification': 'str', 'plan_end_date': 'str', 'plan_name': 'str', 'plan_start_date': 'str', 'recipient_domains': 'list[RecipientDomain]', 'seats_allowed': 'str', 'seats_in_use': 'str', 'status21_cfr_part11': 'str', 'suspension_date': 'str', 'suspension_status': 'str', 'use_display_appliance': 'bool' } attribute_map = { 'account_id_guid': 'accountIdGuid', 'account_name': 'accountName', 'account_settings': 'accountSettings', 'allow_transaction_rooms': 'allowTransactionRooms', 'billing_period_days_remaining': 'billingPeriodDaysRemaining', 'billing_period_end_date': 'billingPeriodEndDate', 'billing_period_envelopes_allowed': 'billingPeriodEnvelopesAllowed', 'billing_period_envelopes_sent': 'billingPeriodEnvelopesSent', 'billing_period_start_date': 'billingPeriodStartDate', 'billing_profile': 'billingProfile', 'can_upgrade': 'canUpgrade', 'connect_permission': 'connectPermission', 'created_date': 'createdDate', 'currency_code': 'currencyCode', 'current_plan_id': 'currentPlanId', 'display_appliance_start_url': 'displayApplianceStartUrl', 'display_appliance_url': 'displayApplianceUrl', 'distributor_code': 'distributorCode', 'docu_sign_landing_url': 'docuSignLandingUrl', 'dss_values': 'dssValues', 'envelope_sending_blocked': 'envelopeSendingBlocked', 'envelope_unit_price': 'envelopeUnitPrice', 'external_account_id': 'externalAccountId', 'forgotten_password_questions_count': 'forgottenPasswordQuestionsCount', 'is_downgrade': 'isDowngrade', 'payment_method': 'paymentMethod', 'plan_classification': 'planClassification', 'plan_end_date': 'planEndDate', 'plan_name': 'planName', 'plan_start_date': 'planStartDate', 'recipient_domains': 'recipientDomains', 'seats_allowed': 'seatsAllowed', 'seats_in_use': 'seatsInUse', 'status21_cfr_part11': 'status21CFRPart11', 'suspension_date': 'suspensionDate', 'suspension_status': 'suspensionStatus', 'use_display_appliance': 'useDisplayAppliance' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._account_id_guid = None self._account_name = None self._account_settings = None self._allow_transaction_rooms = None self._billing_period_days_remaining = None self._billing_period_end_date = None self._billing_period_envelopes_allowed = None self._billing_period_envelopes_sent = None self._billing_period_start_date = None 
self._billing_profile = None self._can_upgrade = None self._connect_permission = None self._created_date = None self._currency_code = None self._current_plan_id = None self._display_appliance_start_url = None self._display_appliance_url = None self._distributor_code = None self._docu_sign_landing_url = None self._dss_values = None self._envelope_sending_blocked = None self._envelope_unit_price = None self._external_account_id = None self._forgotten_password_questions_count = None self._is_downgrade = None self._payment_method = None self._plan_classification = None self._plan_end_date = None self._plan_name = None self._plan_start_date = None self._recipient_domains = None self._seats_allowed = None self._seats_in_use = None self._status21_cfr_part11 = None self._suspension_date = None self._suspension_status = None self._use_display_appliance = None self.discriminator = None setattr(self, "_{}".format('account_id_guid'), kwargs.get('account_id_guid', None)) setattr(self, "_{}".format('account_name'), kwargs.get('account_name', None)) setattr(self, "_{}".format('account_settings'), kwargs.get('account_settings', None)) setattr(self, "_{}".format('allow_transaction_rooms'), kwargs.get('allow_transaction_rooms', None)) setattr(self, "_{}".format('billing_period_days_remaining'), kwargs.get('billing_period_days_remaining', None)) setattr(self, "_{}".format('billing_period_end_date'), kwargs.get('billing_period_end_date', None)) setattr(self, "_{}".format('billing_period_envelopes_allowed'), kwargs.get('billing_period_envelopes_allowed', None)) setattr(self, "_{}".format('billing_period_envelopes_sent'), kwargs.get('billing_period_envelopes_sent', None)) setattr(self, "_{}".format('billing_period_start_date'), kwargs.get('billing_period_start_date', None)) setattr(self, "_{}".format('billing_profile'), kwargs.get('billing_profile', None)) setattr(self, "_{}".format('can_upgrade'), kwargs.get('can_upgrade', None)) setattr(self, "_{}".format('connect_permission'), kwargs.get('connect_permission', None)) setattr(self, "_{}".format('created_date'), kwargs.get('created_date', None)) setattr(self, "_{}".format('currency_code'), kwargs.get('currency_code', None)) setattr(self, "_{}".format('current_plan_id'), kwargs.get('current_plan_id', None)) setattr(self, "_{}".format('display_appliance_start_url'), kwargs.get('display_appliance_start_url', None)) setattr(self, "_{}".format('display_appliance_url'), kwargs.get('display_appliance_url', None)) setattr(self, "_{}".format('distributor_code'), kwargs.get('distributor_code', None)) setattr(self, "_{}".format('docu_sign_landing_url'), kwargs.get('docu_sign_landing_url', None)) setattr(self, "_{}".format('dss_values'), kwargs.get('dss_values', None)) setattr(self, "_{}".format('envelope_sending_blocked'), kwargs.get('envelope_sending_blocked', None)) setattr(self, "_{}".format('envelope_unit_price'), kwargs.get('envelope_unit_price', None)) setattr(self, "_{}".format('external_account_id'), kwargs.get('external_account_id', None)) setattr(self, "_{}".format('forgotten_password_questions_count'), kwargs.get('forgotten_password_questions_count', None)) setattr(self, "_{}".format('is_downgrade'), kwargs.get('is_downgrade', None)) setattr(self, "_{}".format('payment_method'), kwargs.get('payment_method', None)) setattr(self, "_{}".format('plan_classification'), kwargs.get('plan_classification', None)) setattr(self, "_{}".format('plan_end_date'), kwargs.get('plan_end_date', None)) setattr(self, "_{}".format('plan_name'), kwargs.get('plan_name', None)) 
setattr(self, "_{}".format('plan_start_date'), kwargs.get('plan_start_date', None)) setattr(self, "_{}".format('recipient_domains'), kwargs.get('recipient_domains', None)) setattr(self, "_{}".format('seats_allowed'), kwargs.get('seats_allowed', None)) setattr(self, "_{}".format('seats_in_use'), kwargs.get('seats_in_use', None)) setattr(self, "_{}".format('status21_cfr_part11'), kwargs.get('status21_cfr_part11', None)) setattr(self, "_{}".format('suspension_date'), kwargs.get('suspension_date', None)) setattr(self, "_{}".format('suspension_status'), kwargs.get('suspension_status', None)) setattr(self, "_{}".format('use_display_appliance'), kwargs.get('use_display_appliance', None)) @property def account_id_guid(self): return self._account_id_guid @account_id_guid.setter def account_id_guid(self, account_id_guid): self._account_id_guid = account_id_guid @property def account_name(self): return self._account_name @account_name.setter def account_name(self, account_name): self._account_name = account_name @property def account_settings(self): return self._account_settings @account_settings.setter def account_settings(self, account_settings): self._account_settings = account_settings @property def allow_transaction_rooms(self): return self._allow_transaction_rooms @allow_transaction_rooms.setter def allow_transaction_rooms(self, allow_transaction_rooms): self._allow_transaction_rooms = allow_transaction_rooms @property def billing_period_days_remaining(self): return self._billing_period_days_remaining @billing_period_days_remaining.setter def billing_period_days_remaining(self, billing_period_days_remaining): self._billing_period_days_remaining = billing_period_days_remaining @property def billing_period_end_date(self): return self._billing_period_end_date @billing_period_end_date.setter def billing_period_end_date(self, billing_period_end_date): self._billing_period_end_date = billing_period_end_date @property
MIT License
belyalov/tinyweb
tinyweb/server.py
response.add_header
python
def add_header(self, key, value):
    self.headers[key] = value
Add HTTP response header

Arguments:
    key - header name
    value - header value

Example:
    resp.add_header('Content-Encoding', 'gzip')
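Since headers is a plain dict, repeated calls with the same key overwrite rather than append; a small sketch, assuming resp is a response instance as in the docstring:

resp.add_header('Content-Encoding', 'gzip')
resp.add_header('Content-Encoding', 'deflate')  # dict-backed: replaces 'gzip'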
https://github.com/belyalov/tinyweb/blob/33d73c90f005cfd3423affe358cad0f13c37728f/tinyweb/server.py#L225-L235
import logging import uasyncio as asyncio import uasyncio.core import ujson as json import gc import uos as os import sys import uerrno as errno import usocket as socket log = logging.getLogger('WEB') type_gen = type((lambda: (yield))()) IS_UASYNCIO_V3 = hasattr(asyncio, "__version__") and asyncio.__version__ >= (3,) def urldecode_plus(s): s = s.replace('+', ' ') arr = s.split('%') res = arr[0] for it in arr[1:]: if len(it) >= 2: res += chr(int(it[:2], 16)) + it[2:] elif len(it) == 0: res += '%' else: res += it return res def parse_query_string(s): res = {} pairs = s.split('&') for p in pairs: vals = [urldecode_plus(x) for x in p.split('=', 1)] if len(vals) == 1: res[vals[0]] = '' else: res[vals[0]] = vals[1] return res class HTTPException(Exception): def __init__(self, code=400): self.code = code class request: def __init__(self, _reader): self.reader = _reader self.headers = {} self.method = b'' self.path = b'' self.query_string = b'' async def read_request_line(self): while True: rl = await self.reader.readline() if rl == b'\r\n' or rl == b'\n': continue break rl_frags = rl.split() if len(rl_frags) != 3: raise HTTPException(400) self.method = rl_frags[0] url_frags = rl_frags[1].split(b'?', 1) self.path = url_frags[0] if len(url_frags) > 1: self.query_string = url_frags[1] async def read_headers(self, save_headers=[]): while True: gc.collect() line = await self.reader.readline() if line == b'\r\n': break frags = line.split(b':', 1) if len(frags) != 2: raise HTTPException(400) if frags[0] in save_headers: self.headers[frags[0]] = frags[1].strip() async def read_parse_form_data(self): gc.collect() if b'Content-Length' not in self.headers: return {} if b'Content-Type' not in self.headers: return {} size = int(self.headers[b'Content-Length']) if size > self.params['max_body_size'] or size < 0: raise HTTPException(413) data = await self.reader.readexactly(size) ct = self.headers[b'Content-Type'].split(b';', 1)[0] try: if ct == b'application/json': return json.loads(data) elif ct == b'application/x-www-form-urlencoded': return parse_query_string(data.decode()) except ValueError: raise HTTPException(400) class response: def __init__(self, _writer): self.writer = _writer self.send = _writer.awrite self.code = 200 self.version = '1.0' self.headers = {} async def _send_headers(self): hdrs = 'HTTP/{} {} MSG\r\n'.format(self.version, self.code) for k, v in self.headers.items(): hdrs += '{}: {}\r\n'.format(k, v) hdrs += '\r\n' gc.collect() await self.send(hdrs) async def error(self, code, msg=None): self.code = code if msg: self.add_header('Content-Length', len(msg)) await self._send_headers() if msg: await self.send(msg) async def redirect(self, location, msg=None): self.code = 302 self.add_header('Location', location) if msg: self.add_header('Content-Length', len(msg)) await self._send_headers() if msg: await self.send(msg)
MIT License
avalente/appmetrics
appmetrics/simple_metrics.py
Counter.raw_data
python
def raw_data(self): return self.value
Return the raw value
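A tiny usage sketch; the notified values are made up for illustration:

c = Counter()
c.notify(3)
c.notify('4')          # values are coerced with int()
print(c.raw_data())    # 7
print(c.get())         # {'kind': 'counter', 'value': 7}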
https://github.com/avalente/appmetrics/blob/366fc7e1ca897e49a2227cbfa43bfa02a47f1acc/appmetrics/simple_metrics.py#L51-L55
import threading class Counter(object): def __init__(self): self.value = 0 self.lock = threading.Lock() def notify(self, value): value = int(value) with self.lock: self.value += value def get(self): return dict(kind="counter", value=self.value)
Apache License 2.0
vitruvianscience/opendeep
opendeep/utils/midi/MidiOutFile.py
MidiOutFile.meta_slice
python
def meta_slice(self, meta_type, data_slice): slc = fromBytes([META_EVENT, meta_type]) + writeVar(len(data_slice)) + data_slice self.event_slice(slc)
Writes a meta event
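A hedged usage sketch of the encoding above (META_EVENT status byte, meta type, variable-length size, payload); the file name and track-name text are illustrative, and start_of_track/end_of_track are assumed to come from the surrounding MidiOutFile/MidiOutStream API, which is not fully shown here:

out = MidiOutFile('demo.mid')       # illustrative output path
out.header(format=0, nTracks=1, division=96)
out.start_of_track()                # assumed API inherited from MidiOutStream
out.meta_slice(0x03, 'Lead Synth')  # 0x03 = track-name meta type -> FF 03 <len> data
out.end_of_track()                  # assumed API
out.eof()                           # flushes the RawOutstreamFile to disk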
https://github.com/vitruvianscience/opendeep/blob/e96efc449101094354b615cf15afe6d03644fc36/opendeep/utils/midi/MidiOutFile.py#L165-L171
from __future__ import absolute_import from .MidiOutStream import MidiOutStream from .RawOutstreamFile import RawOutstreamFile from .constants import * from .DataTypeConverters import fromBytes, writeVar class MidiOutFile(MidiOutStream): def __init__(self, raw_out=''): self.raw_out = RawOutstreamFile(raw_out) MidiOutStream.__init__(self) def write(self): self.raw_out.write() def event_slice(self, slc): trk = self._current_track_buffer trk.writeVarLen(self.rel_time()) trk.writeSlice(slc) def note_on(self, channel=0, note=0x40, velocity=0x40): slc = fromBytes([NOTE_ON + channel, note, velocity]) self.event_slice(slc) def note_off(self, channel=0, note=0x40, velocity=0x40): slc = fromBytes([NOTE_OFF + channel, note, velocity]) self.event_slice(slc) def aftertouch(self, channel=0, note=0x40, velocity=0x40): slc = fromBytes([AFTERTOUCH + channel, note, velocity]) self.event_slice(slc) def continuous_controller(self, channel, controller, value): slc = fromBytes([CONTINUOUS_CONTROLLER + channel, controller, value]) self.event_slice(slc) def patch_change(self, channel, patch): slc = fromBytes([PATCH_CHANGE + channel, patch]) self.event_slice(slc) def channel_pressure(self, channel, pressure): slc = fromBytes([CHANNEL_PRESSURE + channel, pressure]) self.event_slice(slc) def pitch_bend(self, channel, value): msb = (value>>7) & 0xFF lsb = value & 0xFF slc = fromBytes([PITCH_BEND + channel, msb, lsb]) self.event_slice(slc) def system_exclusive(self, data): sysex_len = writeVar(len(data)+1) self.event_slice(chr(SYSTEM_EXCLUSIVE) + sysex_len + data + chr(END_OFF_EXCLUSIVE)) def midi_time_code(self, msg_type, values): value = (msg_type<<4) + values self.event_slice(fromBytes([MIDI_TIME_CODE, value])) def song_position_pointer(self, value): lsb = (value & 0x7F) msb = (value >> 7) & 0x7F self.event_slice(fromBytes([SONG_POSITION_POINTER, lsb, msb])) def song_select(self, songNumber): self.event_slice(fromBytes([SONG_SELECT, songNumber])) def tuning_request(self): self.event_slice(chr(TUNING_REQUEST)) def header(self, format=0, nTracks=1, division=96): raw = self.raw_out raw.writeSlice('MThd') bew = raw.writeBew bew(6, 4) bew(format, 2) bew(nTracks, 2) bew(division, 2) def eof(self): self.write()
Apache License 2.0
azure/azure-storage-python
azure-storage-blob/azure/storage/blob/_deserialization.py
_parse_sub_response_to_http_response
python
def _parse_sub_response_to_http_response(sub_response): empty_line = _HTTP_LINE_ENDING.encode('utf-8') num_empty_lines = 0 batch_http_sub_response = HTTPResponse(None, '', dict(), b'') try: body_stream = BytesIO() body_stream.write(sub_response.encode('utf-8')) body_stream.seek(0) while True: line = body_stream.readline() if line == b'': return batch_http_sub_response if line.startswith("HTTP".encode('utf-8')): batch_http_sub_response.status = _to_int(line.decode('utf-8').split(" ")[1]) elif line == empty_line: num_empty_lines += 1 elif line.startswith("x-ms-error-code".encode('utf-8')): batch_http_sub_response.message = line.decode('utf-8').split(": ")[1].rstrip() elif num_empty_lines == 2: batch_http_sub_response.body += line else: header = line.decode('utf-8').split(": ")[0] value = line.decode('utf-8').split(": ")[1].rstrip() batch_http_sub_response.headers[header] = value finally: body_stream.close() return batch_http_sub_response
A sub-response has the shape: Header: Value (1 or more times), then HTTP/<version> <statusCode> <statusName>, Header: Value (1 or more times), an empty line, then the body (if any). :param sub_response: The raw text of this sub-response. :return: An HTTPResponse object.
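A minimal sketch of the shape the parser expects; the status line and header values below are fabricated for illustration:

raw_sub_response = (
    "HTTP/1.1 202 Accepted\r\n"                                   # status line -> .status = 202
    "x-ms-request-id: 778fdc83-801e-0000-62ff-0334671e284f\r\n"
    "x-ms-version: 2018-11-09\r\n"
    "\r\n"                                                        # blank line closes the headers
)
parsed = _parse_sub_response_to_http_response(raw_sub_response)
assert parsed.status == 202
assert parsed.headers['x-ms-version'] == '2018-11-09'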
https://github.com/azure/azure-storage-python/blob/4306898850dd21617644fc537a57d025e833db74/azure-storage-blob/azure/storage/blob/_deserialization.py#L608-L650
from azure.common import AzureException from dateutil import parser from azure.storage.common._http import HTTPResponse try: from xml.etree import cElementTree as ETree except ImportError: from xml.etree import ElementTree as ETree from azure.storage.common._common_conversion import ( _decode_base64_to_text, _to_str, _get_content_md5 ) from azure.storage.common._deserialization import ( _parse_properties, _to_int, _parse_metadata, _convert_xml_to_signed_identifiers, _bool, ) from .models import ( Container, Blob, BlobBlock, BlobBlockList, BlobBlockState, BlobProperties, PageRange, ContainerProperties, AppendBlockProperties, PageBlobProperties, ResourceProperties, BlobPrefix, AccountInformation, UserDelegationKey, BatchSubResponse) from ._encryption import _decrypt_blob from azure.storage.common.models import _list from azure.storage.common._error import ( _validate_content_match, _ERROR_DECRYPTION_FAILURE, ) from io import BytesIO _HTTP_LINE_ENDING = "\r\n" def _parse_cpk_headers(response, properties): server_encrypted = response.headers.get('x-ms-request-server-encrypted') if server_encrypted is not None: properties.request_server_encrypted = _bool(server_encrypted) properties.encryption_key_sha256 = response.headers.get('x-ms-encryption-key-sha256') def _parse_base_properties(response): resource_properties = ResourceProperties() resource_properties.last_modified = parser.parse(response.headers.get('last-modified')) resource_properties.etag = response.headers.get('etag') _parse_cpk_headers(response, resource_properties) return resource_properties def _parse_page_properties(response): put_page = PageBlobProperties() put_page.last_modified = parser.parse(response.headers.get('last-modified')) put_page.etag = response.headers.get('etag') put_page.sequence_number = _to_int(response.headers.get('x-ms-blob-sequence-number')) _parse_cpk_headers(response, put_page) return put_page def _parse_append_block(response): append_block = AppendBlockProperties() append_block.last_modified = parser.parse(response.headers.get('last-modified')) append_block.etag = response.headers.get('etag') append_block.append_offset = _to_int(response.headers.get('x-ms-blob-append-offset')) append_block.committed_block_count = _to_int(response.headers.get('x-ms-blob-committed-block-count')) _parse_cpk_headers(response, append_block) return append_block def _parse_snapshot_blob(response, name): snapshot = response.headers.get('x-ms-snapshot') return _parse_blob(response, name, snapshot) def _parse_lease(response): lease = {'time': response.headers.get('x-ms-lease-time')} if lease['time']: lease['time'] = _to_int(lease['time']) lease['id'] = response.headers.get('x-ms-lease-id') return lease def _parse_blob(response, name, snapshot, validate_content=False, require_encryption=False, key_encryption_key=None, key_resolver_function=None, start_offset=None, end_offset=None): if response is None: return None metadata = _parse_metadata(response) props = _parse_properties(response, BlobProperties) content_settings = getattr(props, 'content_settings') if 'content-range' in response.headers: if 'x-ms-blob-content-md5' in response.headers: setattr(content_settings, 'content_md5', _to_str(response.headers['x-ms-blob-content-md5'])) else: delattr(content_settings, 'content_md5') if validate_content: computed_md5 = _get_content_md5(response.body) _validate_content_match(response.headers['content-md5'], computed_md5) if key_encryption_key is not None or key_resolver_function is not None: try: response.body = 
_decrypt_blob(require_encryption, key_encryption_key, key_resolver_function, response, start_offset, end_offset) except: raise AzureException(_ERROR_DECRYPTION_FAILURE) return Blob(name, snapshot, response.body, props, metadata) def _parse_container(response, name): if response is None: return None metadata = _parse_metadata(response) props = _parse_properties(response, ContainerProperties) return Container(name, props, metadata) def _convert_xml_to_signed_identifiers_and_access(response): acl = _convert_xml_to_signed_identifiers(response) acl.public_access = response.headers.get('x-ms-blob-public-access') return acl def _convert_xml_to_containers(response): if response is None or response.body is None: return None containers = _list() list_element = ETree.fromstring(response.body) setattr(containers, 'next_marker', list_element.findtext('NextMarker')) containers_element = list_element.find('Containers') for container_element in containers_element.findall('Container'): container = Container() container.name = container_element.findtext('Name') metadata_root_element = container_element.find('Metadata') if metadata_root_element is not None: container.metadata = dict() for metadata_element in metadata_root_element: container.metadata[metadata_element.tag] = metadata_element.text properties_element = container_element.find('Properties') container.properties.etag = properties_element.findtext('Etag') container.properties.last_modified = parser.parse(properties_element.findtext('Last-Modified')) container.properties.lease_status = properties_element.findtext('LeaseStatus') container.properties.lease_state = properties_element.findtext('LeaseState') container.properties.lease_duration = properties_element.findtext('LeaseDuration') container.properties.public_access = properties_element.findtext('PublicAccess') container.properties.has_immutability_policy = properties_element.findtext('HasImmutabilityPolicy') container.properties.has_legal_hold = properties_element.findtext('HasLegalHold') containers.append(container) return containers LIST_BLOBS_ATTRIBUTE_MAP = { 'Last-Modified': (None, 'last_modified', parser.parse), 'Etag': (None, 'etag', _to_str), 'x-ms-blob-sequence-number': (None, 'sequence_number', _to_int), 'BlobType': (None, 'blob_type', _to_str), 'Content-Length': (None, 'content_length', _to_int), 'ServerEncrypted': (None, 'server_encrypted', _bool), 'Content-Type': ('content_settings', 'content_type', _to_str), 'Content-Encoding': ('content_settings', 'content_encoding', _to_str), 'Content-Disposition': ('content_settings', 'content_disposition', _to_str), 'Content-Language': ('content_settings', 'content_language', _to_str), 'Content-MD5': ('content_settings', 'content_md5', _to_str), 'Cache-Control': ('content_settings', 'cache_control', _to_str), 'LeaseStatus': ('lease', 'status', _to_str), 'LeaseState': ('lease', 'state', _to_str), 'LeaseDuration': ('lease', 'duration', _to_str), 'CopyId': ('copy', 'id', _to_str), 'CopySource': ('copy', 'source', _to_str), 'CopyStatus': ('copy', 'status', _to_str), 'CopyProgress': ('copy', 'progress', _to_str), 'CopyCompletionTime': ('copy', 'completion_time', _to_str), 'CopyStatusDescription': ('copy', 'status_description', _to_str), 'AccessTier': (None, 'blob_tier', _to_str), 'AccessTierChangeTime': (None, 'blob_tier_change_time', parser.parse), 'AccessTierInferred': (None, 'blob_tier_inferred', _bool), 'ArchiveStatus': (None, 'rehydration_status', _to_str), 'DeletedTime': (None, 'deleted_time', parser.parse), 'RemainingRetentionDays': (None, 
'remaining_retention_days', _to_int), 'Creation-Time': (None, 'creation_time', parser.parse), } def _convert_xml_to_blob_list(response): if response is None or response.body is None: return None blob_list = _list() list_element = ETree.fromstring(response.body) setattr(blob_list, 'next_marker', list_element.findtext('NextMarker')) blobs_element = list_element.find('Blobs') blob_prefix_elements = blobs_element.findall('BlobPrefix') if blob_prefix_elements is not None: for blob_prefix_element in blob_prefix_elements: prefix = BlobPrefix() prefix.name = blob_prefix_element.findtext('Name') blob_list.append(prefix) for blob_element in blobs_element.findall('Blob'): blob = Blob() blob.name = blob_element.findtext('Name') blob.snapshot = blob_element.findtext('Snapshot') deleted = blob_element.findtext('Deleted') if deleted: blob.deleted = _bool(deleted) properties_element = blob_element.find('Properties') if properties_element is not None: for property_element in properties_element: info = LIST_BLOBS_ATTRIBUTE_MAP.get(property_element.tag) if info is None: setattr(blob.properties, property_element.tag, _to_str(property_element.text)) elif info[0] is None: setattr(blob.properties, info[1], info[2](property_element.text)) else: attr = getattr(blob.properties, info[0]) setattr(attr, info[1], info[2](property_element.text)) metadata_root_element = blob_element.find('Metadata') if metadata_root_element is not None: blob.metadata = dict() for metadata_element in metadata_root_element: blob.metadata[metadata_element.tag] = metadata_element.text blob_list.append(blob) return blob_list def _convert_xml_to_blob_name_list(response): if response is None or response.body is None: return None blob_list = _list() list_element = ETree.fromstring(response.body) setattr(blob_list, 'next_marker', list_element.findtext('NextMarker')) blobs_element = list_element.find('Blobs') blob_prefix_elements = blobs_element.findall('BlobPrefix') if blob_prefix_elements is not None: for blob_prefix_element in blob_prefix_elements: blob_list.append(blob_prefix_element.findtext('Name')) for blob_element in blobs_element.findall('Blob'): blob_list.append(blob_element.findtext('Name')) return blob_list def _convert_xml_to_block_list(response): if response is None or response.body is None: return None block_list = BlobBlockList() list_element = ETree.fromstring(response.body) committed_blocks_element = list_element.find('CommittedBlocks') if committed_blocks_element is not None: for block_element in committed_blocks_element.findall('Block'): block_id = _decode_base64_to_text(block_element.findtext('Name', '')) block_size = int(block_element.findtext('Size')) block = BlobBlock(id=block_id, state=BlobBlockState.Committed) block._set_size(block_size) block_list.committed_blocks.append(block) uncommitted_blocks_element = list_element.find('UncommittedBlocks') if uncommitted_blocks_element is not None: for block_element in uncommitted_blocks_element.findall('Block'): block_id = _decode_base64_to_text(block_element.findtext('Name', '')) block_size = int(block_element.findtext('Size')) block = BlobBlock(id=block_id, state=BlobBlockState.Uncommitted) block._set_size(block_size) block_list.uncommitted_blocks.append(block) return block_list def _convert_xml_to_page_ranges(response): if response is None or response.body is None: return None page_list = list() list_element = ETree.fromstring(response.body) for page_range_element in list_element: if page_range_element.tag == 'PageRange': is_cleared = False elif page_range_element.tag == 
'ClearRange': is_cleared = True else: pass page_list.append( PageRange( int(page_range_element.findtext('Start')), int(page_range_element.findtext('End')), is_cleared ) ) return page_list def _parse_account_information(response): account_info = AccountInformation() account_info.sku_name = response.headers['x-ms-sku-name'] account_info.account_kind = response.headers['x-ms-account-kind'] return account_info def _convert_xml_to_user_delegation_key(response): if response is None or response.body is None: return None delegation_key = UserDelegationKey() key_element = ETree.fromstring(response.body) delegation_key.signed_oid = key_element.findtext('SignedOid') delegation_key.signed_tid = key_element.findtext('SignedTid') delegation_key.signed_start = key_element.findtext('SignedStart') delegation_key.signed_expiry = key_element.findtext('SignedExpiry') delegation_key.signed_service = key_element.findtext('SignedService') delegation_key.signed_version = key_element.findtext('SignedVersion') delegation_key.value = key_element.findtext('Value') return delegation_key def _ingest_batch_response(batch_response, batch_sub_requests): parsed_batch_sub_response_list = [] response_delimiter = batch_response.headers.get('content-type').split("=")[1] response_body = batch_response.body.decode('utf-8') sub_response_list = response_body.split("--" + response_delimiter + _HTTP_LINE_ENDING) sub_response_list[len(sub_response_list) - 1] = sub_response_list[len(sub_response_list) - 1].split(_HTTP_LINE_ENDING + "--" + response_delimiter + "--")[0] for sub_response in sub_response_list: if len(sub_response) != 0: http_response = _parse_sub_response_to_http_response(sub_response) is_successful = 200 <= http_response.status < 300 index_of_sub_request = _to_int(http_response.headers.get('Content-ID')) batch_sub_request = batch_sub_requests[index_of_sub_request] parsed_batch_sub_response_list.append(BatchSubResponse(is_successful, http_response, batch_sub_request)) return parsed_batch_sub_response_list
MIT License
tensorflow/model-analysis
tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py
MetricsPlotsAndValidationsWriter
python
def MetricsPlotsAndValidationsWriter( output_paths: Dict[Text, Text], eval_config: config_pb2.EvalConfig, add_metrics_callbacks: Optional[List[types.AddMetricsCallbackType]] = None, metrics_key: Text = constants.METRICS_KEY, plots_key: Text = constants.PLOTS_KEY, attributions_key: Text = constants.ATTRIBUTIONS_KEY, validations_key: Text = constants.VALIDATIONS_KEY, output_file_format: Text = '', rubber_stamp: Optional[bool] = False) -> writer.Writer: return writer.Writer( stage_name='WriteMetricsAndPlots', ptransform=_WriteMetricsPlotsAndValidations( output_paths=output_paths, eval_config=eval_config, add_metrics_callbacks=add_metrics_callbacks or [], metrics_key=metrics_key, plots_key=plots_key, attributions_key=attributions_key, validations_key=validations_key, output_file_format=output_file_format, rubber_stamp=rubber_stamp))
Returns a metrics and plots writer. Note that sharding will be enabled by default if an output_file_format is provided. The files will be named <output_path>-SSSSS-of-NNNNN.<output_file_format> where SSSSS is the shard number and NNNNN is the number of shards. Args: output_paths: Output paths keyed by output key (e.g. 'metrics', 'plots', 'validation'). eval_config: Eval config. add_metrics_callbacks: Optional list of metric callbacks (if used). metrics_key: Name to use for metrics key in Evaluation output. plots_key: Name to use for plots key in Evaluation output. attributions_key: Name to use for attributions key in Evaluation output. validations_key: Name to use for validations key in Evaluation output. output_file_format: File format to use when saving files. Currently 'tfrecord' and 'parquet' are supported. If using parquet, the output metrics and plots files will contain two columns: 'slice_key' and 'serialized_value'. The 'slice_key' column will be a structured column matching the metrics_for_slice_pb2.SliceKey proto. The 'serialized_value' column will contain a serialized MetricsForSlice or PlotsForSlice proto. The validation result file will contain a single column 'serialized_value' which will contain a single serialized ValidationResult proto. rubber_stamp: True if this model is being rubber stamped. When a model is rubber stamped, diff thresholds will be ignored if an associated baseline model is not passed.
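A hedged wiring sketch; the output directory and the empty EvalConfig are placeholders, and in practice the returned writer is handed to the TFMA evaluation pipeline rather than run by hand:

output_paths = {
    constants.METRICS_KEY: '/tmp/eval_out/metrics',        # illustrative paths
    constants.PLOTS_KEY: '/tmp/eval_out/plots',
    constants.VALIDATIONS_KEY: '/tmp/eval_out/validations',
}
writer = MetricsPlotsAndValidationsWriter(
    output_paths=output_paths,
    eval_config=config_pb2.EvalConfig(),                   # placeholder config
    output_file_format='tfrecord')                         # shards as <path>-SSSSS-of-NNNNN.tfrecord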
https://github.com/tensorflow/model-analysis/blob/6814617c50e073f8d039b96b03b19fef39fa0008/tensorflow_model_analysis/writers/metrics_plots_and_validations_writer.py#L513-L562
from __future__ import absolute_import from __future__ import division from __future__ import print_function import itertools import os from typing import Any, Dict, Iterable, Iterator, List, Optional, Text, Tuple, Union from absl import logging import apache_beam as beam import numpy as np import pyarrow as pa import six import tensorflow as tf from tensorflow_model_analysis import constants from tensorflow_model_analysis import types from tensorflow_model_analysis.evaluators import evaluator from tensorflow_model_analysis.evaluators import metrics_validator from tensorflow_model_analysis.metrics import metric_specs from tensorflow_model_analysis.metrics import metric_types from tensorflow_model_analysis.post_export_metrics import metric_keys from tensorflow_model_analysis.proto import config_pb2 from tensorflow_model_analysis.proto import metrics_for_slice_pb2 from tensorflow_model_analysis.proto import validation_result_pb2 from tensorflow_model_analysis.slicer import slicer_lib as slicer from tensorflow_model_analysis.utils import math_util from tensorflow_model_analysis.writers import writer _PARQUET_FORMAT = 'parquet' _TFRECORD_FORMAT = 'tfrecord' _SUPPORTED_FORMATS = (_PARQUET_FORMAT, _TFRECORD_FORMAT) _SLICE_KEY_PARQUET_COLUMN_NAME = 'slice_key' _SERIALIZED_VALUE_PARQUET_COLUMN_NAME = 'serialized_value' _SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME = 'single_slice_specs' _SLICE_KEY_ARROW_TYPE = pa.struct([(pa.field( _SINGLE_SLICE_KEYS_PARQUET_FIELD_NAME, pa.list_( pa.struct([ pa.field('column', pa.string()), pa.field('bytes_value', pa.binary()), pa.field('float_value', pa.float32()), pa.field('int64_value', pa.int64()) ]))))]) _SLICED_PARQUET_SCHEMA = pa.schema([ pa.field(_SLICE_KEY_PARQUET_COLUMN_NAME, _SLICE_KEY_ARROW_TYPE), pa.field(_SERIALIZED_VALUE_PARQUET_COLUMN_NAME, pa.binary()) ]) _UNSLICED_PARQUET_SCHEMA = pa.schema( [pa.field(_SERIALIZED_VALUE_PARQUET_COLUMN_NAME, pa.binary())]) _SliceKeyDictPythonType = Dict[Text, List[Dict[Text, Union[bytes, float, int]]]] def _match_all_files(file_path: Text) -> Text: return file_path + '*' def _parquet_column_iterator(paths: Iterable[str], column_name: str) -> Iterator[pa.Buffer]: dataset = pa.parquet.ParquetDataset(paths) table = dataset.read(columns=[column_name]) for record_batch in table.to_batches(): value_array = record_batch.column(0) for value in value_array: yield value.as_buffer() def _raw_value_iterator( paths: Iterable[Text], output_file_format: Text) -> Iterator[Union[pa.Buffer, bytes]]: if output_file_format == _PARQUET_FORMAT: return _parquet_column_iterator(paths, _SERIALIZED_VALUE_PARQUET_COLUMN_NAME) elif not output_file_format or output_file_format == _TFRECORD_FORMAT: return itertools.chain(*(tf.compat.v1.python_io.tf_record_iterator(path) for path in paths)) raise ValueError('Formats "{}" are currently supported but got ' 'output_file_format={}'.format(_SUPPORTED_FORMATS, output_file_format)) def load_and_deserialize_metrics( output_path: Text, output_file_format: Text = '', slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None ) -> Iterator[metrics_for_slice_pb2.MetricsForSlice]: if tf.io.gfile.isdir(output_path): output_path = os.path.join(output_path, constants.METRICS_KEY) pattern = _match_all_files(output_path) if output_file_format: pattern = pattern + '.' 
+ output_file_format paths = tf.io.gfile.glob(pattern) for value in _raw_value_iterator(paths, output_file_format): metrics = metrics_for_slice_pb2.MetricsForSlice.FromString(value) if slice_specs and not slicer.slice_key_matches_slice_specs( slicer.deserialize_slice_key(metrics.slice_key), slice_specs): continue yield metrics def load_and_deserialize_plots( output_path: Text, output_file_format: Text = '', slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None ) -> Iterator[metrics_for_slice_pb2.PlotsForSlice]: if tf.io.gfile.isdir(output_path): output_path = os.path.join(output_path, constants.PLOTS_KEY) pattern = _match_all_files(output_path) if output_file_format: pattern = pattern + '.' + output_file_format paths = tf.io.gfile.glob(pattern) for value in _raw_value_iterator(paths, output_file_format): plots = metrics_for_slice_pb2.PlotsForSlice.FromString(value) if slice_specs and not slicer.slice_key_matches_slice_specs( slicer.deserialize_slice_key(plots.slice_key), slice_specs): continue yield plots def load_and_deserialize_attributions( output_path: Text, output_file_format: Text = '', slice_specs: Optional[Iterable[slicer.SingleSliceSpec]] = None ) -> Iterator[metrics_for_slice_pb2.AttributionsForSlice]: if tf.io.gfile.isdir(output_path): output_path = os.path.join(output_path, constants.ATTRIBUTIONS_KEY) pattern = _match_all_files(output_path) if output_file_format: pattern = pattern + '.' + output_file_format paths = tf.io.gfile.glob(pattern) for value in _raw_value_iterator(paths, output_file_format): attributions = metrics_for_slice_pb2.AttributionsForSlice.FromString(value) if slice_specs and not slicer.slice_key_matches_slice_specs( slicer.deserialize_slice_key(attributions.slice_key), slice_specs): continue yield attributions def load_and_deserialize_validation_result( output_path: Text, output_file_format: Text = '') -> validation_result_pb2.ValidationResult: if tf.io.gfile.isdir(output_path): output_path = os.path.join(output_path, constants.VALIDATIONS_KEY) pattern = _match_all_files(output_path) if output_file_format: pattern = pattern + '.' 
+ output_file_format validation_records = [] paths = tf.io.gfile.glob(pattern) for value in _raw_value_iterator(paths, output_file_format): validation_records.append( validation_result_pb2.ValidationResult.FromString(value)) assert len(validation_records) == 1 return validation_records[0] def _convert_to_array_value( array: np.ndarray) -> metrics_for_slice_pb2.ArrayValue: result = metrics_for_slice_pb2.ArrayValue() result.shape[:] = array.shape if array.dtype == 'int32': result.data_type = metrics_for_slice_pb2.ArrayValue.INT32 result.int32_values[:] = array.flatten() elif array.dtype == 'int64': result.data_type = metrics_for_slice_pb2.ArrayValue.INT64 result.int64_values[:] = array.flatten() elif array.dtype == 'float32': result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT32 result.float32_values[:] = array.flatten() elif array.dtype == 'float64': result.data_type = metrics_for_slice_pb2.ArrayValue.FLOAT64 result.float64_values[:] = array.flatten() else: result.data_type = metrics_for_slice_pb2.ArrayValue.BYTES result.bytes_values[:] = [ tf.compat.as_bytes(x) for x in array.astype(six.text_type).flatten() ] return result def convert_metric_value_to_proto( value: types.MetricValueType) -> metrics_for_slice_pb2.MetricValue: if isinstance(value, types.StructuredMetricValue): return value.to_proto() result = metrics_for_slice_pb2.MetricValue() if isinstance(value, six.binary_type): result.bytes_value = value elif isinstance(value, six.text_type): result.bytes_value = value.encode('utf8') elif isinstance(value, np.ndarray): result.array_value.CopyFrom(_convert_to_array_value(value)) else: try: result.double_value.value = float(value) except (TypeError, ValueError) as e: result.unknown_type.value = str(value) result.unknown_type.error = e.message return result def convert_slice_metrics_to_proto( metrics: Tuple[slicer.SliceKeyOrCrossSliceKeyType, Dict[Any, Any]], add_metrics_callbacks: List[types.AddMetricsCallbackType] ) -> metrics_for_slice_pb2.MetricsForSlice: result = metrics_for_slice_pb2.MetricsForSlice() slice_key, slice_metrics = metrics if slicer.is_cross_slice_key(slice_key): result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key)) else: result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key)) slice_metrics = slice_metrics.copy() if metric_keys.ERROR_METRIC in slice_metrics: logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_metrics[metric_keys.ERROR_METRIC]) result.metrics[metric_keys.ERROR_METRIC].debug_message = slice_metrics[ metric_keys.ERROR_METRIC] return result if add_metrics_callbacks and (not any( isinstance(k, metric_types.MetricKey) for k in slice_metrics.keys())): for add_metrics_callback in add_metrics_callbacks: if hasattr(add_metrics_callback, 'populate_stats_and_pop'): add_metrics_callback.populate_stats_and_pop(slice_key, slice_metrics, result.metrics) for key in sorted(slice_metrics.keys()): value = slice_metrics[key] if isinstance(value, types.ValueWithTDistribution): unsampled_value = value.unsampled_value _, lower_bound, upper_bound = ( math_util.calculate_confidence_interval(value)) confidence_interval = metrics_for_slice_pb2.ConfidenceInterval( lower_bound=convert_metric_value_to_proto(lower_bound), upper_bound=convert_metric_value_to_proto(upper_bound), standard_error=convert_metric_value_to_proto( value.sample_standard_deviation), degrees_of_freedom={'value': value.sample_degrees_of_freedom}) metric_value = convert_metric_value_to_proto(unsampled_value) if isinstance(key, metric_types.MetricKey): 
result.metric_keys_and_values.add( key=key.to_proto(), value=metric_value, confidence_interval=confidence_interval) else: if metric_value.WhichOneof('type') == 'double_value': metric_value.bounded_value.value.value = unsampled_value metric_value.bounded_value.lower_bound.value = lower_bound metric_value.bounded_value.upper_bound.value = upper_bound metric_value.bounded_value.methodology = ( metrics_for_slice_pb2.BoundedValue.POISSON_BOOTSTRAP) result.metrics[key].CopyFrom(metric_value) else: metric_value = convert_metric_value_to_proto(value) if isinstance(key, metric_types.MetricKey): result.metric_keys_and_values.add( key=key.to_proto(), value=metric_value) else: result.metrics[key].CopyFrom(metric_value) return result def convert_slice_plots_to_proto( plots: Tuple[slicer.SliceKeyOrCrossSliceKeyType, Dict[Any, Any]], add_metrics_callbacks: List[types.AddMetricsCallbackType] ) -> metrics_for_slice_pb2.PlotsForSlice: result = metrics_for_slice_pb2.PlotsForSlice() slice_key, slice_plots = plots if slicer.is_cross_slice_key(slice_key): result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key)) else: result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key)) slice_plots = slice_plots.copy() if metric_keys.ERROR_METRIC in slice_plots: logging.warning('Error for slice: %s with error message: %s ', slice_key, slice_plots[metric_keys.ERROR_METRIC]) error_metric = slice_plots.pop(metric_keys.ERROR_METRIC) result.plots[metric_keys.ERROR_METRIC].debug_message = error_metric return result if add_metrics_callbacks and (not any( isinstance(k, metric_types.MetricKey) for k in slice_plots.keys())): for add_metrics_callback in add_metrics_callbacks: if hasattr(add_metrics_callback, 'populate_plots_and_pop'): add_metrics_callback.populate_plots_and_pop(slice_plots, result.plots) plots_by_key = {} for key in sorted(slice_plots.keys()): value = slice_plots[key] if isinstance(key, metric_types.MetricKey): parent_key = key._replace(name=None) else: continue if parent_key not in plots_by_key: key_and_value = result.plot_keys_and_values.add() key_and_value.key.CopyFrom(parent_key.to_proto()) plots_by_key[parent_key] = key_and_value.value if isinstance(value, metrics_for_slice_pb2.CalibrationHistogramBuckets): plots_by_key[parent_key].calibration_histogram_buckets.CopyFrom(value) slice_plots.pop(key) elif isinstance(value, metrics_for_slice_pb2.ConfusionMatrixAtThresholds): plots_by_key[parent_key].confusion_matrix_at_thresholds.CopyFrom(value) slice_plots.pop(key) elif isinstance( value, metrics_for_slice_pb2.MultiClassConfusionMatrixAtThresholds): plots_by_key[ parent_key].multi_class_confusion_matrix_at_thresholds.CopyFrom(value) slice_plots.pop(key) elif isinstance( value, metrics_for_slice_pb2.MultiLabelConfusionMatrixAtThresholds): plots_by_key[ parent_key].multi_label_confusion_matrix_at_thresholds.CopyFrom(value) slice_plots.pop(key) if slice_plots: if add_metrics_callbacks is None: add_metrics_callbacks = [] raise NotImplementedError( 'some plots were not converted or popped. keys: %s. 
' 'add_metrics_callbacks were: %s' % ( slice_plots.keys(), [ x.name for x in add_metrics_callbacks ])) return result def convert_slice_attributions_to_proto( attributions: Tuple[slicer.SliceKeyOrCrossSliceKeyType, Dict[Any, Dict[Text, Any]]] ) -> metrics_for_slice_pb2.AttributionsForSlice: result = metrics_for_slice_pb2.AttributionsForSlice() slice_key, slice_attributions = attributions if slicer.is_cross_slice_key(slice_key): result.cross_slice_key.CopyFrom(slicer.serialize_cross_slice_key(slice_key)) else: result.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key)) slice_attributions = slice_attributions.copy() for key in sorted(slice_attributions.keys()): key_and_value = result.attributions_keys_and_values.add() key_and_value.key.CopyFrom(key.to_proto()) for feature, value in slice_attributions[key].items(): attribution_value = metrics_for_slice_pb2.MetricValue() if isinstance(value, six.binary_type): attribution_value.bytes_value = value elif isinstance(value, six.text_type): attribution_value.bytes_value = value.encode('utf8') elif isinstance(value, np.ndarray) and value.size != 1: attribution_value.array_value.CopyFrom(_convert_to_array_value(value)) else: try: attribution_value.double_value.value = float(value) except (TypeError, ValueError) as e: attribution_value.unknown_type.value = str(value) attribution_value.unknown_type.error = e.message key_and_value.values[feature].CopyFrom(attribution_value) return result
Apache License 2.0
google-research/federated
analytics/utils/non_iid_histograms.py
generate_non_iid_distributions_dirichlet
python
def generate_non_iid_distributions_dirichlet( num_users: int, ref_distribution: np.ndarray, distribution_delta: float, rng=np.random.default_rng()) -> np.ndarray: if num_users <= 0: raise ValueError(f'num_users must be positive. Found num_users={num_users}.') if distribution_delta < 0: raise ValueError(f'distribution_delta must be non-negative. ' f'Found distribution_delta={distribution_delta}.') if ref_distribution.ndim != 1: raise ValueError(f'ref_distribution must be a 1-D array. ' f'Found dimension={ref_distribution.ndim}.') if (ref_distribution < 0).any() | (ref_distribution > 1).any(): raise ValueError('Expecting elements in ref_distribution to be in [0, 1].') if np.sum(ref_distribution) != 1: raise ValueError(f'ref_distribution should sum up to 1. ' f'Found the sum to be {np.sum(ref_distribution)}.') if distribution_delta > 0: distributions = rng.dirichlet( alpha=ref_distribution / distribution_delta, size=num_users) else: distributions = np.tile(ref_distribution, (num_users, 1)) return distributions
Generate discrete distributions around a reference distribution. Args: num_users: An integer indicating the total number of users. Must be positive. ref_distribution: A 1-D `numpy` array representing the reference discrete distribution. Must be non-negative and sum to one. distribution_delta: A non-negative `float` indicating the level of perturbation around the reference distribution. distribution_delta=0 means all distributions are identical. rng: A numpy random generator. Returns: distributions: A 2-D `numpy` array of shape (num_users, len(ref_distribution)) holding one distribution per row. If distribution_delta=0, every row equals `ref_distribution`. Otherwise, the rows are drawn from Dirichlet(`ref_distribution`/`distribution_delta`).
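A small runnable sketch with made-up numbers, drawing three user distributions around a four-bucket reference:

rng = np.random.default_rng(seed=0)
ref = np.array([0.4, 0.3, 0.2, 0.1])
dists = generate_non_iid_distributions_dirichlet(
    num_users=3, ref_distribution=ref, distribution_delta=0.1, rng=rng)
print(dists.shape)        # (3, 4): one distribution per user
print(dists.sum(axis=1))  # each row sums to ~1.0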
https://github.com/google-research/federated/blob/909953fa8945cfac01328e0a6d878e1dc0376c3c/analytics/utils/non_iid_histograms.py#L64-L106
import numpy as np def generate_non_iid_poisson_counts( num_users: int, iid_param: float, avg_count: float, rng: np.random.Generator = np.random.default_rng() ) -> np.ndarray: if num_users <= 0: raise ValueError(f'num_users must be positive.' f'Found num_users={num_users}.') if iid_param < 0: raise ValueError(f'iid_param must be non-negative.' f'Found iid_param={iid_param}') if avg_count < 0: raise ValueError(f'avg_count must be non-negative.' f'Found avg_count={avg_count}') if iid_param > 0: lambdas = rng.dirichlet(alpha=np.ones(num_users) / iid_param) counts = rng.poisson(lam=num_users * lambdas * avg_count) else: counts = rng.poisson(lam=avg_count, size=num_users) return counts
Apache License 2.0
nervanasystems/ngraph-neon
src/neon/frontend/activation.py
Softmax.__call__
python
def __call__(self, x): return ng.softmax(x)
Returns the Softmax value. Arguments: x (Tensor or optree): Input value Returns: Tensor or optree: Output activation
https://github.com/nervanasystems/ngraph-neon/blob/8988ab90ee81c8b219ea5c374702e56d7f383302/src/neon/frontend/activation.py#L154-L164
from __future__ import division from builtins import object import neon as ng class Rectlin(object): def __init__(self, slope=0, **kwargs): self.slope = slope def __call__(self, x): if self.slope == 0: return ng.relu(x, axes=x.axes) else: return ng.maximum(x, 0) + self.slope * ng.minimum(0, x) class Rectlinclip(object): def __init__(self, slope=0, cutoff=20.0, name=None): self.cutoff = cutoff self.slope = slope def __call__(self, x): if self.slope == 0: return ng.minimum(ng.maximum(x, 0), self.cutoff) else: return ng.minimum(ng.maximum(x, 0) + self.slope * ng.minimum(x, 0), self.cutoff) class Identity(object): def __call__(self, x): return x class Explin(object): def __init__(self, alpha=1.0): self.alpha = alpha def __call__(self, x): return ng.maximum(x, 0) + self.alpha * (ng.exp(ng.minimum(x, 0)) - 1) class Normalizer(object): def __init__(self, divisor=128.): self.divisor = divisor def __call__(self, x): return x / self.divisor class Softmax(object):
Apache License 2.0
perslev/u-time
utime/sequences/multi_sequence.py
ValidationMultiSequence.batch_size
python
def batch_size(self, value): for s in self.sequences: s.batch_size = value
Updates the batch size on all stored sequence objects. Args: value: (int) New batch size to set
https://github.com/perslev/u-time/blob/f7c8e3f1368f43226872a69b0fbb8c29990e4bd9/utime/sequences/multi_sequence.py#L210-L218
import numpy as np from utime.sequences.base_sequence import _BaseSequence from mpunet.logging import ScreenLogger from utime.errors import NotLoadedError def _assert_comparable_sequencers(sequencers): tests = (([], "margin"), ([], "batch_size"), ([], "n_classes"), ([], "data_per_period"), ([], "n_channels")) for s in sequencers: for list_, key in tests: list_.append(getattr(s, key)) for list_, key in tests: if not all(np.asarray(list_) == list_[0]): raise ValueError("All sequences must have the same '{}' " "property. Got {}.".format(key, list_)) class MultiSequence(_BaseSequence): def __init__(self, sequencers, batch_size, dataset_sample_alpha=0.5, no_log=False, logger=None): _assert_comparable_sequencers(sequencers) super().__init__() self.logger = logger or ScreenLogger() self.sequences = sequencers self.sequences_idxs = np.arange(len(self.sequences)) self.batch_size = batch_size self.margin = sequencers[0].margin self.n_classes = sequencers[0].n_classes n_samples = [len(s.dataset_queue) for s in sequencers] linear = n_samples / np.sum(n_samples) uniform = np.array([1/len(self.sequences)] * len(self.sequences)) self.alpha = dataset_sample_alpha self.sample_prob = (1-self.alpha) * linear + self.alpha * uniform for s in self.sequences: s.batch_size = 1 if not no_log: self.log() def log(self): self.logger("[*] MultiSequence initialized:\n" " --- Contains {} sequences\n" " --- Sequence IDs: {}\n" " --- Sequence sample probs (alpha={}): {}\n" " --- Batch shape: {}" "".format(len(self.sequences), ", ".join(s.identifier for s in self.sequences), self.alpha, self.sample_prob, self.batch_shape)) def __len__(self): try: return np.sum([len(s) for s in self.sequences]) except NotLoadedError: return 10000 @property def batch_shape(self): bs = self.sequences[0].batch_shape bs[0] = self.batch_size return bs @property def total_periods(self): return np.sum([s.total_periods for s in self.sequences]) def get_class_counts(self): counts = np.zeros(shape=[self.sequences[0].n_classes], dtype=np.int) for seq in self.sequences: counts += seq.get_class_counts() return counts def get_class_frequencies(self): counts = self.get_class_counts() return counts / np.sum(counts) def __getitem__(self, idx): self.seed() sequences_idxs = np.random.choice(self.sequences_idxs, size=self.batch_size, replace=True, p=self.sample_prob) X, y = self.get_empty_batch_arrays() for i, sequence_idx in enumerate(sequences_idxs): sequence = self.sequences[sequence_idx] try: xx, yy = sequence.get_class_balanced_random_period() except AttributeError: xx, yy = sequence.get_random_period() X[i] = xx y[i] = yy return self.sequences[0].process_batch(X, y) class ValidationMultiSequence: def __init__(self, sequences, no_log=False, logger=None): _assert_comparable_sequencers(sequences) self.sequences = sequences self.IDs = [s.identifier.split("/")[0] for s in self.sequences] self.n_classes = self.sequences[0].n_classes self.logger = logger or ScreenLogger() if not no_log: self.log() def log(self): self.logger("[*] ValidationMultiSequence initialized:\n" " --- Contains {} sequences\n" " --- Sequence IDs: {}" "".format(len(self.sequences), ", ".join(self.IDs))) def __len__(self): return np.sum([len(s) for s in self.sequences]) def get_minimum_total_periods(self): vspe = [] for vs in self.sequences: vspe.append(vs.total_periods) return np.min(vspe) @property def batch_size(self): return self.sequences[0].batch_size @batch_size.setter
MIT License
googleapis/python-aiplatform
google/cloud/aiplatform_v1beta1/services/job_service/client.py
JobServiceClient.hyperparameter_tuning_job_path
python
def hyperparameter_tuning_job_path( project: str, location: str, hyperparameter_tuning_job: str, ) -> str: return "projects/{project}/locations/{location}/hyperparameterTuningJobs/{hyperparameter_tuning_job}".format( project=project, location=location, hyperparameter_tuning_job=hyperparameter_tuning_job, )
Returns a fully-qualified hyperparameter_tuning_job string.
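A quick sketch with illustrative resource names; the helper is a plain string formatter, so no client connection is needed:

path = JobServiceClient.hyperparameter_tuning_job_path(
    project='my-project',                  # illustrative values
    location='us-central1',
    hyperparameter_tuning_job='456')
# -> 'projects/my-project/locations/us-central1/hyperparameterTuningJobs/456'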
https://github.com/googleapis/python-aiplatform/blob/c1c2326b2342ab1b6f4c4ce3852e63376eae740d/google/cloud/aiplatform_v1beta1/services/job_service/client.py#L280-L288
from collections import OrderedDict from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.transport import mtls from google.auth.transport.grpc import SslCredentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.api_core import operation as gac_operation from google.api_core import operation_async from google.cloud.aiplatform_v1beta1.services.job_service import pagers from google.cloud.aiplatform_v1beta1.types import batch_prediction_job from google.cloud.aiplatform_v1beta1.types import ( batch_prediction_job as gca_batch_prediction_job, ) from google.cloud.aiplatform_v1beta1.types import completion_stats from google.cloud.aiplatform_v1beta1.types import custom_job from google.cloud.aiplatform_v1beta1.types import custom_job as gca_custom_job from google.cloud.aiplatform_v1beta1.types import data_labeling_job from google.cloud.aiplatform_v1beta1.types import ( data_labeling_job as gca_data_labeling_job, ) from google.cloud.aiplatform_v1beta1.types import encryption_spec from google.cloud.aiplatform_v1beta1.types import explanation from google.cloud.aiplatform_v1beta1.types import hyperparameter_tuning_job from google.cloud.aiplatform_v1beta1.types import ( hyperparameter_tuning_job as gca_hyperparameter_tuning_job, ) from google.cloud.aiplatform_v1beta1.types import io from google.cloud.aiplatform_v1beta1.types import job_service from google.cloud.aiplatform_v1beta1.types import job_state from google.cloud.aiplatform_v1beta1.types import machine_resources from google.cloud.aiplatform_v1beta1.types import manual_batch_tuning_parameters from google.cloud.aiplatform_v1beta1.types import model_deployment_monitoring_job from google.cloud.aiplatform_v1beta1.types import ( model_deployment_monitoring_job as gca_model_deployment_monitoring_job, ) from google.cloud.aiplatform_v1beta1.types import model_monitoring from google.cloud.aiplatform_v1beta1.types import operation as gca_operation from google.cloud.aiplatform_v1beta1.types import study from google.protobuf import duration_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 from google.type import money_pb2 from .transports.base import JobServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import JobServiceGrpcTransport from .transports.grpc_asyncio import JobServiceGrpcAsyncIOTransport class JobServiceClientMeta(type): _transport_registry = OrderedDict() _transport_registry["grpc"] = JobServiceGrpcTransport _transport_registry["grpc_asyncio"] = JobServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[JobServiceTransport]: if label: return cls._transport_registry[label] return next(iter(cls._transport_registry.values())) class JobServiceClient(metaclass=JobServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint mtls_endpoint_re = re.compile( r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
) m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint if sandbox: return api_endpoint.replace( "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = "aiplatform.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( DEFAULT_ENDPOINT ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials return cls(*args, **kwargs) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> JobServiceTransport: return self._transport @staticmethod def batch_prediction_job_path( project: str, location: str, batch_prediction_job: str, ) -> str: return "projects/{project}/locations/{location}/batchPredictionJobs/{batch_prediction_job}".format( project=project, location=location, batch_prediction_job=batch_prediction_job, ) @staticmethod def parse_batch_prediction_job_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/batchPredictionJobs/(?P<batch_prediction_job>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def custom_job_path(project: str, location: str, custom_job: str,) -> str: return "projects/{project}/locations/{location}/customJobs/{custom_job}".format( project=project, location=location, custom_job=custom_job, ) @staticmethod def parse_custom_job_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/customJobs/(?P<custom_job>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def data_labeling_job_path( project: str, location: str, data_labeling_job: str, ) -> str: return "projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}".format( project=project, location=location, data_labeling_job=data_labeling_job, ) @staticmethod def parse_data_labeling_job_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataLabelingJobs/(?P<data_labeling_job>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def dataset_path(project: str, location: str, dataset: str,) -> str: return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def endpoint_path(project: str, location: str, endpoint: str,) -> str: return "projects/{project}/locations/{location}/endpoints/{endpoint}".format( project=project, location=location, endpoint=endpoint, ) @staticmethod def parse_endpoint_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/endpoints/(?P<endpoint>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod
Apache License 2.0
mecademic/python_driver
MecademicRobot/RobotController.py
RobotController.SetBlending
python
def SetBlending(self, p): raw_cmd = 'SetBlending' cmd = self._build_command(raw_cmd,[p]) return self.exchange_msg(cmd)
Sets the blending of the Mecademic Robot. Parameters ---------- p : int Enable (1-100) or disable (0) the Mecademic Robot's blending. Returns ------- response : string The received, decoded response.
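A hedged session sketch; the robot's IP address and the surrounding activation calls are illustrative:

robot = RobotController('192.168.0.100')   # assumed robot address
if robot.connect():
    robot.ActivateRobot()
    robot.home()
    response = robot.SetBlending(50)       # sends 'SetBlending(50)' and waits for the reply
    robot.DeactivateRobot()
    robot.disconnect()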
https://github.com/mecademic/python_driver/blob/da81cc3058ecb81ac5ba1101a6d4dfff4283e192/MecademicRobot/RobotController.py#L683-L699
import socket import time class RobotController: def __init__(self, address): self.address = address self.socket = None self.EOB = 1 self.EOM = 1 self.error = False self.queue = False def is_in_error(self): return self.error def ResetError(self): self.error = False cmd = 'ResetError' response = self.exchange_msg(cmd) reset_success = self._response_contains(response, ['The error was reset', 'There was no error to reset']) if reset_success: self.error = False else: self.error = True return response def connect(self): try: self.socket = socket.socket() self.socket.settimeout(0.1) try: self.socket.connect((self.address, 10000)) except socket.timeout: raise TimeoutError if self.socket is None: raise RuntimeError self.socket.settimeout(10) try: response = self.socket.recv(1024).decode('ascii') except socket.timeout: raise RuntimeError if self._response_contains(response, ['[3001]']): print(f'Another user is already connected, closing connection.') elif self._response_contains(response, ['[3000]']): return True else: print(f'Unexpected code returned.') print(f'response: {response}') raise RuntimeError except TimeoutError: return False except RuntimeError: return False def disconnect(self): if(self.socket is not None): self.socket.close() self.socket = None @staticmethod def _response_contains(response, code_list): response_found = False for code in code_list: if response.find(code) != -1: response_found = True break return response_found def _send(self, cmd): if self.socket is None or self.error: return False cmd = cmd + '\0' status = 0 while status == 0: try: status = self.socket.send(cmd.encode('ascii')) except: break if status != 0: return True return False def _receive(self, answer_list, delay=20): if self.socket is None: return response_list = [] response_found = False for x in answer_list: response_list.append(f'[{str(x)}]') error_found = False error_list = [f'[{str(i)}]' for i in range(1000, 1039)]+[f'[{str(i)}]' for i in [3001,3003,3005,3009,3014,3026]] self.socket.settimeout(delay) while not response_found and not error_found: try: response = self.socket.recv(1024).decode('ascii') except socket.timeout: return if(len(response_list)!=0): response_found = self._response_contains(response, response_list) error_found = self._response_contains(response, error_list) if error_found: self.error = True return response def exchange_msg(self, cmd, delay=20, decode=True): response_list = self._get_answer_list(cmd) if(not self.error): status = self._send(cmd) if status is True: if self.queue: return else: answer = self._receive(response_list, delay) if answer is not None: for response in response_list: if self._response_contains(answer, [str(response)]): if(decode): return self._decode_msg(answer, response) else: return answer error_list = [str(i) for i in range(1000, 1039)]+[str(i) for i in [3001,3003,3005,3009,3014,3026]] for response in error_list: if self._response_contains(answer, [str(response)]): if(decode): return self._decode_msg(answer, response) else: return answer else: if(len(response_list) == 0): return else: return self.disconnect() time.sleep(1) self.connect() return def _build_command(self, cmd, arg_list=[]): command = cmd if(len(arg_list)!=0): command = command + '(' for index in range(0, (len(arg_list)-1)): command = command+str(arg_list[index])+',' command = command+str(arg_list[-1])+')' return command def _decode_msg(self, response, response_key): code = response.replace('['+str(response_key)+'][', '').replace(']', '').replace('\x00', '') code_list = code.split(',') 
if(response_key == 2026 or response_key == 2027): code_list_float = tuple((float(x) for x in code_list)) return code_list_float elif(response_key == 2029 or response_key == 2007 or response_key == 2079): code_list_int = tuple((int(x) for x in code_list)) return code_list_int else: return code def _get_answer_list(self, command): if(command.find('ActivateRobot') != -1): return [2000,2001] elif(command.find('ActivateSim')!= -1): return [2045] elif(command.find('ClearMotion')!= -1): return [2044] elif(command.find('DeactivateRobot')!= -1): return [2004] elif(command.find('BrakesOn')!= -1): return [2010] elif(command.find('BrakesOff')!= -1): return [2008] elif(command.find('GetConf')!= -1): return [2029] elif(command.find('GetJoints')!= -1): return [2026] elif(command.find('GetStatusRobot')!= -1): return [2007] elif(command.find('GetStatusGripper')!= -1): return [2079] elif(command.find('GetPose')!= -1): return [2027] elif(command.find('Home')!= -1): return [2002,2003] elif(command.find('PauseMotion')!= -1): answer_list = [2042] if(self.EOM == 1): answer_list.append(3004) return answer_list elif(command.find('ResetError')!= -1): return [2005,2006] elif(command.find('ResumeMotion')!= -1): return [2043] elif(command.find('SetEOB')!= -1): return [2054,2055] elif(command.find('SetEOM')!= -1): return [2052,2053] else: answer_list = [] if(self.EOB==1): answer_list.append(3012) if(self.EOM==1): for name in ['MoveJoints','MoveLin','MoveLinRelTRF','MoveLinRelWRF','MovePose','SetCartAcc','SetJointAcc','SetTRF','SetWRF']: if(command.find(name) != -1): answer_list.append(3004) break return answer_list def ActivateRobot(self): cmd = 'ActivateRobot' return self.exchange_msg(cmd) def DeactivateRobot(self): cmd = 'DeactivateRobot' return self.exchange_msg(cmd) def ActivateSim(self): cmd = 'ActivateSim' return self.exchange_msg(cmd) def DeactivateSim(self): cmd = 'DeactivateSim' return self.exchange_msg(cmd) def SwitchToEtherCAT(self): cmd = 'SwitchToEtherCAT' return self.exchange_msg(cmd) def SetEOB(self, e): if(e == 1): self.EOB = 1 else: self.EOB = 0 raw_cmd = 'SetEOB' cmd = self._build_command(raw_cmd,[e]) return self.exchange_msg(cmd) def SetEOM(self, e): if(e == 1): self.EOM = 1 else: self.EOM = 0 raw_cmd = 'SetEOM' cmd = self._build_command(raw_cmd,[e]) return self.exchange_msg(cmd) def home(self): cmd = 'Home' return self.exchange_msg(cmd) def Delay(self, t): if(not isinstance(t,float)): t = float(t) raw_cmd = 'Delay' cmd = self._build_command(raw_cmd,[t]) return self.exchange_msg(cmd, t*2) def GripperOpen(self): cmd = 'GripperOpen' return self.exchange_msg(cmd) def GripperClose(self): cmd = 'GripperClose' return self.exchange_msg(cmd) def MoveJoints(self, theta_1, theta_2, theta_3, theta_4, theta_5, theta_6): raw_cmd = 'MoveJoints' cmd = self._build_command(raw_cmd,[theta_1,theta_2,theta_3,theta_4,theta_5,theta_6]) return self.exchange_msg(cmd) def MoveLin(self, x, y, z, alpha, beta, gamma): raw_cmd = 'MoveLin' cmd = self._build_command(raw_cmd,[x,y,z,alpha,beta,gamma]) return self.exchange_msg(cmd) def MoveLinRelTRF(self, x, y, z, alpha, beta, gamma): raw_cmd = 'MoveLinRelTRF' cmd = self._build_command(raw_cmd,[x,y,z,alpha,beta,gamma]) return self.exchange_msg(cmd) def MoveLinRelWRF(self, x, y, z, alpha, beta, gamma): raw_cmd = 'MoveLinRelWRF' cmd = self._build_command(raw_cmd,[x,y,z,alpha,beta,gamma]) return self.exchange_msg(cmd) def MovePose(self, x, y, z, alpha, beta, gamma): raw_cmd = 'MovePose' cmd = self._build_command(raw_cmd,[x,y,z,alpha,beta,gamma]) return self.exchange_msg(cmd)
MIT License
uavcan/pyuavcan
pyuavcan/presentation/_presentation.py
Presentation.make_subscriber_with_fixed_subject_id
python
def make_subscriber_with_fixed_subject_id( self, dtype: typing.Type[FixedPortMessageClass], queue_capacity: typing.Optional[int] = None ) -> Subscriber[FixedPortMessageClass]: return self.make_subscriber( dtype=dtype, subject_id=self._get_fixed_port_id(dtype), queue_capacity=queue_capacity )
A wrapper for :meth:`make_subscriber` that uses the fixed subject-ID associated with this type. Raises a TypeError if the type has no fixed subject-ID.
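An illustrative usage sketch (the transport object and the DSDL-generated `uavcan.node.Heartbeat_1_0` type are assumptions, not part of this record); the wrapper just saves the caller from spelling out the subject-ID:

import uavcan.node  # hypothetical compiled DSDL namespace
pres = Presentation(transport)  # some already-initialized transport
# equivalent to pres.make_subscriber(uavcan.node.Heartbeat_1_0, subject_id=<its fixed port-ID>)
sub = pres.make_subscriber_with_fixed_subject_id(uavcan.node.Heartbeat_1_0)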
https://github.com/uavcan/pyuavcan/blob/9da3cf579b65cae4067b16c621cc3321156f17d4/pyuavcan/presentation/_presentation.py#L327-L336
from __future__ import annotations import typing import logging import asyncio import pyuavcan.util import pyuavcan.dsdl import pyuavcan.transport from ._port import OutgoingTransferIDCounter, PortFinalizer, Closable, Port from ._port import Publisher, PublisherImpl from ._port import Subscriber, SubscriberImpl from ._port import Client, ClientImpl from ._port import Server MessageClass = typing.TypeVar("MessageClass", bound=pyuavcan.dsdl.CompositeObject) ServiceClass = typing.TypeVar("ServiceClass", bound=pyuavcan.dsdl.ServiceObject) FixedPortMessageClass = typing.TypeVar("FixedPortMessageClass", bound=pyuavcan.dsdl.FixedPortCompositeObject) FixedPortServiceClass = typing.TypeVar("FixedPortServiceClass", bound=pyuavcan.dsdl.FixedPortServiceObject) _logger = logging.getLogger(__name__) class Presentation: def __init__(self, transport: pyuavcan.transport.Transport) -> None: self._transport = transport self._closed = False self._output_transfer_id_map: typing.Dict[ pyuavcan.transport.OutputSessionSpecifier, OutgoingTransferIDCounter ] = {} self._registry: typing.Dict[ typing.Tuple[typing.Type[Port[pyuavcan.dsdl.CompositeObject]], pyuavcan.transport.SessionSpecifier], Closable, ] = {} @property def output_transfer_id_map( self, ) -> typing.Dict[pyuavcan.transport.OutputSessionSpecifier, OutgoingTransferIDCounter]: return self._output_transfer_id_map @property def transport(self) -> pyuavcan.transport.Transport: return self._transport @property def loop(self) -> asyncio.AbstractEventLoop: return self._transport.loop def make_publisher(self, dtype: typing.Type[MessageClass], subject_id: int) -> Publisher[MessageClass]: if issubclass(dtype, pyuavcan.dsdl.ServiceObject): raise TypeError(f"Not a message type: {dtype}") self._raise_if_closed() _logger.debug("%s: Constructing new publisher for %r at subject-ID %d", self, dtype, subject_id) data_specifier = pyuavcan.transport.MessageDataSpecifier(subject_id) session_specifier = pyuavcan.transport.OutputSessionSpecifier(data_specifier, None) try: impl = self._registry[Publisher, session_specifier] assert isinstance(impl, PublisherImpl) except LookupError: transport_session = self._transport.get_output_session( session_specifier, self._make_payload_metadata(dtype) ) transfer_id_counter = self._output_transfer_id_map.setdefault( session_specifier, OutgoingTransferIDCounter() ) impl = PublisherImpl( dtype=dtype, transport_session=transport_session, transfer_id_counter=transfer_id_counter, finalizer=self._make_finalizer(Publisher, session_specifier), loop=self.loop, ) self._registry[Publisher, session_specifier] = impl assert isinstance(impl, PublisherImpl) return Publisher(impl, self.loop) def make_subscriber( self, dtype: typing.Type[MessageClass], subject_id: int, queue_capacity: typing.Optional[int] = None ) -> Subscriber[MessageClass]: if issubclass(dtype, pyuavcan.dsdl.ServiceObject): raise TypeError(f"Not a message type: {dtype}") self._raise_if_closed() _logger.debug( "%s: Constructing new subscriber for %r at subject-ID %d with queue limit %s", self, dtype, subject_id, queue_capacity, ) data_specifier = pyuavcan.transport.MessageDataSpecifier(subject_id) session_specifier = pyuavcan.transport.InputSessionSpecifier(data_specifier, None) try: impl = self._registry[Subscriber, session_specifier] assert isinstance(impl, SubscriberImpl) except LookupError: transport_session = self._transport.get_input_session(session_specifier, self._make_payload_metadata(dtype)) impl = SubscriberImpl( dtype=dtype, transport_session=transport_session, 
finalizer=self._make_finalizer(Subscriber, session_specifier), loop=self.loop, ) self._registry[Subscriber, session_specifier] = impl assert isinstance(impl, SubscriberImpl) return Subscriber(impl=impl, loop=self.loop, queue_capacity=queue_capacity) def make_client( self, dtype: typing.Type[ServiceClass], service_id: int, server_node_id: int ) -> Client[ServiceClass]: if not issubclass(dtype, pyuavcan.dsdl.ServiceObject): raise TypeError(f"Not a service type: {dtype}") self._raise_if_closed() _logger.debug( "%s: Constructing new client for %r at service-ID %d with remote server node-ID %s", self, dtype, service_id, server_node_id, ) def transfer_id_modulo_factory() -> int: return self._transport.protocol_parameters.transfer_id_modulo input_session_specifier = pyuavcan.transport.InputSessionSpecifier( pyuavcan.transport.ServiceDataSpecifier(service_id, pyuavcan.transport.ServiceDataSpecifier.Role.RESPONSE), server_node_id, ) output_session_specifier = pyuavcan.transport.OutputSessionSpecifier( pyuavcan.transport.ServiceDataSpecifier(service_id, pyuavcan.transport.ServiceDataSpecifier.Role.REQUEST), server_node_id, ) try: impl = self._registry[Client, input_session_specifier] assert isinstance(impl, ClientImpl) except LookupError: output_transport_session = self._transport.get_output_session( output_session_specifier, self._make_payload_metadata(dtype.Request) ) input_transport_session = self._transport.get_input_session( input_session_specifier, self._make_payload_metadata(dtype.Response) ) transfer_id_counter = self._output_transfer_id_map.setdefault( output_session_specifier, OutgoingTransferIDCounter() ) impl = ClientImpl( dtype=dtype, input_transport_session=input_transport_session, output_transport_session=output_transport_session, transfer_id_counter=transfer_id_counter, transfer_id_modulo_factory=transfer_id_modulo_factory, finalizer=self._make_finalizer(Client, input_session_specifier), loop=self.loop, ) self._registry[Client, input_session_specifier] = impl assert isinstance(impl, ClientImpl) return Client(impl=impl, loop=self.loop) def get_server(self, dtype: typing.Type[ServiceClass], service_id: int) -> Server[ServiceClass]: if not issubclass(dtype, pyuavcan.dsdl.ServiceObject): raise TypeError(f"Not a service type: {dtype}") self._raise_if_closed() _logger.debug("%s: Providing server for %r at service-ID %d", self, dtype, service_id) def output_transport_session_factory(client_node_id: int) -> pyuavcan.transport.OutputSession: _logger.debug("%s: %r has requested a new output session to client node %s", self, impl, client_node_id) ds = pyuavcan.transport.ServiceDataSpecifier( service_id, pyuavcan.transport.ServiceDataSpecifier.Role.RESPONSE ) return self._transport.get_output_session( pyuavcan.transport.OutputSessionSpecifier(ds, client_node_id), self._make_payload_metadata(dtype.Response), ) input_session_specifier = pyuavcan.transport.InputSessionSpecifier( pyuavcan.transport.ServiceDataSpecifier(service_id, pyuavcan.transport.ServiceDataSpecifier.Role.REQUEST), None, ) try: impl = self._registry[Server, input_session_specifier] assert isinstance(impl, Server) except LookupError: input_transport_session = self._transport.get_input_session( input_session_specifier, self._make_payload_metadata(dtype.Request) ) impl = Server( dtype=dtype, input_transport_session=input_transport_session, output_transport_session_factory=output_transport_session_factory, finalizer=self._make_finalizer(Server, input_session_specifier), loop=self.loop, ) self._registry[Server, 
input_session_specifier] = impl assert isinstance(impl, Server) return impl def make_publisher_with_fixed_subject_id( self, dtype: typing.Type[FixedPortMessageClass] ) -> Publisher[FixedPortMessageClass]: return self.make_publisher(dtype=dtype, subject_id=self._get_fixed_port_id(dtype))
MIT License
spcl/stencilflow
stencilflow/kernel.py
Kernel.move_forward
python
def move_forward(self, items: Dict[str, Dict]) -> None:
    for name in items:
        if len(items[name]['internal_buffer']) == 0:
            pass
        elif len(items[name]['internal_buffer']) == 1:  # was self.inputs[name]; using items keeps the method correct for output buffers too
            items[name]['internal_buffer'][0].dequeue()
            items[name]['internal_buffer'][0].enqueue(
                items[name]['delay_buffer'].dequeue())
        else:
            # shift every element one position forward, starting from the last
            # internal buffer and working back towards the first one
            index = len(items[name]['internal_buffer']) - 1
            pre = items[name]['internal_buffer'][index - 1]
            next = items[name]['internal_buffer'][index]
            next.dequeue()
            while index > 0:
                next.enqueue(pre.dequeue())
                next = pre
                index -= 1
                pre = items[name]['internal_buffer'][index - 1]
            # refill the first internal buffer from the delay buffer
            items[name]['internal_buffer'][0].enqueue(
                items[name]['delay_buffer'].dequeue())
Move all items within the internal and delay buffers one element forward.

:param items: mapping from input name to its buffer entry, a dict holding the 'internal_buffer' queues and the 'delay_buffer'
:return: None
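A minimal sketch of the shift pattern described above, using plain deques in place of BoundedQueue (buffer contents are illustrative assumptions):

from collections import deque

delay = deque([1, 2, 3])                      # delay buffer
internal = [deque([4, 5]), deque([6])]        # chained internal buffers

internal[-1].popleft()                        # drop the oldest element
internal[-1].append(internal[0].popleft())    # shift between internal buffers
internal[0].append(delay.popleft())           # refill from the delay buffer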
https://github.com/spcl/stencilflow/blob/28bb88e7f4251f29aecc266663bc780023ed2549/stencilflow/kernel.py#L595-L622
__author__ = "Andreas Kuster (kustera@ethz.ch)" __copyright__ = "BSD 3-Clause License" import functools import operator from typing import List, Dict import dace.dtypes import stencilflow from stencilflow.base_node_class import BaseKernelNodeClass, BaseOperationNodeClass from stencilflow.bounded_queue import BoundedQueue from stencilflow.calculator import Calculator from stencilflow.compute_graph import ComputeGraph from stencilflow.compute_graph import Name, Num, BinOp, Call, Output, Subscript, Ternary, Compare, UnaryOp class Kernel(BaseKernelNodeClass): def __init__(self, name: str, kernel_string: str, dimensions: List[int], data_type: dace.dtypes.typeclass, boundary_conditions: Dict[str, Dict[str, str]], raw_inputs, vectorization: int = 1, plot_graph: bool = False, verbose: bool = False) -> None: super().__init__(name, BoundedQueue(name="dummy", maxsize=0), data_type) self.kernel_string: str = kernel_string self.raw_inputs = raw_inputs self.dimensions: List[ int] = dimensions self.boundary_conditions: Dict[str, Dict[ str, str]] = boundary_conditions self.verbose = verbose self.vectorization = vectorization self.config: Dict = stencilflow.parse_json("kernel.config") self.calculator: Calculator = Calculator() self.all_available = False self.not_available = set() self.graph: ComputeGraph = ComputeGraph(vectorization=vectorization, dimensions=dimensions, raw_inputs=raw_inputs) self.graph.generate_graph( kernel_string ) self.graph.calculate_latency( ) self.graph.determine_inputs_outputs( ) self.graph.setup_internal_buffers() if plot_graph: self.graph.plot_graph(name + ".png") self.var_map: Dict[str, float] = dict( ) self.read_success: bool = False self.exec_success: bool = False self.result: float = float( 'nan' ) self.outputs: Dict[str, BoundedQueue] = dict() self.out_delay_queue: BoundedQueue = BoundedQueue( name="delay_output", maxsize=self.graph.max_latency + 1, collection=[None] * self.graph.max_latency) self.internal_buffer: Dict[str, BoundedQueue] = dict() self.setup_internal_buffers() self.dist_to_center: Dict = dict() self.set_up_dist_to_center() self.center_reached = False self.max_del_buf_usage = dict() self.buf_usage_sum = dict() self.buf_usage_num = dict() self.init_metric = False self.PC_exec_start = stencilflow.convert_3d_to_1d( dimensions=self.dimensions, index=self.dimensions) self.PC_exec_end = 0 def print_kernel_performance(self): print("#############################") for input in set(self.inputs).union(set(self.outputs)): print("#############################") print("input buffer name: {}".format(input)) print("max buffer usage: {}".format(self.max_del_buf_usage[input])) print("average buffer usage: {}".format(self.buf_usage_sum[input] / self.buf_usage_num[input])) print("total execution time (from first exec to last): {}".format( self.PC_exec_end - self.PC_exec_start)) def update_performance_metric(self): if not self.init_metric: for input in self.inputs: self.max_del_buf_usage[input] = 0 self.buf_usage_num[input] = 0 self.buf_usage_sum[input] = 0 for output in self.outputs: self.max_del_buf_usage[output] = 0 self.buf_usage_num[output] = 0 self.buf_usage_sum[output] = 0 for input in self.inputs: buffer = self.inputs[input] self.max_del_buf_usage[input] = max( self.max_del_buf_usage[input], len([x for x in buffer['delay_buffer'].queue if x is not None])) self.buf_usage_num[input] += 1 self.buf_usage_sum[input] += len( [x for x in buffer['delay_buffer'].queue if x is not None]) for output in self.outputs: buffer = self.outputs[output] self.max_del_buf_usage[output] = max( 
self.max_del_buf_usage[output], len([x for x in buffer['delay_buffer'].queue if x is not None])) self.buf_usage_num[output] += 1 self.buf_usage_sum[output] += len( [x for x in buffer['delay_buffer'].queue if x is not None]) def set_up_dist_to_center(self): for item in self.graph.accesses: furthest = max(self.graph.accesses[item]) self.dist_to_center[item] = stencilflow.convert_3d_to_1d( dimensions=self.dimensions, index=furthest) def iter_comp_tree(self, node: BaseOperationNodeClass, index_relative_to_center=True, replace_negative_index=False, python_syntax=False, flatten_index=True, output_dimensions=None) -> str: pred = list(self.graph.graph.pred[node]) if isinstance(node, BinOp): if len(pred) == 1: lhs, rhs = pred[0], pred[0] else: lhs = pred[0] rhs = pred[1] lhs_str = self.iter_comp_tree(lhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) rhs_str = self.iter_comp_tree(rhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) return "({} {} {})".format(lhs_str, node.generate_op_sym(), rhs_str) elif isinstance(node, Call): expr = pred[0] expr_str = self.iter_comp_tree(expr, index_relative_to_center, replace_negative_index, python_syntax) return "{}({})".format(node.name, expr_str) elif isinstance(node, Name) or isinstance(node, Num): return str(node.name) elif isinstance(node, Subscript): if index_relative_to_center: dim_index = node.index else: dim_index = stencilflow.list_subtract_cwise( node.index, self.graph.max_index[node.name]) if flatten_index: if node.name in self.input_paths and self.inputs[ node.name]["input_dims"] is not None: ind = [ x if x in self.inputs[node.name]["input_dims"] else None for x in stencilflow.ITERATORS ] num_dim = stencilflow.num_dims(ind) new_ind, i = list(), 0 for entry in ind: if entry is None: new_ind.append(None) else: new_ind.append(dim_index[i]) i += 1 dim_index = dim_index word_index = stencilflow.convert_3d_to_1d( dimensions=self.dimensions, index=dim_index) if replace_negative_index and word_index < 0: return node.name + "[" + "n" + str(abs(word_index)) + "]" else: return node.name + "[" + str(word_index) + "]" else: try: dim_index = [ dim_index[stencilflow.ITERATORS.index(i)] for i in self.inputs[node.name]["input_dims"] ] except (KeyError, TypeError): pass if len(dim_index) > output_dimensions: for i in range(3 - output_dimensions): if dim_index[i] != 0: raise ValueError("Removed used index dimension") dim_index = dim_index[3 - output_dimensions:] return node.name + str(dim_index) elif isinstance( node, Ternary ): compare = [x for x in pred if type(x) == Compare][0] lhs = [x for x in pred if type(x) != Compare][0] rhs = [x for x in pred if type(x) != Compare][1] compare_str = self.iter_comp_tree(compare, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) lhs_str = self.iter_comp_tree(lhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) rhs_str = self.iter_comp_tree(rhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) if python_syntax: return "(({}) if ({}) else ({}))".format( lhs_str, compare_str, rhs_str) else: return "(({}) ? 
({}) : ({}))".format(compare_str, lhs_str, rhs_str) elif isinstance(node, Compare): lhs = pred[0] rhs = pred[1] lhs_str = self.iter_comp_tree(lhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) rhs_str = self.iter_comp_tree(rhs, index_relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions) return "{} {} {}".format(lhs_str, str(node.name), rhs_str) elif isinstance(node, UnaryOp): expr = pred[0] expr_str = self.iter_comp_tree( node=expr, index_relative_to_center=index_relative_to_center, replace_negative_index=replace_negative_index, python_syntax=python_syntax, flatten_index=flatten_index, output_dimensions=output_dimensions) return "({}{})".format(node.generate_op_sym(), expr_str) else: raise NotImplementedError( "iter_comp_tree is not implemented for node type {}".format( type(node))) def generate_relative_access_kernel_string(self, relative_to_center=True, replace_negative_index=False, python_syntax=False, flatten_index=True, output_dimensions=None) -> str: res = [] for n in self.graph.graph.nodes: if isinstance(n, Name) and n.name not in self.input_paths: if len(self.graph.graph.pred[n]) == 0: continue res.append(n.name + " = " + self.iter_comp_tree( list(self.graph.graph.pred[n])[0], relative_to_center, replace_negative_index, python_syntax, flatten_index, output_dimensions)) output_node = [ n for n in self.graph.graph.nodes if isinstance(n, Output) ] if len(output_node) != 1: raise Exception("Expected a single output node") output_node = output_node[0] res.append(self.name + " = " + self.iter_comp_tree( node=list(self.graph.graph.pred[output_node])[0], index_relative_to_center=relative_to_center, replace_negative_index=replace_negative_index, python_syntax=python_syntax, flatten_index=flatten_index, output_dimensions=output_dimensions)) return "; ".join(res) def reset_old_compute_state(self) -> None: self.var_map = dict() self.read_success = False self.exec_success = False self.result = None def remove_duplicate_accesses(self, inp: List) -> List: tuple_set = set(tuple(row) for row in inp) return [list(t) for t in tuple_set] def setup_internal_buffers(self) -> None: for item in self.graph.accesses: self.graph.accesses[item] = self.remove_duplicate_accesses( self.graph.accesses[item]) for buf_name in self.graph.buffer_size: self.internal_buffer[buf_name]: List[BoundedQueue] = list() list.sort(self.graph.accesses[buf_name], reverse=True) if len(self.graph.accesses[buf_name]) == 0: pass elif len(self.graph.accesses[buf_name]) == 1: self.internal_buffer[buf_name].append( BoundedQueue(name=buf_name, maxsize=1, collection=[None])) else: itr = self.graph.accesses[buf_name].__iter__() pre = itr.__next__() for item in itr: curr = item diff = abs( stencilflow.convert_3d_to_1d( index=stencilflow.list_subtract_cwise(pre, curr), dimensions=self.dimensions)) if diff == 0: pass else: self.internal_buffer[buf_name].append( BoundedQueue(name=buf_name, maxsize=diff, collection=[None] * diff)) pre = curr def buffer_position(self, access: BaseKernelNodeClass) -> int: return self.convert_3d_to_1d( self.graph.min_index[access.name]) - self.convert_3d_to_1d( access.index) def index_to_ijk(self, index: List[int]): if len(index) == 3: ind = stencilflow.convert_3d_to_1d(dimensions=self.dimensions, index=index) return "_{}".format(ind) if ind >= 0 else "_n{}".format(abs(ind)) else: raise NotImplementedError( "Method index_to_ijk has not been implemented for |indices|!=3, here: |indices|={}" .format(len(index))) def 
buffer_number(self, node: Subscript): selected = [x.index for x in self.graph.inputs if x.name == node.name] selected_unique = self.remove_duplicate_accesses(selected) ordered = sorted(selected_unique, reverse=True) result = ordered.index(node.index) return result - 1 def get_global_kernel_index(self) -> List[int]: index = self.dimensions number = self.program_counter n = len(index) all_dim = functools.reduce(operator.mul, index, 1) // index[0] output = list() for i in range(1, n + 1): output.append(number // all_dim) number -= output[-1] * all_dim if i < n: all_dim = all_dim // index[i] return output def is_out_of_bound(self, index: List[int]) -> bool: for i in range(len(index)): if index[i] < 0 or index[i] >= self.dimensions[i]: return True return False def get_data(self, inp: Subscript, global_index: List[int], relative_index: List[int]): access_index = stencilflow.list_add_cwise(global_index, relative_index) if self.is_out_of_bound(access_index): if self.boundary_conditions[inp.name]["type"] == "constant": return self.boundary_conditions[inp.name]["value"] elif self.boundary_conditions[inp.name]["type"] == "copy": raise NotImplementedError( "Copy boundary conditions have not been implemented yet.") else: raise NotImplementedError( "We currently do not support boundary conditions of type {}" .format(self.boundary_conditions[inp.name]["type"])) """ Data Access """ pos = self.buffer_number(inp) if pos == -1: return self.inputs[inp.name]["delay_buffer"].try_peek_last() elif pos >= 0: return self.inputs[inp.name]["internal_buffer"][pos].try_peek_last() def test_availability(self): all_available = True self.not_available = set() for inp in self.graph.inputs: if isinstance(inp, Num): pass elif len(self.inputs[inp.name] ['internal_buffer']) == 0: pass elif isinstance(inp, Subscript): gki = self.get_global_kernel_index() if self.is_out_of_bound( stencilflow.list_add_cwise(inp.index, gki)): pass else: index = self.buffer_number(inp) if index == -1: if self.inputs[inp.name]['delay_buffer'].try_peek_last() is None or self.inputs[inp.name]['delay_buffer'].try_peek_last() is False: all_available = False self.not_available.add(inp.name) elif 0 <= index < len(self.inputs[ inp.name]['internal_buffer']): if self.inputs[inp.name]['internal_buffer'][index].try_peek_last() is False or self.inputs[inp.name]['internal_buffer'][index].try_peek_last() is None: all_available = False self.not_available.add(inp.name) else: raise Exception("index out of bound: {}".format(index)) return all_available
BSD 3-Clause New or Revised License
jelloslinger/aiodownload
aiodownload/strategy.py
RequestStrategy.retry
python
def retry(self, response):
    return False
Retry the HTTP request based on the response

:param response: response from an HTTP request
:type response: :class:`aiohttp.ClientResponse`
:return: whether the request should be retried
:rtype: bool
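The base implementation never retries. A plausible subclass override (not part of this record) that retries transient server errors:

class RetryOn5xx(RequestStrategy):
    def retry(self, response):
        # retry only server-side failures (HTTP 5xx)
        return response.status >= 500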
https://github.com/jelloslinger/aiodownload/blob/29b3bc49cdaec9615933d326b338865fd903571c/aiodownload/strategy.py#L107-L117
import logging
import os

from .util import default_url_transform, make_dirs

logger = logging.getLogger(__name__)


class DownloadStrategy:
    def __init__(self, chunk_size=65536, home=None, skip_cached=False):
        self.chunk_size = chunk_size
        self.home = home or os.getcwd()
        self.skip_cached = skip_cached

    async def on_fail(self, bundle):
        make_dirs(bundle.file_path)
        open(bundle.file_path, 'wb+').close()

    async def on_success(self, response, bundle):
        make_dirs(bundle.file_path)
        with open(bundle.file_path, 'wb+') as f:
            while True:
                chunk = await response.content.read(self.chunk_size)
                if not chunk:
                    break
                f.write(chunk)

    def get_file_path(self, bundle):
        return os.path.sep.join([
            self.home,
            default_url_transform(bundle.url)
        ])


class RequestStrategy:
    def __init__(self, concurrent=2, max_attempts=0, timeout=60):
        self.concurrent = concurrent
        self.max_attempts = max_attempts
        self.timeout = timeout

    def assert_response(self, response):
        assert response.status < 400
MIT License
deepmind/pycolab
pycolab/tests/test_things.py
get_post_update
python
def get_post_update(entity, the_plot):
    return the_plot.setdefault('test_post_update', {}).pop(
        entity.character, lambda *args, **kwargs: None)
Retrieve post-update callable for `entity` for the next game iteration.

Once retrieved, the post-update callable is cleared, so the callable will
only be called for the current game iteration. This function is intended
mainly as a helper for the `Sprite`s and `Drape`s defined in this module.
Most user code will not need to use it.

Args:
    entity: the pycolab game entity for which we wish to retrieve any
        post-update callable.
    the_plot: the `Plot` object for the pycolab game passed to `post_update`
        when registering a callable for `entity`.

Returns:
    the callable registered for this entity via `post_update`, or a null
    callable if none was registered.
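A register-then-retrieve round trip, assuming a game `engine` and a sprite whose character is 'P' (both hypothetical):

def announce(*args, **kwargs):
    print('post-update for P')

post_update(engine, 'P', announce)                     # register for the next iteration
callback = get_post_update(sprite_p, engine.the_plot)  # retrieve and clear
callback()                                             # prints 'post-update for P'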
https://github.com/deepmind/pycolab/blob/6504c8065dd5322438f23a2a9026649904f3322a/pycolab/tests/test_things.py#L130-L150
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import unittest

import numpy as np

from pycolab import ascii_art
from pycolab import cropping
from pycolab import things as plab_things
from pycolab.prefab_parts import drapes
from pycolab.prefab_parts import sprites

import six


def pre_update(engine, character, thing_to_do):
    engine.the_plot.setdefault('test_pre_update', {})[character] = thing_to_do


def post_update(engine, character, thing_to_do):
    engine.the_plot.setdefault('test_post_update', {})[character] = thing_to_do


def get_pre_update(entity, the_plot):
    return the_plot.setdefault('test_pre_update', {}).pop(
        entity.character, lambda *args, **kwargs: None)
Apache License 2.0
turbulenz/turbulenz_local
turbulenz_local/models/gamelist.py
GameList.save_game_list
python
def save_game_list(self):
    game_paths = [game.path.encode('utf-8') for game in self._slugs.values()
                  if game.path.is_correct()]
    try:
        f = open(config['games.yaml'], 'w')
        try:
            yaml.dump(game_paths, f)
        finally:
            f.close()
    except IOError, e:
        LOG.warn(str(e))
Save the list of games
https://github.com/turbulenz/turbulenz_local/blob/3c96c1229514dcd4893b7055599f55ed185db95a/turbulenz_local/models/gamelist.py#L106-L121
import logging import yaml from pylons import config from paste.deploy.converters import asbool from turbulenz_local.tools import get_absolute_path from turbulenz_local.models.metrics import MetricsSession from turbulenz_local.models.game import Game, GameError, GameNotFoundError from turbulenz_local.models.gamedetails import SlugDetail from turbulenz_local.lib.deploy import Deployment LOG = logging.getLogger(__name__) class SlugError(Exception): pass class GameList(object): _instance = None _reload = False @classmethod def get_instance(cls): if cls._instance is None or cls._reload: cls._instance = GameList() cls._reload = False return cls._instance def __init__(self): self._slugs = {} self._load_games() def list_all(self): return self._slugs.values() def _load_games(self): paths = load_paths(config['games.yaml']) if len(paths) != len(set(paths)): LOG.warn('duplicate paths in games.yaml found') games_root = config['games_root'] deploy_enable = asbool(config.get('deploy.enable', False)) for path in set(paths): try: game = Game(self, path, games_root=games_root, deploy_enable=deploy_enable) except GameError, e: LOG.error('error loading game from %s: %s', path, e) else: if game.slug in self._slugs.keys(): new_slug = self.make_slug_unique(game.slug) game.slug = SlugDetail(new_slug) self._slugs[game.slug] = game LOG.info('game loaded from %s', path) def _reload_game(self, slug): if slug in self._slugs: path = self._slugs.get(slug).path games_root = config['games_root'] deploy_enable = asbool(config.get('deploy.enable', False)) try: game = Game(self, path, games_root=games_root, deploy_enable=deploy_enable) except GameError, e: LOG.error('error loading game from %s: %s', path, e) else: self._slugs[game.slug] = game def change_slug(self, old_slug, new_slug): if old_slug is not None and new_slug is not None: try: game = self._slugs[old_slug] del self._slugs[old_slug] if new_slug in self._slugs.keys(): new_slug = SlugDetail(self.make_slug_unique(new_slug)) game.slug = new_slug self._slugs[new_slug] = game except KeyError: LOG.error('Error swapping slugs:' + old_slug + ' for ' + new_slug) else: MetricsSession.rename(old_slug, new_slug) cache_dir = config.get('deploy.cache_dir', None) Deployment.rename_cache(cache_dir, old_slug, new_slug)
MIT License
jminardi/python-btsync
btsync.py
BTSync.add_folder
python
def add_folder(self, path, secret=None):
    params = {'method': 'add_folder', 'dir': path}
    if secret is not None:
        params['secret'] = secret
    return self._request(params)
Adds a folder to Sync. If a secret is not specified, it will be generated
automatically. The folder will have to pre-exist on the disk and Sync will
add it into a list of syncing folders.

Returns '0' if no errors, error code and error message otherwise.

    { "error": 0 }

http://[address]:[port]/api?method=add_folder&dir=(folderPath)[&secret=(secret)&selective_sync=1]  #noqa

dir (required) - specify path to the sync folder
secret (optional) - specify folder secret
selective_sync (optional) - specify sync mode, selective - 1, all files (default) - 0
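A hedged usage sketch against a local Sync instance (the path and credentials are placeholders, and it assumes `_request` returns the decoded JSON response):

client = BTSync(user='admin', pswd='password')
result = client.add_folder('/home/me/Sync/photos')
if result.get('error', 0) == 0:
    print('folder added')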
https://github.com/jminardi/python-btsync/blob/0f6440b446e190b66acd2ae98a1ec9dde5171b21/btsync.py#L84-L104
import httplib import base64 import urllib import json import os import tempfile BTSYNC_PATH = r'/Applications/BitTorrent\ Sync.app/Contents/MacOS/BitTorrent\ Sync' class BTSync(object): def __init__(self, btsync_path=BTSYNC_PATH, host='127.0.0.1', port='8888', user='admin', pswd='password'): self.btsync_path = btsync_path self.conn = httplib.HTTPConnection('{}:{}'.format(host, port)) auth = 'Basic ' + base64.b64encode('{}:{}'.format(user, pswd)) self.headers = {'Authorization': auth} self.config = { 'use_gui': True, 'webui': { 'listen': '{}:{}'.format(host, port), 'login': user, 'password': pswd, 'api_key': ('2WCXYTARBRSSCL3PMT2NKIMJVBFRXPU72PIVCJ73UHMVILX73' 'UGM5RVLF45ETLCEDFBGEH4P6ACYPCSSNXPQY2LP6BB6YPPVJH' 'VGQDZ3KDVNOQT2IBB5ZKM6XZ4CXA3DUMO3KBY') } } def get_folders(self, secret=None): params = {'method': 'get_folders'} if secret is not None: params['secret'] = secret return self._request(params)
MIT License
riotgames/cloud-inquisitor
backend/cloud_inquisitor/utils.py
flatten
python
def flatten(data):
    if not data:
        return data
    if type(data[0]) in (list, tuple):
        return list(flatten(data[0])) + list(flatten(data[1:]))
    return list(data[:1]) + list(flatten(data[1:]))
Returns a flattened version of a list. Courtesy of https://stackoverflow.com/a/12472564

Args:
    data (`tuple` or `list`): Input data

Returns:
    `list`
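For instance (illustrative doctest):

>>> flatten([1, [2, (3, 4)], 5])
[1, 2, 3, 4, 5]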
https://github.com/riotgames/cloud-inquisitor/blob/29a26c705381fdba3538b4efedb25b9e09b387ed/backend/cloud_inquisitor/utils.py#L529-L546
import binascii import hashlib import json import logging import os import random import re import string import time import zlib from base64 import b64decode from collections import namedtuple from copy import deepcopy from datetime import datetime from difflib import Differ from functools import wraps import boto3.session import jwt import munch import pkg_resources import requests from argon2 import PasswordHasher from dateutil import parser from jinja2 import Environment, BaseLoader from cloud_inquisitor.constants import RGX_EMAIL_VALIDATION_PATTERN, RGX_BUCKET, ROLE_ADMIN, DEFAULT_CONFIG, CONFIG_FILE_PATHS from cloud_inquisitor.exceptions import InquisitorError __jwt_data = None log = logging.getLogger(__name__) NotificationContact = namedtuple('NotificationContact', ('type', 'value')) class MenuItem(object): def __init__(self, group=None, name=None, state=None, active=None, section=None, args=None, order=100): self.group = group self.name = name self.state = state self.active = active self.section = section self.args = args or {} self.order = order def to_json(self): return { 'group': self.group, 'name': self.name, 'state': self.state, 'active': self.active, 'section': self.section, 'args': self.args or {}, 'order': self.order } def deprecated(msg): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): logging.getLogger(__name__).warning(msg) return func(*args, **kwargs) return wrapper return decorator def get_hash(data): return hashlib.sha256(str(data).encode('utf-8')).hexdigest() def is_truthy(value, default=False): if value is None: return False if isinstance(value, bool): return value if isinstance(value, int): return value > 0 trues = ('1', 'true', 'y', 'yes', 'ok') falses = ('', '0', 'false', 'n', 'none', 'no') if value.lower().strip() in falses: return False elif value.lower().strip() in trues: return True else: if default: return default else: raise ValueError('Invalid argument given to truthy: {0}'.format(value)) def validate_email(email, partial_match=False): rgx = re.compile(RGX_EMAIL_VALIDATION_PATTERN, re.I) if partial_match: return rgx.search(email) is not None else: return rgx.match(email) is not None def get_template(template): from cloud_inquisitor.database import db tmpl = db.Template.find_one(template_name=template) if not tmpl: raise InquisitorError('No such template found: {}'.format(template)) tmplenv = Environment(loader=BaseLoader, autoescape=True) tmplenv.filters['json_loads'] = json.loads tmplenv.filters['slack_quote_join'] = lambda data: ', '.join('`{}`'.format(x) for x in data) return tmplenv.from_string(tmpl.template) def parse_bucket_info(domain): match = RGX_BUCKET.match(domain) if match: data = match.groupdict() return data['bucket'], data['region'] or 'us-east-1' def to_utc_date(date): return datetime.utcfromtimestamp(float(date.strftime('%s'))).replace(tzinfo=None) if date else None def isoformat(date): return date.isoformat() if date else None def generate_password(length=32): return ''.join(random.SystemRandom().choice(string.ascii_letters + '!@#$+.,') for _ in range(length)) def generate_csrf_token(): return binascii.hexlify(os.urandom(32)).decode() def hash_password(password): return PasswordHasher().hash(password) def generate_jwt_token(user, authsys, **kwargs): from cloud_inquisitor.config import dbconfig token = { 'auth_system': authsys, 'exp': time.time() + dbconfig.get('session_expire_time'), 'roles': [role.name for role in user.roles] } if kwargs: token.update(**kwargs) enc = jwt.encode(token, get_jwt_key_data(), 
algorithm='HS512') return enc.decode() def get_jwt_key_data(): global __jwt_data if __jwt_data: return __jwt_data from cloud_inquisitor import config_path from cloud_inquisitor.config import dbconfig jwt_key_file = dbconfig.get('jwt_key_file_path', default='ssl/private.key') if not os.path.isabs(jwt_key_file): jwt_key_file = os.path.join(config_path, jwt_key_file) with open(os.path.join(jwt_key_file), 'r') as f: __jwt_data = f.read() return __jwt_data def has_access(user, required_roles, match_all=True): if ROLE_ADMIN in user.roles: return True if isinstance(required_roles, str): if required_roles in user.roles: return True return False if match_all: for role in required_roles: if role not in user.roles: return False return True else: for role in required_roles: if role in user.roles: return True return False def merge_lists(*args): out = {} for contacts in filter(None, args): for contact in contacts: out[contact.value] = contact return list(out.values()) def to_camelcase(inStr): return re.sub('_([a-z])', lambda x: x.group(1).upper(), inStr) def from_camelcase(inStr): return re.sub('[A-Z]', lambda x: '_' + x.group(0).lower(), inStr) def get_resource_id(prefix, *data): parts = flatten(data) for part in parts: if type(part) not in (str, int, float): raise ValueError('Supported data types: int, float, list, tuple, str. Got: {}'.format(type(part))) return '{}-{}'.format( prefix, get_hash('-'.join(sorted(map(str, parts))))[-16:] ) def parse_date(date_string, ignoretz=True): try: return parser.parse(date_string, ignoretz=ignoretz) except TypeError: return None def get_user_data_configuration(): from cloud_inquisitor import get_local_aws_session, app_config kms_region = app_config.kms_region session = get_local_aws_session() if session.get_credentials().method == 'iam-role': kms = session.client('kms', region_name=kms_region) else: sts = session.client('sts') audit_role = sts.assume_role(RoleArn=app_config.aws_api.instance_role_arn, RoleSessionName='cloud_inquisitor') kms = boto3.session.Session( audit_role['Credentials']['AccessKeyId'], audit_role['Credentials']['SecretAccessKey'], audit_role['Credentials']['SessionToken'], ).client('kms', region_name=kms_region) user_data_url = app_config.user_data_url res = requests.get(user_data_url) if res.status_code == 200: data = kms.decrypt(CiphertextBlob=b64decode(res.content)) kms_config = json.loads(zlib.decompress(data['Plaintext']).decode('utf-8')) app_config.database_uri = kms_config['db_uri'] else: raise RuntimeError('Failed loading user-data, cannot continue: {}: {}'.format(res.status_code, res.content)) def read_config(): def __recursive_update(old, new): out = deepcopy(old) for k, v in new.items(): if issubclass(type(v), dict): if k in old: out[k] = __recursive_update(old[k], v) else: out[k] = v else: out[k] = v return out for fpath in CONFIG_FILE_PATHS: if os.path.exists(fpath): data = munch.munchify(json.load(open(fpath, 'r'))) return os.path.dirname(fpath), munch.munchify(__recursive_update(DEFAULT_CONFIG, data)) raise FileNotFoundError('Configuration file not found')
Apache License 2.0
cogent3/cogent3
src/cogent3/evolve/likelihood_function.py
_get_keyed_rule_indices
python
def _get_keyed_rule_indices(rules):
    new = {}
    for i, rule in enumerate(rules):
        edges = rule.get("edges", rule.get("edge", None)) or []
        edges = [edges] if type(edges) == str else edges
        par_name = rule["par_name"]
        key = frozenset([par_name] + edges)
        new[key] = i
    return new
returns {frozenset((par_name, edge1, edge2, ...)): index}
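For example, with made-up rules (set ordering inside the keys may vary):

>>> rules = [
...     {"par_name": "kappa", "edges": ["Human", "Chimp"]},
...     {"par_name": "length", "edge": "Mouse"},
... ]
>>> _get_keyed_rule_indices(rules)
{frozenset({'kappa', 'Human', 'Chimp'}): 0, frozenset({'length', 'Mouse'}): 1}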
https://github.com/cogent3/cogent3/blob/3d98bddc0aef2bf7fea21b9a89de76b01f3d2da8/src/cogent3/evolve/likelihood_function.py#L50-L59
import json
import random
from collections import defaultdict
from copy import deepcopy

import numpy

from cogent3.core.alignment import ArrayAlignment
from cogent3.evolve import substitution_model
from cogent3.evolve.simulate import AlignmentEvolver, random_sequence
from cogent3.maths.matrix_exponential_integration import expected_number_subs
from cogent3.maths.matrix_logarithm import is_generator_unique
from cogent3.maths.measure import (
    paralinear_continuous_time,
    paralinear_discrete_time,
)
from cogent3.recalculation.definition import ParameterController
from cogent3.util import table
from cogent3.util.dict_array import DictArrayTemplate
from cogent3.util.misc import adjusted_gt_minprob, get_object_provenance

__author__ = "Peter Maxwell"
__copyright__ = "Copyright 2007-2021, The Cogent Project"
__credits__ = [
    "Gavin Huttley",
    "Andrew Butterfield",
    "Peter Maxwell",
    "Matthew Wakefield",
    "Rob Knight",
    "Brett Easton",
    "Ben Kaehler",
    "Ananias Iliadis",
]
__license__ = "BSD-3"
__version__ = "2021.10.12a1"
__maintainer__ = "Gavin Huttley"
__email__ = "gavin.huttley@anu.edu.au"
__status__ = "Production"
BSD 3-Clause New or Revised License
microsoft/distributeddeeplearning
{{cookiecutter.project_name}}/TensorFlow_imagenet/src/data/tfrecords.py
get_filenames
python
def get_filenames(is_training, data_dir, num_files=1014):
    if is_training:
        return [
            os.path.join(data_dir, "train-%05d-of-01014" % i)
            for i in range(num_files)
        ]
    else:
        return [
            os.path.join(data_dir, "validation-%05d-of-00128" % i)
            for i in range(128)
        ]
Return filenames for dataset.
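For example (hypothetical data directory):

>>> get_filenames(is_training=True, data_dir='/data/imagenet', num_files=2)
['/data/imagenet/train-00000-of-01014', '/data/imagenet/train-00001-of-01014']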
https://github.com/microsoft/distributeddeeplearning/blob/2f407881b49415188ca2e38e5331781962939251/{{cookiecutter.project_name}}/TensorFlow_imagenet/src/data/tfrecords.py#L11-L20
import logging
import os

import horovod.tensorflow as hvd
import tensorflow as tf

import defaults
import imagenet_preprocessing
MIT License
flexget/flexget
flexget/components/sites/sites/filelist_api.py
SearchFileList.search
python
def search(self, task, entry, config):
    entries = []

    params = {
        'username': config['username'],
        'passkey': config['passkey'],
        'action': 'search-torrents',
    }

    if config.get('category'):
        params['category'] = (
            ','.join(self.valid_categories[cat] for cat in config['category'])
            if isinstance(config.get('category'), list)
            else self.valid_categories[config.get('category')]
        )

    params.update({extra: 1 for extra in self.valid_extras if config.get(extra)})

    if entry.get('series_episode'):
        params['episode'] = entry.get('series_episode')
    if entry.get('series_season'):
        params['season'] = entry.get('series_season')

    for search_title in entry.get('search_strings', [entry.get('title')]):
        if entry.get('imdb_id'):
            params['type'] = 'imdb'
            params['query'] = entry.get('imdb_id')
            params['name'] = search_title
        else:
            params['type'] = 'name'
            params['query'] = search_title

        try:
            response = self.get(self.api_url, params)
        except RequestException as e:
            raise plugin.PluginError(f'FileList request failed badly! {e}')

        results = response.json()
        if not results:
            logger.verbose('No torrent found on Filelist for `{}`', search_title)
        else:
            logger.verbose(
                '{} torrent(s) were found on Filelist for `{}`', len(results), search_title
            )

        for result in results:
            entry = Entry()
            entry['title'] = result['name']
            entry['url'] = result['download_link']
            entry['imdb'] = result['imdb']
            # convert size (bytes) to MiB
            entry['content_size'] = result['size'] / 2 ** 20
            entry['torrent_snatches'] = result['times_completed']
            entry['torrent_seeds'] = result['seeders']
            entry['torrent_leeches'] = result['leechers']
            entry['torrent_internal'] = bool(result['internal'])
            entry['torrent_moderated'] = bool(result['moderated'])
            entry['torrent_freeleech'] = bool(result['freeleech'])
            entry['torrent_genres'] = [
                genres.strip() for genres in result['small_description'].split(',')
            ]

            entries.append(entry)

    return entries
Search for entries on FileList
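A plausible plugin configuration matching the schema above (credentials are placeholders):

config = {
    'username': 'user',
    'passkey': 'secret-passkey',
    'category': ['Filme HD', 'Filme HD-RO'],
    'freeleech': True,
}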
https://github.com/flexget/flexget/blob/e625eb09324a9d6be4cfb42601c6af4628b2226a/flexget/components/sites/sites/filelist_api.py#L88-L162
from http import HTTPStatus from loguru import logger from flexget import plugin from flexget.config_schema import one_or_more from flexget.entry import Entry from flexget.event import event from flexget.utils.requests import RequestException from flexget.utils.requests import Session as RequestSession logger = logger.bind(name='filelist') requests = RequestSession() class SearchFileList: api_url = 'https://filelist.io/api.php' valid_categories = { 'Anime': '24', 'Audio': '11', 'Desene': '15', 'Diverse': '18', 'Docs': '16', 'FLAC': '5', 'Filme 3D': '25', 'Filme 4K': '6', 'Filme 4K Blu-Ray': '26', 'Filme Blu-Ray': '20', 'Filme DVD': '2', 'Filme DVD-RO': '3', 'Filme HD': '4', 'Filme HD-RO': '19', 'Filme SD': '1', 'Jocuri Console': '10', 'Jocuri PC': '9', 'Linux': '17', 'Mobile': '22', 'Programe': '8', 'Seriale 4K': '27', 'Seriale HD': '21', 'Seriale SD': '23', 'Sport': '13', 'TV': '14', 'Videoclip': '12', 'XXX': '7', } valid_extras = ['internal', 'moderated', 'freeleech', 'doubleup'] schema = { 'type': 'object', 'properties': { 'username': {'type': 'string'}, 'passkey': {'type': 'string'}, 'category': one_or_more({'type': 'string', 'enum': list(valid_categories.keys())}), 'internal': {'type': 'boolean', 'default': False}, 'moderated': {'type': 'boolean', 'default': False}, 'freeleech': {'type': 'boolean', 'default': False}, 'doubleup': {'type': 'boolean', 'default': False}, }, 'required': ['username', 'passkey'], 'additionalProperties': False, } def get(self, url, params): try: response = requests.get(url, params=params, raise_status=False) except RequestException as e: raise plugin.PluginError(f'FileList request failed badly! {e}') if not response.ok: http_status = HTTPStatus(response.status_code) error_message = response.json().get('error', http_status.description) raise plugin.PluginError( f'FileList request failed; err code: {http_status}; err msg: `{error_message}`' ) return response @plugin.internet(logger)
MIT License
tensorflow/tensor2tensor
tensor2tensor/models/research/transformer_vae.py
attend
python
def attend(x, source, hparams, name):
    with tf.variable_scope(name):
        x = tf.squeeze(x, axis=2)
        if len(source.get_shape()) > 3:
            source = tf.squeeze(source, axis=2)
        source = common_attention.add_timing_signal_1d(source)
        y = common_attention.multihead_attention(
            common_layers.layer_preprocess(x, hparams), source, None,
            hparams.attention_key_channels or hparams.hidden_size,
            hparams.attention_value_channels or hparams.hidden_size,
            hparams.hidden_size, hparams.num_heads,
            hparams.attention_dropout)
        res = common_layers.layer_postprocess(x, y, hparams)
        return tf.expand_dims(res, axis=2)
Self-attention layer with source as memory antecedent.
https://github.com/tensorflow/tensor2tensor/blob/c22a226704e5887862bf9edd9f269892c9016ad4/tensor2tensor/models/research/transformer_vae.py#L63-L77
from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import math import os from six.moves import range from tensor2tensor.layers import common_attention from tensor2tensor.layers import common_image_attention as cia from tensor2tensor.layers import common_layers from tensor2tensor.layers import discretization from tensor2tensor.layers import latent_layers from tensor2tensor.layers import modalities from tensor2tensor.models import transformer from tensor2tensor.utils import beam_search from tensor2tensor.utils import contrib from tensor2tensor.utils import expert_utils from tensor2tensor.utils import registry from tensor2tensor.utils import t2t_model import tensorflow.compat.v1 as tf _DO_SUMMARIES = True def residual_conv(x, repeat, k, hparams, name, reuse=None): with tf.variable_scope(name, reuse=reuse): dilations_and_kernels = [((1, 1), k) for _ in range(3)] for i in range(repeat): with tf.variable_scope("repeat_%d" % i): y = common_layers.conv_block( common_layers.layer_norm(x, hparams.hidden_size, name="lnorm"), hparams.hidden_size, dilations_and_kernels, padding="SAME", name="residual_conv") y = tf.nn.dropout(y, 1.0 - hparams.dropout) x += y return x
Apache License 2.0
alvarobartt/investpy
investpy/indices.py
get_index_recent_data
python
def get_index_recent_data(index, country, as_json=False, order='ascending', interval='Daily'):
    if not index:
        raise ValueError("ERR#0047: index param is mandatory and should be a str.")

    if not isinstance(index, str):
        raise ValueError("ERR#0047: index param is mandatory and should be a str.")

    if country is None:
        raise ValueError("ERR#0039: country can not be None, it should be a str.")

    if country is not None and not isinstance(country, str):
        raise ValueError("ERR#0025: specified country value not valid.")

    if not isinstance(as_json, bool):
        raise ValueError("ERR#0002: as_json argument can just be True or False, bool type.")

    if order not in ['ascending', 'asc', 'descending', 'desc']:
        raise ValueError("ERR#0003: order argument can just be ascending (asc) or descending (desc), str type.")

    if not interval:
        raise ValueError("ERR#0073: interval value should be a str type and it can just be either 'Daily', 'Weekly' or 'Monthly'.")

    if not isinstance(interval, str):
        raise ValueError("ERR#0073: interval value should be a str type and it can just be either 'Daily', 'Weekly' or 'Monthly'.")

    interval = interval.lower()

    if interval not in ['daily', 'weekly', 'monthly']:
        raise ValueError("ERR#0073: interval value should be a str type and it can just be either 'Daily', 'Weekly' or 'Monthly'.")

    resource_package = 'investpy'
    resource_path = '/'.join(('resources', 'indices.csv'))
    if pkg_resources.resource_exists(resource_package, resource_path):
        indices = pd.read_csv(pkg_resources.resource_filename(resource_package, resource_path), keep_default_na=False)
    else:
        raise FileNotFoundError("ERR#0059: indices file not found or errored.")

    if indices is None:
        raise IOError("ERR#0037: indices not found or unable to retrieve.")

    country = unidecode(country.strip().lower())

    if country not in get_index_countries():
        raise RuntimeError("ERR#0034: country " + country + " not found, check if it is correct.")

    indices = indices[indices['country'] == country]

    index = unidecode(index.strip().lower())

    if index not in list(indices['name'].apply(unidecode).str.lower()):
        raise RuntimeError("ERR#0045: index " + index + " not found, check if it is correct.")

    full_name = indices.loc[(indices['name'].apply(unidecode).str.lower() == index).idxmax(), 'full_name']
    id_ = indices.loc[(indices['name'].apply(unidecode).str.lower() == index).idxmax(), 'id']
    name = indices.loc[(indices['name'].apply(unidecode).str.lower() == index).idxmax(), 'name']

    index_currency = indices.loc[(indices['name'].apply(unidecode).str.lower() == index).idxmax(), 'currency']

    header = full_name + ' Historical Data'

    params = {
        "curr_id": id_,
        "smlID": str(randint(1000000, 99999999)),
        "header": header,
        "interval_sec": interval.capitalize(),
        "sort_col": "date",
        "sort_ord": "DESC",
        "action": "historical_data"
    }

    head = {
        "User-Agent": random_user_agent(),
        "X-Requested-With": "XMLHttpRequest",
        "Accept": "text/html",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "keep-alive",
    }

    url = "https://www.investing.com/instruments/HistoricalDataAjax"

    req = requests.post(url, headers=head, data=params)

    if req.status_code != 200:
        raise ConnectionError("ERR#0015: error " + str(req.status_code) + ", try again later.")

    root_ = fromstring(req.text)
    path_ = root_.xpath(".//table[@id='curr_table']/tbody/tr")
    result = list()

    if path_:
        for elements_ in path_:
            if elements_.xpath(".//td")[0].text_content() == 'No results found':
                raise IndexError("ERR#0046: index information unavailable or not found.")

            info = []

            for nested_ in elements_.xpath(".//td"):
                info.append(nested_.get('data-real-value'))

            index_date = datetime.strptime(str(datetime.fromtimestamp(int(info[0]), tz=pytz.timezone('GMT')).date()), '%Y-%m-%d')

            index_close = float(info[1].replace(',', ''))
            index_open = float(info[2].replace(',', ''))
            index_high = float(info[3].replace(',', ''))
            index_low = float(info[4].replace(',', ''))

            index_volume = int(info[5])

            result.insert(len(result), Data(index_date, index_open, index_high, index_low, index_close, index_volume, index_currency, None))

        if order in ['ascending', 'asc']:
            result = result[::-1]
        elif order in ['descending', 'desc']:
            result = result

        if as_json is True:
            json_ = {
                'name': name,
                'recent': [value.index_as_json() for value in result]
            }

            return json.dumps(json_, sort_keys=False)
        elif as_json is False:
            df = pd.DataFrame.from_records([value.index_to_dict() for value in result])
            df.set_index('Date', inplace=True)

            return df
    else:
        raise RuntimeError("ERR#0004: data retrieval error while scraping.")
This function retrieves recent historical data from the introduced `index` from Investing
via Web Scraping. The resulting data can either be stored in a :obj:`pandas.DataFrame` or in a
:obj:`json` file, with `ascending` or `descending` order.

Args:
    index (:obj:`str`): name of the index to retrieve recent historical data from.
    country (:obj:`str`): name of the country from where the index is.
    as_json (:obj:`bool`, optional):
        optional argument to determine the format of the output data (:obj:`pandas.DataFrame` or :obj:`json`).
    order (:obj:`str`, optional):
        optional argument to define the order of the retrieved data (`ascending`, `asc` or `descending`, `desc`).
    interval (:obj:`str`, optional):
        value to define the historical data interval to retrieve, by default `Daily`, but it can also be `Weekly` or `Monthly`.

Returns:
    :obj:`pandas.DataFrame` or :obj:`json`:
        The function returns either a :obj:`pandas.DataFrame` or a :obj:`json` file containing the retrieved
        recent data from the specified index via argument. The dataset contains the open, high, low, close and
        volume values for the selected index on market days, additionally the currency value is returned.

        The returned data, in case we use default arguments, will look like::

            Date || Open | High | Low | Close | Volume | Currency
            -----||------|------|-----|-------|--------|----------
            xxxx || xxxx | xxxx | xxx | xxxxx | xxxxxx | xxxxxxxx

        but if we define `as_json=True`, then the output will be::

            {
                name: name,
                recent: [
                    {
                        date: dd/mm/yyyy,
                        open: x,
                        high: x,
                        low: x,
                        close: x,
                        volume: x,
                        currency: x
                    },
                    ...
                ]
            }

Raises:
    ValueError: raised if there was an argument error.
    IOError: raised if indices object/file was not found or unable to retrieve.
    RuntimeError: raised if the introduced index does not match any of the indexed ones.
    ConnectionError: raised if the GET request does not return a 200 status code.
    IndexError: raised if index information was unavailable or not found.

Examples:
    >>> data = investpy.get_index_recent_data(index='ibex 35', country='spain')
    >>> data.head()
                   Open     High      Low    Close   Volume Currency
    Date
    2019-08-26  12604.7  12646.3  12510.4  12621.3  4770000      EUR
    2019-08-27  12618.3  12723.3  12593.6  12683.8  8230000      EUR
    2019-08-28  12657.2  12697.2  12585.1  12642.5  7300000      EUR
    2019-08-29  12637.2  12806.6  12633.8  12806.6  5650000      EUR
    2019-08-30  12767.6  12905.9  12756.9  12821.6  6040000      EUR
https://github.com/alvarobartt/investpy/blob/2e37300902f6df122931bb373fb17609477664e8/investpy/indices.py#L147-L343
from datetime import datetime, date, timedelta

import pytz
import json
from random import randint

import pandas as pd
import pkg_resources
import requests
from unidecode import unidecode
from lxml.html import fromstring

from .utils.extra import random_user_agent
from .utils.data import Data

from .data.indices_data import indices_as_df, indices_as_list, indices_as_dict
from .data.indices_data import index_countries_as_list


def get_indices(country=None):
    return indices_as_df(country=country)


def get_indices_list(country=None):
    return indices_as_list(country=country)


def get_indices_dict(country=None, columns=None, as_json=False):
    return indices_as_dict(country=country, columns=columns, as_json=as_json)


def get_index_countries():
    return index_countries_as_list()
MIT License
mishbahr/django-connected
connected_accounts/providers/base.py
OAuth2Provider.request
python
def request(self, method, url, **kwargs):
    user_token = kwargs.pop('token', self.token)
    token, secret, expires_at = self.parse_raw_token(user_token)
    if token is not None:
        params = kwargs.get('params', {})
        params['access_token'] = token
        kwargs['params'] = params
    return super(OAuth2Provider, self).request(method, url, **kwargs)
Build remote url request. Constructs necessary auth.
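Sketch of the effect, assuming a concrete provider subclass and a stored raw token (both hypothetical):

provider = GoogleProvider(token=raw_token)  # hypothetical OAuth2Provider subclass
# the access token parsed from raw_token is sent as the access_token query parameter
resp = provider.request('get', 'https://www.googleapis.com/oauth2/v1/userinfo')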
https://github.com/mishbahr/django-connected/blob/7ec1f042786fef2eb6c00b1479ce47c90341ba81/connected_accounts/providers/base.py#L331-L339
from __future__ import unicode_literals import json import logging from datetime import timedelta from django.conf import settings from django.utils import timezone from django.utils.crypto import constant_time_compare, get_random_string from django.utils.encoding import force_text from requests.api import request from requests.exceptions import RequestException from requests_oauthlib import OAuth1 try: from urllib.parse import urlencode, parse_qs except ImportError: from urllib import urlencode from urlparse import parse_qs logger = logging.getLogger('connected_accounts') class ProviderAccount(object): def __init__(self, account, provider): self.account = account self.provider = provider def get_profile_url(self): return '' def get_avatar_url(self): return settings.STATIC_URL + 'img/connected_accounts/icon-user-default.jpg' def to_str(self): return self.provider.to_str() def extract_common_fields(self): return {} class BaseOAuthProvider(object): id = '' name = '' account_class = ProviderAccount authorization_url = '' access_token_url = '' profile_url = '' consumer_key = '' consumer_secret = '' scope = [] scope_separator = ' ' def __init__(self, token=''): self.token = token def wrap_account(self, account): return self.account_class(account, self) def get_access_token(self, request, callback=None): raise NotImplementedError('Defined in a sub-class') def refresh_access_token(self, raw_token): raise NotImplementedError('Defined in a sub-class') def get_profile_data(self, raw_token): try: response = self.request('get', self.profile_url, token=raw_token) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch user profile: {0}'.format(e)) return None else: return response.json() or response.text def get_redirect_args(self, request, callback): raise NotImplementedError('Defined in a sub-class') def get_redirect_url(self, request, callback, parameters=None): args = self.get_redirect_args(request, callback=callback) additional = parameters or {} args.update(additional) params = urlencode(args) return '{0}?{1}'.format(self.authorization_url, params) def parse_raw_token(self, raw_token): raise NotImplementedError('Defined in a sub-class') def request(self, method, url, **kwargs): return request(method, url, **kwargs) def extract_uid(self, data): return data.get('id', None) def get_scope(self, request): dynamic_scope = request.GET.get('scope', None) if dynamic_scope: self.scope.extend(dynamic_scope.split(',')) return self.scope def extract_extra_data(self, data): return data @property def session_key(self): raise NotImplementedError('Defined in a sub-class') @property def is_enabled(self): return self.consumer_key is not None and self.consumer_secret is not None def to_str(self): return force_text(self.name) class OAuthProvider(BaseOAuthProvider): request_token_url = '' def get_access_token(self, request, callback=None): raw_token = request.session.get(self.session_key, None) verifier = request.GET.get('oauth_verifier', None) if raw_token is not None and verifier is not None: data = {'oauth_verifier': verifier} callback = request.build_absolute_uri(callback or request.path) callback = force_text(callback) try: response = self.request( 'post', self.access_token_url, token=raw_token, data=data, oauth_callback=callback) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch access token: {0}'.format(e)) return None else: return response.text return None def get_request_token(self, request, callback): callback = 
force_text(request.build_absolute_uri(callback)) try: response = self.request('post', self.request_token_url, oauth_callback=callback) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch request token: {0}'.format(e)) return None else: return response.text def get_redirect_args(self, request, callback): callback = force_text(request.build_absolute_uri(callback)) raw_token = self.get_request_token(request, callback) token, secret, _ = self.parse_raw_token(raw_token) if token is not None and secret is not None: request.session[self.session_key] = raw_token args = { 'oauth_token': token, 'oauth_callback': callback, } scope = self.get_scope(request) if scope: args['scope'] = self.scope_separator.join(self.get_scope(request)) return args def parse_raw_token(self, raw_token): if raw_token is None: return (None, None, None) qs = parse_qs(raw_token) token = qs.get('oauth_token', [None])[0] token_secret = qs.get('oauth_token_secret', [None])[0] return (token, token_secret, None) def request(self, method, url, **kwargs): user_token = kwargs.pop('token', self.token) token, secret, _ = self.parse_raw_token(user_token) callback = kwargs.pop('oauth_callback', None) verifier = kwargs.get('data', {}).pop('oauth_verifier', None) oauth = OAuth1( resource_owner_key=token, resource_owner_secret=secret, client_key=self.consumer_key, client_secret=self.consumer_secret, verifier=verifier, callback_uri=callback, ) kwargs['auth'] = oauth return super(OAuthProvider, self).request(method, url, **kwargs) @property def session_key(self): return 'connected-accounts-{0}-request-token'.format(self.id) class OAuth2Provider(BaseOAuthProvider): supports_state = True expires_in_key = 'expires_in' auth_params = {} def check_application_state(self, request): stored = request.session.get(self.session_key, None) returned = request.GET.get('state', None) check = False if stored is not None: if returned is not None: check = constant_time_compare(stored, returned) else: logger.error('No state parameter returned by the provider.') else: logger.error('No state stored in the session.') return check def get_access_token(self, request, callback=None): callback = request.build_absolute_uri(callback or request.path) if not self.check_application_state(request): logger.error('Application state check failed.') return None if 'code' in request.GET: args = { 'client_id': self.consumer_key, 'redirect_uri': callback, 'client_secret': self.consumer_secret, 'code': request.GET['code'], 'grant_type': 'authorization_code', } else: logger.error('No code returned by the provider') return None try: response = self.request('post', self.access_token_url, data=args) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch access token: {0}'.format(e)) return None else: return response.text def refresh_access_token(self, raw_token, **kwargs): token, refresh_token, expires_at = self.parse_raw_token(raw_token) refresh_token = kwargs.pop('refresh_token', refresh_token) args = { 'client_id': self.consumer_key, 'client_secret': self.consumer_secret, 'grant_type': 'refresh_token', 'refresh_token': refresh_token, } try: response = self.request('post', self.access_token_url, data=args) response.raise_for_status() except RequestException as e: logger.error('Unable to fetch access token: {0}'.format(e)) return None else: return response.text def get_application_state(self, request, callback): return get_random_string(32) def get_auth_params(self, request, action=None): return self.auth_params def
get_redirect_args(self, request, callback): callback = request.build_absolute_uri(callback) args = { 'client_id': self.consumer_key, 'redirect_uri': callback, 'response_type': 'code', } scope = self.get_scope(request) if scope: args['scope'] = self.scope_separator.join(self.get_scope(request)) state = self.get_application_state(request, callback) if state is not None: args['state'] = state request.session[self.session_key] = state auth_params = self.get_auth_params(request) if auth_params: args.update(auth_params) return args def parse_raw_token(self, raw_token): if raw_token is None: return (None, None, None) try: token_data = json.loads(raw_token) except ValueError: qs = parse_qs(raw_token) token = qs.get('access_token', [None])[0] refresh_token = qs.get('refresh_token', [None])[0] expires_at = qs.get(self.expires_in_key, [None])[0] else: token = token_data.get('access_token', None) refresh_token = token_data.get('refresh_token', None) expires_at = token_data.get(self.expires_in_key, None) if expires_at: expires_at = timezone.now() + timedelta(seconds=int(expires_at)) return (token, refresh_token, expires_at)
BSD 3-Clause New or Revised License
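The `parse_raw_token` methods above handle providers that return token bodies either as JSON or as a querystring. A standalone sketch of that dual-format parsing, with made-up token strings:

```python
# Standalone sketch of the dual-format parsing in OAuth2Provider.parse_raw_token;
# the token strings below are hypothetical examples.
import json
from urllib.parse import parse_qs

def parse_access_token(raw_token):
    try:
        data = json.loads(raw_token)          # JSON body (most modern providers)
        return data.get('access_token')
    except ValueError:
        qs = parse_qs(raw_token)              # querystring body (older APIs)
        return qs.get('access_token', [None])[0]

print(parse_access_token('{"access_token": "abc", "expires_in": 3600}'))  # abc
print(parse_access_token('access_token=abc&expires_in=3600'))             # abc
```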
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/style_response.py
StyleResponse.request_id
python
def request_id(self): return self._request_id
Gets the request_id of this StyleResponse. # noqa: E501 Gets or sets the request Id. # noqa: E501 :return: The request_id of this StyleResponse. # noqa: E501 :rtype: str
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/style_response.py#L68-L76
import pprint import re import datetime import six import json class StyleResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'request_id': 'str', 'style': 'Style' } attribute_map = { 'request_id': 'RequestId', 'style': 'Style' } def __init__(self, request_id=None, style=None): self._request_id = None self._style = None self.discriminator = None if request_id is not None: self.request_id = request_id if style is not None: self.style = style @property
MIT License
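A minimal sketch of the read-only property pattern `request_id` uses, with a stand-in class rather than the generated SDK model:

```python
# Stand-in replicating the getter pattern above; not the generated SDK class.
class StyleResponseSketch(object):
    def __init__(self, request_id=None):
        self._request_id = None          # backing attribute, as in the model
        if request_id is not None:
            self._request_id = request_id

    @property
    def request_id(self):
        return self._request_id

resp = StyleResponseSketch(request_id='abc-123')  # placeholder id
print(resp.request_id)  # 'abc-123'
```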
frank-qlu/recruit
招聘爬虫/zlzpView/static/zlzpView/venv/Lib/site-packages/numpy/lib/nanfunctions.py
nancumsum
python
def nancumsum(a, axis=None, dtype=None, out=None): a, mask = _replace_nan(a, 0) return np.cumsum(a, axis=axis, dtype=dtype, out=out)
Return the cumulative sum of array elements over a given axis treating Not a Numbers (NaNs) as zero. The cumulative sum does not change when NaNs are encountered and leading NaNs are replaced by zeros. Zeros are returned for slices that are all-NaN or empty. .. versionadded:: 1.12.0 Parameters ---------- a : array_like Input array. axis : int, optional Axis along which the cumulative sum is computed. The default (None) is to compute the cumsum over the flattened array. dtype : dtype, optional Type of the returned array and of the accumulator in which the elements are summed. If `dtype` is not specified, it defaults to the dtype of `a`, unless `a` has an integer dtype with a precision less than that of the default platform integer. In that case, the default platform integer is used. out : ndarray, optional Alternative output array in which to place the result. It must have the same shape and buffer length as the expected output but the type will be cast if necessary. See `doc.ufuncs` (Section "Output arguments") for more details. Returns ------- nancumsum : ndarray. A new array holding the result is returned unless `out` is specified, in which case it is returned. The result has the same size as `a`, and the same shape as `a` if `axis` is not None or `a` is a 1-d array. See Also -------- numpy.cumsum : Cumulative sum across array propagating NaNs. isnan : Show which elements are NaN. Examples -------- >>> np.nancumsum(1) array([1]) >>> np.nancumsum([1]) array([1]) >>> np.nancumsum([1, np.nan]) array([ 1., 1.]) >>> a = np.array([[1, 2], [3, np.nan]]) >>> np.nancumsum(a) array([ 1., 3., 6., 6.]) >>> np.nancumsum(a, axis=0) array([[ 1., 2.], [ 4., 2.]]) >>> np.nancumsum(a, axis=1) array([[ 1., 3.], [ 3., 3.]])
https://github.com/frank-qlu/recruit/blob/0875fb1d2cfb581aaa8abc7a97880c0ce5bf6147/招聘爬虫/zlzpView/static/zlzpView/venv/Lib/site-packages/numpy/lib/nanfunctions.py#L692-L754
from __future__ import division, absolute_import, print_function import functools import warnings import numpy as np from numpy.lib import function_base from numpy.core import overrides array_function_dispatch = functools.partial( overrides.array_function_dispatch, module='numpy') __all__ = [ 'nansum', 'nanmax', 'nanmin', 'nanargmax', 'nanargmin', 'nanmean', 'nanmedian', 'nanpercentile', 'nanvar', 'nanstd', 'nanprod', 'nancumsum', 'nancumprod', 'nanquantile' ] def _replace_nan(a, val): a = np.array(a, subok=True, copy=True) if a.dtype == np.object_: mask = a != a elif issubclass(a.dtype.type, np.inexact): mask = np.isnan(a) else: mask = None if mask is not None: np.copyto(a, val, where=mask) return a, mask def _copyto(a, val, mask): if isinstance(a, np.ndarray): np.copyto(a, val, where=mask, casting='unsafe') else: a = a.dtype.type(val) return a def _remove_nan_1d(arr1d, overwrite_input=False): c = np.isnan(arr1d) s = np.nonzero(c)[0] if s.size == arr1d.size: warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=4) return arr1d[:0], True elif s.size == 0: return arr1d, overwrite_input else: if not overwrite_input: arr1d = arr1d.copy() enonan = arr1d[-s.size:][~c[-s.size:]] arr1d[s[:enonan.size]] = enonan return arr1d[:-s.size], True def _divide_by_count(a, b, out=None): with np.errstate(invalid='ignore', divide='ignore'): if isinstance(a, np.ndarray): if out is None: return np.divide(a, b, out=a, casting='unsafe') else: return np.divide(a, b, out=out, casting='unsafe') else: if out is None: return a.dtype.type(a / b) else: return np.divide(a, b, out=out, casting='unsafe') def _nanmin_dispatcher(a, axis=None, out=None, keepdims=None): return (a, out) @array_function_dispatch(_nanmin_dispatcher) def nanmin(a, axis=None, out=None, keepdims=np._NoValue): kwargs = {} if keepdims is not np._NoValue: kwargs['keepdims'] = keepdims if type(a) is np.ndarray and a.dtype != np.object_: res = np.fmin.reduce(a, axis=axis, out=out, **kwargs) if np.isnan(res).any(): warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) else: a, mask = _replace_nan(a, +np.inf) res = np.amin(a, axis=axis, out=out, **kwargs) if mask is None: return res mask = np.all(mask, axis=axis, **kwargs) if np.any(mask): res = _copyto(res, np.nan, mask) warnings.warn("All-NaN axis encountered", RuntimeWarning, stacklevel=2) return res def _nanmax_dispatcher(a, axis=None, out=None, keepdims=None): return (a, out) @array_function_dispatch(_nanmax_dispatcher) def nanmax(a, axis=None, out=None, keepdims=np._NoValue): kwargs = {} if keepdims is not np._NoValue: kwargs['keepdims'] = keepdims if type(a) is np.ndarray and a.dtype != np.object_: res = np.fmax.reduce(a, axis=axis, out=out, **kwargs) if np.isnan(res).any(): warnings.warn("All-NaN slice encountered", RuntimeWarning, stacklevel=2) else: a, mask = _replace_nan(a, -np.inf) res = np.amax(a, axis=axis, out=out, **kwargs) if mask is None: return res mask = np.all(mask, axis=axis, **kwargs) if np.any(mask): res = _copyto(res, np.nan, mask) warnings.warn("All-NaN axis encountered", RuntimeWarning, stacklevel=2) return res def _nanargmin_dispatcher(a, axis=None): return (a,) @array_function_dispatch(_nanargmin_dispatcher) def nanargmin(a, axis=None): a, mask = _replace_nan(a, np.inf) res = np.argmin(a, axis=axis) if mask is not None: mask = np.all(mask, axis=axis) if np.any(mask): raise ValueError("All-NaN slice encountered") return res def _nanargmax_dispatcher(a, axis=None): return (a,) @array_function_dispatch(_nanargmax_dispatcher) def nanargmax(a, 
axis=None): a, mask = _replace_nan(a, -np.inf) res = np.argmax(a, axis=axis) if mask is not None: mask = np.all(mask, axis=axis) if np.any(mask): raise ValueError("All-NaN slice encountered") return res def _nansum_dispatcher(a, axis=None, dtype=None, out=None, keepdims=None): return (a, out) @array_function_dispatch(_nansum_dispatcher) def nansum(a, axis=None, dtype=None, out=None, keepdims=np._NoValue): a, mask = _replace_nan(a, 0) return np.sum(a, axis=axis, dtype=dtype, out=out, keepdims=keepdims) def _nanprod_dispatcher(a, axis=None, dtype=None, out=None, keepdims=None): return (a, out) @array_function_dispatch(_nanprod_dispatcher) def nanprod(a, axis=None, dtype=None, out=None, keepdims=np._NoValue): a, mask = _replace_nan(a, 1) return np.prod(a, axis=axis, dtype=dtype, out=out, keepdims=keepdims) def _nancumsum_dispatcher(a, axis=None, dtype=None, out=None): return (a, out) @array_function_dispatch(_nancumsum_dispatcher)
Apache License 2.0
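The implementation is just `_replace_nan(a, 0)` followed by a plain `cumsum`; the equivalence can be checked directly with public numpy calls:

```python
# Verifying the NaN-as-zero behaviour with public numpy APIs (numpy >= 1.12).
import numpy as np

a = np.array([1.0, np.nan, 2.0, np.nan])
filled = np.where(np.isnan(a), 0.0, a)   # what _replace_nan(a, 0) produces
print(np.cumsum(filled))                 # [1. 1. 3. 3.]
print(np.nancumsum(a))                   # [1. 1. 3. 3.] -- same result
```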
facebook/fai-pep
benchmarking/platforms/device_manager.py
DeviceManager._updateHeartbeats
python
def _updateHeartbeats(self): claimer_id = self.args.claimer_id hashes = [] for k in self.lab_devices: for hash in self.lab_devices[k]: if self.lab_devices[k][hash]["live"]: hashes.append(hash) hashes = ",".join(hashes) self.db.updateHeartbeats(claimer_id, hashes)
Update device heartbeats for all devices which are marked "live" in lab devices.
https://github.com/facebook/fai-pep/blob/50f58e17f52814e7e4bf0c91d2dee9429b80bb3f/benchmarking/platforms/device_manager.py#L147-L156
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import datetime import json import time from threading import Thread, RLock from typing import Dict from bridge.db import DBDriver from get_connected_devices import GetConnectedDevices from platforms.android.adb import ADB from platforms.platforms import getDeviceList from reboot_device import reboot as reboot_device from utils.custom_logger import getLogger REBOOT_INTERVAL = datetime.timedelta(hours=8) MINIMUM_DM_INTERVAL = 10 DEFAULT_DM_INTERVAL = 10 def getDevicesString(devices): device_list = [ d["kind"] + "|" + d["hash"] + "|" + d["name"] + "|" + d["abi"] + "|" + d["os"] + "|" + ("1" if d["available"] else "0" if d["live"] else "2") for d in devices ] devices_str = ",".join(device_list) return devices_str def valid_dm_interval(arg) -> int: try: value = int(arg) if value < MINIMUM_DM_INTERVAL: raise ValueError() except ValueError: getLogger().warning( "Logging interval must be specified as an integer in seconds >= {}. Using default {}s.".format( MINIMUM_DM_INTERVAL, DEFAULT_DM_INTERVAL ) ) value = DEFAULT_DM_INTERVAL return value class DeviceManager(object): def __init__(self, args: Dict, db: DBDriver): self.args = args self.db: DBDriver = db self.lab_devices = {} self.online_devices = None self._initializeDevices() self.running = True self.device_monitor_interval = self.args.device_monitor_interval self.device_monitor = Thread(target=self._runDeviceMonitor) self.device_monitor.start() if self.args.usb_hub_device_mapping: from utils.usb_controller import USBController self.usb_controller = USBController(self.args.usb_hub_device_mapping) else: self.usb_controller = None def getLabDevices(self): return self.lab_devices def _runDeviceMonitor(self): while self.running: if self.args.platform.startswith( "android" ) or self.args.platform.startswith("ios"): self._checkDevices() self._updateHeartbeats() time.sleep(self.device_monitor_interval) def _checkDevices(self): try: online_hashes = getDeviceList(self.args, silent=True) offline_devices = [ device for device in self.online_devices if device["hash"] not in online_hashes ] new_devices = [ h for h in online_hashes if h not in [p["hash"] for p in self.online_devices] ] if offline_devices: for offline_device in offline_devices: lab_device = self.lab_devices[offline_device["kind"]][ offline_device["hash"] ] usb_disabled = False if self.usb_controller and not self.usb_controller.active.get( lab_device["hash"], True ): usb_disabled = True if "rebooting" not in lab_device and not usb_disabled: getLogger().error( "Device {} has become unavailable.".format(offline_device) ) self._disableDevice(offline_device) if new_devices: devices = ",".join(new_devices) devices = self._getDevices(devices) if devices: for d in devices: self._enableDevice(d) if d["hash"] not in [ device["hash"] for device in self.online_devices ]: self.online_devices.append(d) getLogger().info("New device added: {}".format(d)) except BaseException: getLogger().exception("Error while checking devices.")
Apache License 2.0
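The hash-collection loop in `_updateHeartbeats` reduces to a filter-and-join over the nested `lab_devices` mapping. A standalone sketch with hypothetical device data:

```python
# Sketch of the live-hash collection in _updateHeartbeats; data is hypothetical.
lab_devices = {
    'android': {'hash-a': {'live': True}, 'hash-b': {'live': False}},
    'ios': {'hash-c': {'live': True}},
}

hashes = ','.join(
    h
    for kind in lab_devices
    for h, entry in lab_devices[kind].items()
    if entry['live']
)
print(hashes)  # 'hash-a,hash-c' -- the string passed to db.updateHeartbeats
```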
kustomzone/fuzium
core/src/lib/pyelliptic/openssl.py
_OpenSSL.__init__
python
def __init__(self, library): self._lib = ctypes.CDLL(library) self.pointer = ctypes.pointer self.c_int = ctypes.c_int self.byref = ctypes.byref self.create_string_buffer = ctypes.create_string_buffer self.BN_new = self._lib.BN_new self.BN_new.restype = ctypes.c_void_p self.BN_new.argtypes = [] self.BN_free = self._lib.BN_free self.BN_free.restype = None self.BN_free.argtypes = [ctypes.c_void_p] self.BN_num_bits = self._lib.BN_num_bits self.BN_num_bits.restype = ctypes.c_int self.BN_num_bits.argtypes = [ctypes.c_void_p] self.BN_bn2bin = self._lib.BN_bn2bin self.BN_bn2bin.restype = ctypes.c_int self.BN_bn2bin.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.BN_bin2bn = self._lib.BN_bin2bn self.BN_bin2bn.restype = ctypes.c_void_p self.BN_bin2bn.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] self.EC_KEY_free = self._lib.EC_KEY_free self.EC_KEY_free.restype = None self.EC_KEY_free.argtypes = [ctypes.c_void_p] self.EC_KEY_new_by_curve_name = self._lib.EC_KEY_new_by_curve_name self.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p self.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int] self.EC_KEY_generate_key = self._lib.EC_KEY_generate_key self.EC_KEY_generate_key.restype = ctypes.c_int self.EC_KEY_generate_key.argtypes = [ctypes.c_void_p] self.EC_KEY_check_key = self._lib.EC_KEY_check_key self.EC_KEY_check_key.restype = ctypes.c_int self.EC_KEY_check_key.argtypes = [ctypes.c_void_p] self.EC_KEY_get0_private_key = self._lib.EC_KEY_get0_private_key self.EC_KEY_get0_private_key.restype = ctypes.c_void_p self.EC_KEY_get0_private_key.argtypes = [ctypes.c_void_p] self.EC_KEY_get0_public_key = self._lib.EC_KEY_get0_public_key self.EC_KEY_get0_public_key.restype = ctypes.c_void_p self.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p] self.EC_KEY_get0_group = self._lib.EC_KEY_get0_group self.EC_KEY_get0_group.restype = ctypes.c_void_p self.EC_KEY_get0_group.argtypes = [ctypes.c_void_p] self.EC_POINT_get_affine_coordinates_GFp = self._lib.EC_POINT_get_affine_coordinates_GFp self.EC_POINT_get_affine_coordinates_GFp.restype = ctypes.c_int self.EC_POINT_get_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key self.EC_KEY_set_private_key.restype = ctypes.c_int self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.EC_KEY_set_public_key = self._lib.EC_KEY_set_public_key self.EC_KEY_set_public_key.restype = ctypes.c_int self.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.EC_KEY_set_group = self._lib.EC_KEY_set_group self.EC_KEY_set_group.restype = ctypes.c_int self.EC_KEY_set_group.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.EC_POINT_set_affine_coordinates_GFp = self._lib.EC_POINT_set_affine_coordinates_GFp self.EC_POINT_set_affine_coordinates_GFp.restype = ctypes.c_int self.EC_POINT_set_affine_coordinates_GFp.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EC_POINT_new = self._lib.EC_POINT_new self.EC_POINT_new.restype = ctypes.c_void_p self.EC_POINT_new.argtypes = [ctypes.c_void_p] self.EC_POINT_free = self._lib.EC_POINT_free self.EC_POINT_free.restype = None self.EC_POINT_free.argtypes = [ctypes.c_void_p] self.BN_CTX_free = self._lib.BN_CTX_free self.BN_CTX_free.restype = None self.BN_CTX_free.argtypes = [ctypes.c_void_p] self.EC_POINT_mul = self._lib.EC_POINT_mul self.EC_POINT_mul.restype = None self.EC_POINT_mul.argtypes = [ctypes.c_void_p, 
ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EC_KEY_set_private_key = self._lib.EC_KEY_set_private_key self.EC_KEY_set_private_key.restype = ctypes.c_int self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p self._lib.ECDH_OpenSSL.argtypes = [] self.BN_CTX_new = self._lib.BN_CTX_new self._lib.BN_CTX_new.restype = ctypes.c_void_p self._lib.BN_CTX_new.argtypes = [] self.ECDH_set_method = self._lib.ECDH_set_method self._lib.ECDH_set_method.restype = ctypes.c_int self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.ECDH_compute_key = self._lib.ECDH_compute_key self.ECDH_compute_key.restype = ctypes.c_int self.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] self.EVP_CipherInit_ex = self._lib.EVP_CipherInit_ex self.EVP_CipherInit_ex.restype = ctypes.c_int self.EVP_CipherInit_ex.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EVP_CIPHER_CTX_new = self._lib.EVP_CIPHER_CTX_new self.EVP_CIPHER_CTX_new.restype = ctypes.c_void_p self.EVP_CIPHER_CTX_new.argtypes = [] self.EVP_aes_128_cfb128 = self._lib.EVP_aes_128_cfb128 self.EVP_aes_128_cfb128.restype = ctypes.c_void_p self.EVP_aes_128_cfb128.argtypes = [] self.EVP_aes_256_cfb128 = self._lib.EVP_aes_256_cfb128 self.EVP_aes_256_cfb128.restype = ctypes.c_void_p self.EVP_aes_256_cfb128.argtypes = [] self.EVP_aes_128_cbc = self._lib.EVP_aes_128_cbc self.EVP_aes_128_cbc.restype = ctypes.c_void_p self.EVP_aes_128_cbc.argtypes = [] self.EVP_aes_256_cbc = self._lib.EVP_aes_256_cbc self.EVP_aes_256_cbc.restype = ctypes.c_void_p self.EVP_aes_256_cbc.argtypes = [] self.EVP_aes_128_ofb = self._lib.EVP_aes_128_ofb self.EVP_aes_128_ofb.restype = ctypes.c_void_p self.EVP_aes_128_ofb.argtypes = [] self.EVP_aes_256_ofb = self._lib.EVP_aes_256_ofb self.EVP_aes_256_ofb.restype = ctypes.c_void_p self.EVP_aes_256_ofb.argtypes = [] self.EVP_bf_cbc = self._lib.EVP_bf_cbc self.EVP_bf_cbc.restype = ctypes.c_void_p self.EVP_bf_cbc.argtypes = [] self.EVP_bf_cfb64 = self._lib.EVP_bf_cfb64 self.EVP_bf_cfb64.restype = ctypes.c_void_p self.EVP_bf_cfb64.argtypes = [] self.EVP_rc4 = self._lib.EVP_rc4 self.EVP_rc4.restype = ctypes.c_void_p self.EVP_rc4.argtypes = [] self.EVP_CIPHER_CTX_cleanup = self._lib.EVP_CIPHER_CTX_cleanup self.EVP_CIPHER_CTX_cleanup.restype = ctypes.c_int self.EVP_CIPHER_CTX_cleanup.argtypes = [ctypes.c_void_p] self.EVP_CIPHER_CTX_free = self._lib.EVP_CIPHER_CTX_free self.EVP_CIPHER_CTX_free.restype = None self.EVP_CIPHER_CTX_free.argtypes = [ctypes.c_void_p] self.EVP_CipherUpdate = self._lib.EVP_CipherUpdate self.EVP_CipherUpdate.restype = ctypes.c_int self.EVP_CipherUpdate.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int] self.EVP_CipherFinal_ex = self._lib.EVP_CipherFinal_ex self.EVP_CipherFinal_ex.restype = ctypes.c_int self.EVP_CipherFinal_ex.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EVP_DigestInit = self._lib.EVP_DigestInit self.EVP_DigestInit.restype = ctypes.c_int self._lib.EVP_DigestInit.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.EVP_DigestInit_ex = self._lib.EVP_DigestInit_ex self.EVP_DigestInit_ex.restype = ctypes.c_int self._lib.EVP_DigestInit_ex.argtypes = 3 * [ctypes.c_void_p] self.EVP_DigestUpdate = self._lib.EVP_DigestUpdate self.EVP_DigestUpdate.restype = ctypes.c_int self.EVP_DigestUpdate.argtypes = [ctypes.c_void_p, ctypes.c_void_p, 
ctypes.c_int] self.EVP_DigestFinal = self._lib.EVP_DigestFinal self.EVP_DigestFinal.restype = ctypes.c_int self.EVP_DigestFinal.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EVP_DigestFinal_ex = self._lib.EVP_DigestFinal_ex self.EVP_DigestFinal_ex.restype = ctypes.c_int self.EVP_DigestFinal_ex.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.EVP_ecdsa = self._lib.EVP_ecdsa self._lib.EVP_ecdsa.restype = ctypes.c_void_p self._lib.EVP_ecdsa.argtypes = [] self.ECDSA_sign = self._lib.ECDSA_sign self.ECDSA_sign.restype = ctypes.c_int self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] self.ECDSA_verify = self._lib.ECDSA_verify self.ECDSA_verify.restype = ctypes.c_int self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] self.EVP_MD_CTX_create = self._lib.EVP_MD_CTX_create self.EVP_MD_CTX_create.restype = ctypes.c_void_p self.EVP_MD_CTX_create.argtypes = [] self.EVP_MD_CTX_init = self._lib.EVP_MD_CTX_init self.EVP_MD_CTX_init.restype = None self.EVP_MD_CTX_init.argtypes = [ctypes.c_void_p] self.EVP_MD_CTX_destroy = self._lib.EVP_MD_CTX_destroy self.EVP_MD_CTX_destroy.restype = None self.EVP_MD_CTX_destroy.argtypes = [ctypes.c_void_p] self.RAND_bytes = self._lib.RAND_bytes self.RAND_bytes.restype = ctypes.c_int self.RAND_bytes.argtypes = [ctypes.c_void_p, ctypes.c_int] self.EVP_sha256 = self._lib.EVP_sha256 self.EVP_sha256.restype = ctypes.c_void_p self.EVP_sha256.argtypes = [] self.i2o_ECPublicKey = self._lib.i2o_ECPublicKey self.i2o_ECPublicKey.restype = ctypes.c_void_p self.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.EVP_sha512 = self._lib.EVP_sha512 self.EVP_sha512.restype = ctypes.c_void_p self.EVP_sha512.argtypes = [] self.HMAC = self._lib.HMAC self.HMAC.restype = ctypes.c_void_p self.HMAC.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p] try: self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC except: self.PKCS5_PBKDF2_HMAC = self._lib.PKCS5_PBKDF2_HMAC_SHA1 self.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int self.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] self._set_ciphers() self._set_curves()
Build the wrapper
https://github.com/kustomzone/fuzium/blob/e98d470370e8af9d5c5e9147b18a33745e8417ab/core/src/lib/pyelliptic/openssl.py#L40-L316
import sys import ctypes import logging import os OpenSSL = None class CipherName: def __init__(self, name, pointer, blocksize): self._name = name self._pointer = pointer self._blocksize = blocksize def __str__(self): return "Cipher : " + self._name + " | Blocksize : " + str(self._blocksize) + " | Function pointer : " + str(self._pointer) def get_pointer(self): return self._pointer() def get_name(self): return self._name def get_blocksize(self): return self._blocksize class _OpenSSL:
MIT License
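Every block in `_OpenSSL.__init__` repeats the same ctypes idiom: look up a symbol on the loaded library, then pin its `restype` and `argtypes`. A self-contained sketch of that idiom against libc's `strlen`, so it runs without OpenSSL (assumes a POSIX system where `find_library('c')` resolves):

```python
# The restype/argtypes declaration idiom, shown on libc's strlen.
import ctypes
import ctypes.util

libc = ctypes.CDLL(ctypes.util.find_library('c'))  # assumes POSIX
libc.strlen.restype = ctypes.c_size_t
libc.strlen.argtypes = [ctypes.c_char_p]

print(libc.strlen(b'openssl'))  # 7
```

Declaring the prototypes up front lets ctypes check argument counts and convert return values correctly, instead of defaulting everything to `int`.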
google/clusterfuzz
src/appengine/handlers/cron/helpers/bot_manager.py
InstanceGroup.delete
python
def delete(self): self.execute(self.compute.instanceGroupManagers().delete( project=self.project_id, zone=self.zone, instanceGroupManager=self.name))
Delete this instance group.
https://github.com/google/clusterfuzz/blob/e9e105d66f009356c4f3fe9ae7873ffff126b234/src/appengine/handlers/cron/helpers/bot_manager.py#L268-L272
import time import google_auth_httplib2 import googleapiclient from googleapiclient.discovery import build import httplib2 from clusterfuzz._internal.base import retry from clusterfuzz._internal.google_cloud_utils import credentials RETRY_COUNT = 8 RETRY_DELAY = 4 REQUEST_TIMEOUT = 180 _SCOPES = [ 'https://www.googleapis.com/auth/cloud-platform', ] class BotManagerException(Exception): class OperationError(BotManagerException): class RequestError(BotManagerException): class NotFoundError(RequestError): class AlreadyExistsError(RequestError): class RetryableError(RequestError): class BotManager(object): def __init__(self, project_id, zone): self.project_id = project_id self.zone = zone creds = credentials.get_default(scopes=_SCOPES)[0] http = google_auth_httplib2.AuthorizedHttp( creds, http=httplib2.Http(timeout=REQUEST_TIMEOUT)) self.compute = build('compute', 'v1', http=http, cache_discovery=False) def instance_group(self, name): return InstanceGroup(name, self) def instance_template(self, name): return InstanceTemplate(name, self) class Resource(object): _OPERATION_POLL_SECONDS = 5 def __init__(self, name, manager): self.name = name self.manager = manager @property def compute(self): return self.manager.compute @property def project_id(self): return self.manager.project_id @property def zone(self): return self.manager.zone def get(self): raise NotImplementedError def exists(self): try: self.get() return True except NotFoundError: return False def _wait_for_operation(self, operation): while True: if operation['status'] == 'DONE': if 'error' in operation: raise OperationError(operation['error']) return operation time.sleep(self._OPERATION_POLL_SECONDS) if 'zone' in operation: operation = self.compute.zoneOperations().get( project=self.project_id, zone=self.zone, operation=operation['name']).execute() else: operation = self.compute.globalOperations().get( project=self.project_id, operation=operation['name']).execute() def _identity(self, response): return response @retry.wrap( RETRY_COUNT, RETRY_DELAY, 'handlers.cron.helpers.bot_manager.Resource.execute', exception_type=RetryableError) def execute(self, request, result_proc=None): if result_proc is None: result_proc = self._wait_for_operation try: response = request.execute() except googleapiclient.errors.HttpError as e: if e.resp.status in [400, 403, 500, 503]: raise RetryableError(str(e)) if e.resp.status == 404: raise NotFoundError(str(e)) if e.resp.status == 409: raise AlreadyExistsError(str(e)) raise RequestError(str(e)) return result_proc(response) class InstanceGroup(Resource): MIN_INSTANCES_RATIO = 0.8 MAX_ERROR_RATIO = 1.0 - MIN_INSTANCES_RATIO def _wait_for_instances(self): while True: num_instances = 0 instances_ready = 0 errors = [] for instance in self.list_managed_instances(): num_instances += 1 if instance['currentAction'] == 'NONE': instances_ready += 1 elif 'lastAttempt' in instance and 'errors' in instance['lastAttempt']: errors.append(instance['lastAttempt']['errors']) if instances_ready >= max(1, num_instances * self.MIN_INSTANCES_RATIO): return if len(errors) > num_instances * self.MAX_ERROR_RATIO: raise OperationError(errors) time.sleep(1) def _handle_size_change(self, response): self._wait_for_operation(response) self._wait_for_instances() def get(self): return self.execute( self.compute.instanceGroupManagers().get( project=self.project_id, zone=self.zone, instanceGroupManager=self.name), result_proc=self._identity) def list_managed_instances(self, instance_filter=None): next_page_token = None while True: response = 
self.execute( self.compute.instanceGroupManagers().listManagedInstances( project=self.project_id, zone=self.zone, instanceGroupManager=self.name, pageToken=next_page_token, filter=instance_filter), result_proc=self._identity) for instance in response['managedInstances']: if instance['currentAction'] != 'DELETING': yield instance if 'nextPageToken' in response: next_page_token = response['nextPageToken'] else: break def create(self, base_instance_name, instance_template, size=0, wait_for_instances=True): manager_body = { 'baseInstanceName': base_instance_name, 'instanceTemplate': 'global/instanceTemplates/' + instance_template, 'name': self.name, 'targetSize': size, } result_proc = None if wait_for_instances: result_proc = self._handle_size_change self.execute( self.compute.instanceGroupManagers().insert( project=self.project_id, zone=self.zone, body=manager_body), result_proc=result_proc) def resize(self, new_size, wait_for_instances=True): result_proc = None if wait_for_instances: result_proc = self._handle_size_change self.execute( self.compute.instanceGroupManagers().resize( project=self.project_id, zone=self.zone, instanceGroupManager=self.name, size=new_size), result_proc=result_proc)
Apache License 2.0
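Hypothetical usage of `InstanceGroup.delete` through the manager; the import path is assumed from the file path above, and project, zone, and group names are placeholders (default GCP credentials must be configured):

```python
# Hypothetical usage; import path, project, zone, and group name are placeholders.
from handlers.cron.helpers.bot_manager import BotManager

manager = BotManager(project_id='my-project', zone='us-central1-a')
group = manager.instance_group('clusterfuzz-bots')
if group.exists():
    group.delete()  # execute() blocks until the zonal operation reports DONE
```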
shivamsarodia/shivyc
shivyc/asm_gen.py
NodeGraph.prefs
python
def prefs(self, n): return self._pref[n]
Return the list of nodes to which n has a preference edge.
https://github.com/shivamsarodia/shivyc/blob/e7d72eff237e1ef49ec70333497348baf86be425/shivyc/asm_gen.py#L217-L219
import itertools import shivyc.asm_cmds as asm_cmds import shivyc.spots as spots from shivyc.spots import Spot, RegSpot, MemSpot, LiteralSpot class ASMCode: def __init__(self): self.lines = [] self.comm = [] self.globals = [] self.data = [] self.string_literals = [] def add(self, cmd): self.lines.append(cmd) label_num = 0 @staticmethod def get_label(): ASMCode.label_num += 1 return f"__shivyc_label{ASMCode.label_num}" def add_global(self, name): self.globals.append(f"\t.global {name}") def add_data(self, name, size, init): self.data.append(f"{name}:") size_strs = {1: "byte", 2: "word", 4: "int", 8: "quad"} if init: self.data.append(f"\t.{size_strs[size]} {init}") else: self.data.append(f"\t.zero {size}") def add_comm(self, name, size, local): if local: self.comm.append(f"\t.local {name}") self.comm.append(f"\t.comm {name} {size}") def add_string_literal(self, name, chars): self.string_literals.append(f"{name}:") data = ",".join(str(char) for char in chars) self.string_literals.append(f"\t.byte {data}") def full_code(self): header = ["\t.intel_syntax noprefix"] header += self.comm if self.string_literals or self.data: header += ["\t.section .data"] header += self.data header += self.string_literals header += [""] header += ["\t.section .text"] + self.globals header += [str(line) for line in self.lines] return "\n".join(header + ["\t.att_syntax noprefix", ""]) class NodeGraph: def __init__(self, nodes=None): self._real_nodes = nodes or [] self._all_nodes = self._real_nodes[:] self._conf = {n: [] for n in self._all_nodes} self._pref = {n: [] for n in self._all_nodes} def is_node(self, n): return n in self._conf and n in self._pref def add_dummy_node(self, v): self._all_nodes.append(v) self._conf[v] = [] self._pref[v] = [] for n in self._all_nodes: if n not in self._real_nodes and n != v: self.add_conflict(n, v) def add_conflict(self, n1, n2): if n2 not in self._conf[n1]: self._conf[n1].append(n2) if n1 not in self._conf[n2]: self._conf[n2].append(n1) def add_pref(self, n1, n2): if n2 not in self._pref[n1]: self._pref[n1].append(n2) if n1 not in self._pref[n2]: self._pref[n2].append(n1) def pop(self, n): del self._conf[n] del self._pref[n] if n in self._real_nodes: self._real_nodes.remove(n) self._all_nodes.remove(n) for v in self._conf: if n in self._conf[v]: self._conf[v].remove(n) for v in self._pref: if n in self._pref[v]: self._pref[v].remove(n) return n def merge(self, n1, n2): total_conf = self._conf[n1][:] for c in self._conf[n2]: if c not in total_conf: total_conf.append(c) self._conf[n1] = total_conf for c in self._conf[n1]: if n2 in self._conf[c]: self._conf[c].remove(n2) if n1 not in self._conf[c]: self._conf[c].append(n1) total_pref = self._pref[n1][:] for p in self._pref[n2]: if p not in total_pref: total_pref.append(p) if n1 in total_pref: total_pref.remove(n1) if n2 in total_pref: total_pref.remove(n2) self._pref[n1] = total_pref for c in self._pref[n1]: if n2 in self._pref[c]: self._pref[c].remove(n2) if n1 not in self._pref[c]: self._pref[c].append(n1) del self._conf[n2] del self._pref[n2] self._real_nodes.remove(n2) self._all_nodes.remove(n2) def remove_pref(self, n1, n2): self._pref[n1].remove(n2) self._pref[n2].remove(n1)
MIT License
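`prefs(n)` is a plain adjacency lookup over the symmetric `_pref` map maintained by `add_pref`. A standalone sketch of that bookkeeping:

```python
# Standalone sketch of the symmetric preference-edge map prefs() reads from.
pref = {'a': [], 'b': [], 'c': []}

def add_pref(n1, n2):
    if n2 not in pref[n1]:
        pref[n1].append(n2)
    if n1 not in pref[n2]:
        pref[n2].append(n1)

add_pref('a', 'b')
add_pref('a', 'c')
print(pref['a'])  # ['b', 'c'] -- what prefs('a') would return
print(pref['b'])  # ['a']      -- the edge is stored in both directions
```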
steven-lang/dafne
dafne/modeling/backbone/fpn.py
build_dafne_resnet_fpn_backbone
python
def build_dafne_resnet_fpn_backbone(cfg, input_shape: ShapeSpec): if cfg.MODEL.BACKBONE.ANTI_ALIAS: bottom_up = build_resnet_lpf_backbone(cfg, input_shape) elif cfg.MODEL.RESNETS.DEFORM_INTERVAL > 1: bottom_up = build_resnet_interval_backbone(cfg, input_shape) else: bottom_up = build_resnet_backbone(cfg, input_shape) in_features = cfg.MODEL.FPN.IN_FEATURES out_channels = cfg.MODEL.FPN.OUT_CHANNELS top_levels = cfg.MODEL.DAFNE.TOP_LEVELS in_channels_top = out_channels if top_levels == 2: top_block = LastLevelP6P7(in_channels_top, out_channels, "p5") elif top_levels == 1: top_block = LastLevelP6(in_channels_top, out_channels, "p5") elif top_levels == 0: top_block = None backbone = FPN( bottom_up=bottom_up, in_features=in_features, out_channels=out_channels, norm=cfg.MODEL.FPN.NORM, top_block=top_block, fuse_type=cfg.MODEL.FPN.FUSE_TYPE, ) return backbone
Args: cfg: a detectron2 CfgNode Returns: backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.
https://github.com/steven-lang/dafne/blob/b5eb910c29d6fc8d62971d53ee31dc1321cc236f/dafne/modeling/backbone/fpn.py#L59-L91
from detectron2.modeling.backbone.resnet import BasicStem, DeformBottleneckBlock, ResNet from torch import nn import torch.nn.functional as F import fvcore.nn.weight_init as weight_init from detectron2.modeling.backbone import FPN, build_resnet_backbone from detectron2.layers import ShapeSpec from detectron2.modeling.backbone.build import BACKBONE_REGISTRY from .resnet_lpf import build_resnet_lpf_backbone from .resnet_interval import build_resnet_interval_backbone class LastLevelP6P7(nn.Module): def __init__(self, in_channels, out_channels, in_features="res5"): super().__init__() self.num_levels = 2 self.in_feature = in_features self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1) self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1) for module in [self.p6, self.p7]: weight_init.c2_xavier_fill(module) self.act = nn.ReLU() def forward(self, x): p6 = self.p6(x) p7 = self.p7(self.act(p6)) return [p6, p7] class LastLevelP6(nn.Module): def __init__(self, in_channels, out_channels, in_features="res5"): super().__init__() self.num_levels = 1 self.in_feature = in_features self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1) for module in [self.p6]: weight_init.c2_xavier_fill(module) def forward(self, x): p6 = self.p6(x) return [p6] @BACKBONE_REGISTRY.register()
MIT License
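Assuming the module above is importable and torch is installed, a quick shape check of the extra pyramid levels produced by `LastLevelP6P7` (the input tensor size is arbitrary):

```python
# Shape check for the P6/P7 head; the input tensor size is arbitrary.
import torch
from dafne.modeling.backbone.fpn import LastLevelP6P7

head = LastLevelP6P7(in_channels=256, out_channels=256)
p6, p7 = head(torch.randn(1, 256, 32, 32))
print(p6.shape)  # torch.Size([1, 256, 16, 16]) -- one stride-2 conv
print(p7.shape)  # torch.Size([1, 256, 8, 8])   -- a second stride-2 conv
```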
robinhood/aiokafka
aiokafka/consumer/group_coordinator.py
GroupCoordinator.check_errors
python
def check_errors(self): if self._coordination_task.done(): self._coordination_task.result() if self._error_consumed_fut is not None: self._error_consumed_fut.set_result(None) self._error_consumed_fut = None if self._pending_exception is not None: exc = self._pending_exception self._pending_exception = None raise exc
Check that the coordinator is healthy and that no authorization or other unrecoverable errors occurred
https://github.com/robinhood/aiokafka/blob/10e4119104ee32b726ab750b3e7120a85d2c7f25/aiokafka/consumer/group_coordinator.py#L297-L309
import asyncio import collections import logging import copy from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor from kafka.coordinator.protocol import ConsumerProtocol from kafka.protocol.commit import ( OffsetCommitRequest_v2 as OffsetCommitRequest, OffsetFetchRequest_v1 as OffsetFetchRequest) from kafka.protocol.group import ( HeartbeatRequest, JoinGroupRequest, LeaveGroupRequest, SyncGroupRequest) import aiokafka.errors as Errors from aiokafka.structs import OffsetAndMetadata, TopicPartition from aiokafka.client import ConnectionGroup, CoordinationType from aiokafka.util import ensure_future, create_future log = logging.getLogger(__name__) UNKNOWN_OFFSET = -1 class BaseCoordinator(object): def __init__(self, client, subscription, *, loop, exclude_internal_topics=True): self._loop = loop self._client = client self._exclude_internal_topics = exclude_internal_topics self._subscription = subscription self._metadata_snapshot = {} self._cluster = client.cluster self._handle_metadata_update(self._cluster) self._cluster.add_listener(self._handle_metadata_update) def _handle_metadata_update(self, cluster): subscription = self._subscription if subscription.subscribed_pattern: topics = [] for topic in cluster.topics(self._exclude_internal_topics): if subscription.subscribed_pattern.match(topic): topics.append(topic) if subscription.subscription is None or set(topics) != subscription.subscription.topics: subscription.subscribe_from_pattern(topics) if subscription.partitions_auto_assigned() and self._group_subscription is not None: metadata_snapshot = self._get_metadata_snapshot() if self._metadata_snapshot != metadata_snapshot: log.debug("Metadata for topic has changed from %s to %s. ", self._metadata_snapshot, metadata_snapshot) self._metadata_snapshot = metadata_snapshot self._on_metadata_change() def _get_metadata_snapshot(self): partitions_per_topic = {} for topic in self._group_subscription: partitions = self._cluster.partitions_for_topic(topic) or [] partitions_per_topic[topic] = len(partitions) return partitions_per_topic class NoGroupCoordinator(BaseCoordinator): def __init__(self, *args, **kw): super().__init__(*args, **kw) self._reset_committed_task = ensure_future( self._reset_committed_routine(), loop=self._loop) def _on_metadata_change(self): self.assign_all_partitions() def assign_all_partitions(self, check_unknown=False): partitions = [] for topic in self._subscription.subscription.topics: p_ids = self._cluster.partitions_for_topic(topic) if not p_ids and check_unknown: raise Errors.UnknownTopicOrPartitionError() for p_id in p_ids: partitions.append(TopicPartition(topic, p_id)) assignment = self._subscription.subscription.assignment if assignment is None or set(partitions) != assignment.tps: self._subscription.assign_from_subscribed(partitions) async def _reset_committed_routine(self): event_waiter = None try: while True: if self._subscription.subscription is None: await self._subscription.wait_for_subscription() continue assignment = self._subscription.subscription.assignment if assignment is None: await self._subscription.wait_for_assignment() continue commit_refresh_needed = assignment.commit_refresh_needed commit_refresh_needed.clear() for tp in assignment.requesting_committed(): tp_state = assignment.state_value(tp) tp_state.update_committed( OffsetAndMetadata(UNKNOWN_OFFSET, "")) event_waiter = ensure_future( commit_refresh_needed.wait(), loop=self._loop) await asyncio.wait( [assignment.unassign_future, event_waiter], 
return_when=asyncio.FIRST_COMPLETED, loop=self._loop) except asyncio.CancelledError: pass if event_waiter is not None and not event_waiter.done(): event_waiter.cancel() event_waiter = None @property def _group_subscription(self): return self._subscription.subscription.topics async def close(self): self._reset_committed_task.cancel() await self._reset_committed_task self._reset_committed_task = None def check_errors(self): if self._reset_committed_task.done(): self._reset_committed_task.result() class GroupCoordinator(BaseCoordinator): def __init__(self, client, subscription, *, loop, group_id='aiokafka-default-group', session_timeout_ms=10000, heartbeat_interval_ms=3000, retry_backoff_ms=100, enable_auto_commit=True, auto_commit_interval_ms=5000, assignors=(RoundRobinPartitionAssignor,), exclude_internal_topics=True, max_poll_interval_ms=300000, rebalance_timeout_ms=30000 ): self._group_subscription = None super().__init__( client, subscription, loop=loop, exclude_internal_topics=exclude_internal_topics) self._session_timeout_ms = session_timeout_ms self._heartbeat_interval_ms = heartbeat_interval_ms self._max_poll_interval = max_poll_interval_ms / 1000 self._rebalance_timeout_ms = rebalance_timeout_ms self._retry_backoff_ms = retry_backoff_ms self._assignors = assignors self._enable_auto_commit = enable_auto_commit self._auto_commit_interval_ms = auto_commit_interval_ms self.generation = OffsetCommitRequest.DEFAULT_GENERATION_ID self.member_id = JoinGroupRequest[0].UNKNOWN_MEMBER_ID self.group_id = group_id self.coordinator_id = None self._performed_join_prepare = False self._rejoin_needed_fut = create_future(loop=loop) self._coordinator_dead_fut = create_future(loop=loop) self._coordination_task = ensure_future( self._coordination_routine(), loop=loop) def _on_coordination_done(fut): try: fut.result() except asyncio.CancelledError: raise except Exception: log.error( "Unexpected error in coordinator routine", exc_info=True) self._coordination_task.add_done_callback(_on_coordination_done) self._heartbeat_task = None self._commit_refresh_task = None self._pending_exception = None self._error_consumed_fut = None self._coordinator_lookup_lock = asyncio.Lock(loop=loop) self._commit_lock = asyncio.Lock(loop=loop) self._next_autocommit_deadline = loop.time() + auto_commit_interval_ms / 1000 self._closing = create_future(loop=loop) def _on_metadata_change(self): self.request_rejoin() async def _send_req(self, request): node_id = self.coordinator_id if node_id is None: raise Errors.GroupCoordinatorNotAvailableError() try: resp = await self._client.send( node_id, request, group=ConnectionGroup.COORDINATION) except Errors.KafkaError as err: log.error( 'Error sending %s to node %s [%s] -- marking coordinator dead', request.__class__.__name__, node_id, err) self.coordinator_dead() raise err return resp
Apache License 2.0
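`check_errors` implements a defer-and-reraise pattern: the background coordination routine records failures in `_pending_exception`, and the caller surfaces them on its own thread of control. A standalone asyncio sketch of the pattern:

```python
# Standalone sketch of the defer-and-reraise pattern behind check_errors.
import asyncio

class CoordinatorSketch:
    def __init__(self):
        self._pending_exception = None

    async def _coordination_routine(self):
        # A background failure is recorded rather than raised in-place.
        self._pending_exception = RuntimeError('coordinator unreachable')

    def check_errors(self):
        # The caller re-raises the stored exception exactly once.
        if self._pending_exception is not None:
            exc, self._pending_exception = self._pending_exception, None
            raise exc

async def main():
    c = CoordinatorSketch()
    await c._coordination_routine()
    try:
        c.check_errors()
    except RuntimeError as e:
        print('surfaced:', e)

asyncio.run(main())
```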
districtdatalabs/baleen
baleen/utils/logger.py
LoggingMixin.logger
python
def logger(self): if not hasattr(self, '_logger') or not self._logger: self._logger = IngestLogger() return self._logger
Instantiates and returns an IngestLogger instance
https://github.com/districtdatalabs/baleen/blob/bb2ae323a3ab3a066a4a289401847e1251abc55d/baleen/utils/logger.py#L198-L204
import logging import getpass import warnings import logging.config from baleen.config import settings from baleen.utils.timez import COMMON_DATETIME configuration = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'simple': { 'format': '%(name)s %(levelname)s [%(asctime)s] -- %(message)s', 'datefmt': COMMON_DATETIME, } }, 'handlers': { 'null': { 'level': 'DEBUG', 'class': 'logging.NullHandler', }, 'console': { 'level': 'WARNING', 'class': 'logging.StreamHandler', 'formatter': 'simple', }, 'logfile': { 'level': 'INFO', 'class': 'logging.handlers.RotatingFileHandler', 'filename': settings.logfile, 'maxBytes': '536870912', 'formatter': 'simple', }, 'mongolog': { 'level': 'INFO', 'class': 'baleen.utils.mongolog.MongoHandler', } }, 'loggers': { 'baleen': { 'level': settings.loglevel, 'handlers': ['logfile'], 'propagate': True, }, 'baleen.ingest': { 'level': 'INFO', 'handlers': ['logfile', 'mongolog'], 'propagate': False, } }, } logging.config.dictConfigClass(configuration).configure() if not settings.debug: logging.captureWarnings(True) class WrappedLogger(object): logger = None def __init__(self, **kwargs): self.raise_warnings = kwargs.pop('raise_warnings', settings.debug) self.logger = kwargs.pop('logger', self.logger) if not self.logger or not hasattr(self.logger, 'log'): raise TypeError( "Subclasses must specify a logger, not {}" .format(type(self.logger)) ) self.extras = kwargs def log(self, level, message, *args, **kwargs): extra = self.extras.copy() extra.update(kwargs.pop('extra', {})) kwargs['extra'] = extra self.logger.log(level, message, *args, **kwargs) def debug(self, message, *args, **kwargs): return self.log(logging.DEBUG, message, *args, **kwargs) def info(self, message, *args, **kwargs): return self.log(logging.INFO, message, *args, **kwargs) def warning(self, message, *args, **kwargs): warncls = kwargs.pop('warning', None) if warncls and self.raise_warnings: warnings.warn(message, warncls) return self.log(logging.WARNING, message, *args, **kwargs) warn = warning def error(self, message, *args, **kwargs): return self.log(logging.ERROR, message, *args, **kwargs) def critical(self, message, *args, **kwargs): return self.log(logging.CRITICAL, message, *args, **kwargs) class IngestLogger(WrappedLogger): logger = logging.getLogger('baleen.ingest') def __init__(self, **kwargs): self._user = kwargs.pop('user', None) super(IngestLogger, self).__init__(**kwargs) @property def user(self): if not self._user: self._user = getpass.getuser() return self._user def log(self, level, message, *args, **kwargs): extra = kwargs.pop('extra', {}) extra.update({ 'user': self.user }) kwargs['extra'] = extra super(IngestLogger, self).log(level, message, *args, **kwargs) class LoggingMixin(object): @property
MIT License
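A self-contained sketch of the lazy-initialization idiom behind `LoggingMixin.logger`, with the stdlib logger standing in for `IngestLogger`:

```python
# Lazy-initialized logger property; logging.getLogger stands in for IngestLogger.
import logging

class LoggingMixinSketch(object):
    @property
    def logger(self):
        if not getattr(self, '_logger', None):
            self._logger = logging.getLogger(type(self).__name__)
        return self._logger

class Ingestor(LoggingMixinSketch):
    pass

job = Ingestor()
assert job.logger is job.logger  # created once on first access, then reused
```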
cisco-en-programmability/dnacentersdk
dnacentersdk/api/v1_3_0/devices.py
Devices.get_module_info_by_id
python
def get_module_info_by_id(self, id, headers=None, **request_parameters): check_type(headers, dict) check_type(id, basestring, may_be_none=False) if headers is not None: if 'X-Auth-Token' in headers: check_type(headers.get('X-Auth-Token'), basestring, may_be_none=False) _params = { } _params.update(request_parameters) _params = dict_from_items_with_values(_params) path_params = { 'id': id, } with_custom_headers = False _headers = self._session.headers or {} if headers: _headers.update(dict_of_str(headers)) with_custom_headers = True e_url = ('/dna/intent/api/v1/network-device/module/${id}') endpoint_full_url = apply_path_params(e_url, path_params) if with_custom_headers: json_data = self._session.get(endpoint_full_url, params=_params, headers=_headers) else: json_data = self._session.get(endpoint_full_url, params=_params) return self._object_factory('bpm_0db7da744c0b83d8_v1_3_0', json_data)
Returns Module info by id. Args: id(basestring): id path parameter. headers(dict): Dictionary of HTTP Headers to send with the Request . **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: MyDict: JSON response. Access the object's properties by using the dot notation or the bracket notation. Raises: TypeError: If the parameter types are incorrect. MalformedRequest: If the request body created is invalid. ApiError: If the DNA Center cloud returns an error.
https://github.com/cisco-en-programmability/dnacentersdk/blob/ef2adde6113e7a6acd28a287007eb470fa39d31f/dnacentersdk/api/v1_3_0/devices.py#L75-L128
from __future__ import ( absolute_import, division, print_function, unicode_literals, ) from builtins import * from past.builtins import basestring from ...restsession import RestSession from ...utils import ( check_type, dict_from_items_with_values, apply_path_params, dict_of_str, ) class Devices(object): def __init__(self, session, object_factory, request_validator): check_type(session, RestSession) super(Devices, self).__init__() self._session = session self._object_factory = object_factory self._request_validator = request_validator
MIT License
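Hypothetical usage through the SDK's top-level client; the host, credentials, and module id are placeholders, and the attribute access on the result assumes the MyDict wrapper described in the docstring:

```python
# Hypothetical usage; base_url, credentials, and the module id are placeholders.
from dnacentersdk import api

dnac = api.DNACenterAPI(base_url='https://dnac.example.com',
                        username='user', password='pass', verify=False)
module = dnac.devices.get_module_info_by_id('a1b2c3d4-0000-0000-0000-000000000000')
print(module.response)  # dot-notation access per the MyDict return type
```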
googlecloudplatform/data-pipeline
app/src/pipelines/stages/httpinput.py
HttpInput.run
python
def run(self, config): start = config.get('start') if not start: start = 0 config['start'] = 0 if 'length' not in config: req = urllib2.Request(config['url']) req.add_header('Range', 'bytes=0-0') meta_inf = None with contextlib.closing(urllib2.urlopen(req)) as resp: meta_inf = resp.info() range_len = meta_inf.getheaders('Content-Range') if range_len: range_len = long(range_len[0].split('/')[1]) config['length'] = range_len - start else: logging.warning('Cannot determine resource length.') if 'shardSize' not in config: config['shardSize'] = self.REQUEST_CHUNK_SIZE (shards, compositors) = self.ShardStage(config) if shards and compositors: with pipeline.After(*[(yield shard) for shard in shards]): _ = [(yield compositor) for compositor in compositors] else: gcs_obj = config['sinks'][0] gcs_storage = gcs.Gcs() start = config.get('start', 0) length = config.get('length') req = urllib2.Request(config['url']) range_bytes = 'bytes=%s-%s' if length: range_bytes %= (start, start + length - 1) else: range_bytes %= (start, '') req.add_header('Range', range_bytes) with contextlib.closing(urllib2.urlopen(req, timeout=300)) as resp: with contextlib.closing(StringIO.StringIO(resp.read())) as resp_buf: gcs_storage.InsertObject(resp_buf, url=gcs_obj)
Runs the stage. Args: config: Specifies the source object(s) and sinks. Yields: If necessary, a pipeline future for a GcsCompositor stage
https://github.com/googlecloudplatform/data-pipeline/blob/0bdd1664bc9ff5e36928c4609ef6127ef1e1fb3f/app/src/pipelines/stages/httpinput.py#L68-L121
import contextlib import cStringIO as StringIO import logging import urllib2 import urlparse from src.clients import gcs from src.pipelines import pipeline from src.pipelines import shardstage class HttpInput(shardstage.ShardStage): CHUNK_SIZE_8MB = 1 << 23 CHUNK_SIZE_32MB = 1 << 25 REQUEST_CHUNK_SIZE = CHUNK_SIZE_8MB MAX_SHARD_SIZE = CHUNK_SIZE_32MB DEFAULT_CONTENT_TYPE = 'binary/octet-stream' @staticmethod def GetHelp(): return """Load Data from a URL. The stage config should look like this: ```python { "url": "...", "start": first_byte, "length": number_of_bytes, "shardSize": maximum_number_of_bytes, "shardPrefix": "...", } ``` If the contents of the url are larger than shardSize then multiple requests will be made in parallel to load parts of the url and then composited together into the resulting file. * start and length are optional. * 'shardPrefix' can be used to organize the temporary objects, if any, created during the chunked transfer (and recomposition) of the object in GCS. * Any 'sources' for this stage config will be ignored. """
Apache License 2.0
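The length probe and the sharded reads in `run` reduce to two HTTP Range requests. A Python 3 sketch of both (the original targets Python 2's urllib2; the URL is a placeholder):

```python
# Python 3 sketch of the two Range requests run makes; the URL is a placeholder.
import urllib.request

url = 'https://example.com/big.bin'

# 1) Probe total length via a one-byte request, as in the 'bytes=0-0' branch.
probe = urllib.request.Request(url, headers={'Range': 'bytes=0-0'})
with urllib.request.urlopen(probe) as resp:
    content_range = resp.headers.get('Content-Range')  # e.g. 'bytes 0-0/10485760'
    total = int(content_range.split('/')[1]) if content_range else None

# 2) Fetch one shard-sized chunk of the resource.
chunk_req = urllib.request.Request(url, headers={'Range': 'bytes=0-1023'})
with urllib.request.urlopen(chunk_req, timeout=300) as resp:
    chunk = resp.read()
print(total, len(chunk))
```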
parrot-developers/olympe
src/olympe/arsdkng/enums.py
ArsdkEnums.__init__
python
def __init__(self, arsdkparser_context=None): self._ctx = arsdkparser_context if self._ctx is None: self._ctx = ArsdkXml.get().ctx self._enums = OrderedDict() self._bitfields = OrderedDict() self._by_feature = OrderedDict() self._enums_feature = OrderedDict() self._enums_source = OrderedDict() self._enums_source[list_flags] = textwrap.dedent( """ class {}(ArsdkEnum): {} """.format("list_flags", "\n".join( map(lambda v: (v._name_ + " = " + str(v._value_)), list_flags))) ) for feature in self._ctx.features: FeatureName = feature.name[0].upper() + feature.name[1:] self._enums[FeatureName] = OrderedDict() self._bitfields[FeatureName] = OrderedDict() self._by_feature[feature.name] = OrderedDict() for classe_name in feature.classesByName: self._by_feature[feature.name][classe_name] = OrderedDict() for enum in feature.enums: self._add_enum(FeatureName, feature, enum) self._enums[FeatureName]["list_flags"] = list_flags self._bitfields[FeatureName]["list_flags_Bitfield"] = list_flags._bitfield_type_ self._by_feature[feature.name]["list_flags"] = list_flags self._by_feature[feature.name]["list_flags_Bitfield"] = list_flags._bitfield_type_ for feature in self._enums.values(): for enum in feature.values(): if len(enum.aliases()) > 1 and "Enum aliases" not in enum.__doc__: try: doc = "\n - ".join(map( lambda a: ":py:class:`olympe.enums.{}.{}`".format( self._enums_feature[a], a.__name__), enum.aliases())) doc = ( "\n\nEnum aliases:\n\n" + " - " + doc + "\n\n" ) enum.__doc__ = enum.__doc__ + doc except KeyError: pass
ArsdkEnums constructor @type arsdkparser_context: arsdkparser.ArParserCtx
https://github.com/parrot-developers/olympe/blob/08cd2d2f5880b766f94886c530f367f9b4cfc491/src/olympe/arsdkng/enums.py#L405-L455
from __future__ import unicode_literals from __future__ import absolute_import from future.builtins import str, bytes from future.builtins import int try: from __builtin__ import str as builtin_str except ImportError: from builtins import str as builtin_str long = int import os try: import textwrap3 as textwrap except ImportError: import textwrap from aenum import OrderedEnum, IntEnum from collections import OrderedDict from itertools import starmap from six import with_metaclass from olympe.arsdkng.xml import ArsdkXml from olympe._private import string_from_arsdkxml if not os.environ.get('OLYMPE_DEPRECATED_INTENUM', 'false') == 'true': _DeprecatedIntEnums = False _EnumBase = OrderedEnum else: _EnumBase = IntEnum _DeprecatedIntEnums = True class ArsdkBitfieldMeta(type): _base = None _classes = OrderedDict() def __new__(mcls, enum_type, *args, **kwds): if ArsdkBitfieldMeta._base is None: cls = type.__new__( ArsdkBitfieldMeta, builtin_str("ArsdkBitfield"), *args, **kwds) mcls._base = cls else: cls = mcls._classes.get(enum_type) if cls is not None: return cls cls = type.__new__( mcls, builtin_str(enum_type.__name__ + "_Bitfield"), (mcls._base,), dict(_enum_type_=enum_type)) mcls._classes[enum_type] = cls return cls @property def _feature_name_(cls): return ArsdkEnums.get()._enums_feature[cls._enum_type_] class ArsdkBitfield(with_metaclass(ArsdkBitfieldMeta)): def __init__(self, enums=[]): if isinstance(enums, self.__class__): self._enums = enums._enums[:] elif isinstance(enums, self._enum_type_): self._enums = [enums] elif isinstance(enums, (int)): self._enums = list(map(self._enum_type_, self._bits_order(enums))) elif isinstance(enums, (bytes, str)): self._enums = self.from_str(enums)._enums else: enums = list(sorted(map(self._enum_type_, enums))) if not all(map(lambda v: isinstance(v, self._enum_type_), enums)): raise TypeError( "Not all values in {} are of type {}".format(enums, self._enum_type_)) seen_enums = set() self._enums = [ enum for enum in enums if not (enum in seen_enums or seen_enums.add(enum))] @classmethod def _bits_order(cls, n): while n: b = n & (~n + 1) order = b.bit_length() - 1 yield (order if order >= 0 else 0) n ^= b @classmethod def from_str(cls, enums): if enums == '': return cls([]) try: enums = list(map(cls._enum_type_.__getitem__, enums.split('|'))) except KeyError as e: raise ValueError("{} is not an enum label of {}".format( str(e), cls._enum_type_.__name__)) return cls(enums) @classmethod def empty(cls): return cls() @classmethod def full(cls): return ~cls() def to_str(self): return str(self) def to_flag_list(self): flags = [] for enum in self.full(): flags.append(enum in self) return flags def __getattr__(self, name): try: enum = self.__class__._enum_type_.__getitem__(name) except KeyError: raise AttributeError( '{} is not a {} bitfield flag'.format(name, self.__class__.__name__)) return enum in self def __str__(self): return '|'.join(map(lambda v: v.name, self._enums)) def __repr__(self): return '<{}: {}>'.format(self.__class__.__name__, self._enums) def pretty(self): return "'" + '|'.join(map(lambda v: v.name, self._enums)) + "'" def __contains__(self, enum): return enum in self._enums def __iter__(self): return iter(self._enums) def to_int(self): r = 0 for enum in self._enums: r += 2 ** enum.value return r if _DeprecatedIntEnums: __int__ = to_int def __invert__(self): return self.__class__([enum for enum in self._enum_type_ if enum not in self._enums]) def __or__(self, other): other = self.__class__(other) return self.__class__(self._enums + other._enums) 
def __and__(self, other): other = self.__class__(other) return self.__class__([enum for enum in self._enums if enum in other._enums]) def __xor__(self, other): other = self.__class__(other) return self & ~other | ~self & other __ror__ = __or__ __rand__ = __and__ __xor__ = __xor__ def __eq__(self, other): other = self.__class__(other) return self.to_int() == other.to_int() def __neq__(self, other): return not self == other def __nonzero__(self): return bool(self.to_int()) __bool__ = __nonzero__ class ArsdkEnumMeta(_EnumBase.__class__): _base = None _classes = OrderedDict() _aliases = OrderedDict() def __new__(mcls, name, bases, ns): if ArsdkEnumMeta._base is None: cls = _EnumBase.__class__.__new__(mcls, builtin_str(name), (_EnumBase,), ns) ArsdkEnumMeta._base = cls else: class_key = (name,) + tuple((starmap(lambda k, v: k + "_" + str(v), ns.items()))) cls = mcls._classes.get(class_key) if cls is not None: return cls alias_key = tuple(starmap( lambda k, v: (k.replace('_', '').lower(), v), ns.items())) alias_name = (label + "_" + str(value) for label, value in alias_key) alias_name = str("ArsdkEnumAlias_" + '_'.join(alias_name)) if alias_key not in mcls._aliases: alias_base = _EnumBase.__class__.__new__( mcls, builtin_str(alias_name), (ArsdkEnumMeta._base,), {}) mcls._aliases[alias_key] = alias_base else: alias_base = mcls._aliases[alias_key] cls = _EnumBase.__class__.__new__(mcls, builtin_str(name), (alias_base,), ns) mcls._classes[class_key] = cls return cls @classmethod def __prepare__(mcls, cls, bases, *args, **kwds): if bases and not issubclass(bases[-1], _EnumBase): bases = (bases[-1], _EnumBase) elif not bases: bases = (_EnumBase,) return _EnumBase.__class__.__prepare__(cls, bases, *args, **kwds) @property def _bitfield_type_(cls): return ArsdkBitfieldMeta.__new__(ArsdkBitfieldMeta, cls) @property def _feature_name_(cls): return ArsdkEnums.get()._enums_feature[cls] @property def _source_(cls): return ArsdkEnums.get()._enums_source[cls] class ArsdkEnum(with_metaclass(ArsdkEnumMeta)): @classmethod def from_str(cls, value): if value == '': raise ValueError("Empty string cannot be converted to {}".format(cls.__name__)) try: return cls[value] except KeyError as e: raise ValueError("{} is not an enum label of {}".format(str(e), cls.__name__)) def to_str(self): return self._name_ def _to_bitfield(self): return self.__class__._bitfield_type_([self]) def __invert__(self): return self._to_bitfield().__invert__() def __or__(self, other): return self._to_bitfield().__or__(other) def __and__(self, other): return self._to_bitfield().__and__(other) def __xor__(self, other): return self._to_bitfield().__xor__(other) __ror__ = __or__ __rand__ = __and__ __xor__ = __xor__ @classmethod def aliases(cls): if cls.__base__.__name__.startswith("ArsdkEnumAlias"): return [alias for alias in cls.__base__.__subclasses__()] else: return [] def __eq__(self, other): if not _DeprecatedIntEnums: if other.__class__ in self.aliases(): return self._value_ == other._value_ else: return NotImplemented else: return super(ArsdkEnum, self).__eq__(other) def __ne__(self, other): if not _DeprecatedIntEnums: if other.__class__ in self.aliases(): return self._value_ != other._value_ else: return NotImplemented else: return super(ArsdkEnum, self).__ne__(other) def __hash__(self): return self._value_ if _DeprecatedIntEnums: def __str__(self): return ('"olympe.enums.' + self.__class__._feature_name_ + '.' + self.__class__.__name__ + '.' 
+ self._name_ + '"') def __int__(self): return self class list_flags(ArsdkEnum): First, Last, Empty, Remove = range(4) class ArsdkEnums(object): _single = None @classmethod def get(cls): if cls._single is None: cls._single = cls() return cls._single
BSD 3-Clause New or Revised License
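The _bits_order helper above is the core of how ArsdkBitfield maps an integer onto enum values. A minimal standalone sketch of the same decomposition (plain Python, no olympe install assumed):

def bits_order(n):
    # Yield the zero-based position of each set bit, lowest first;
    # ArsdkBitfield maps these positions back onto enum values.
    while n:
        b = n & (~n + 1)           # isolate the lowest set bit
        yield b.bit_length() - 1   # its position
        n ^= b                     # clear it and continue

assert list(bits_order(0b10110)) == [1, 2, 4]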
life4/deal
deal/linter/_template.py
validate
python
def validate(args, kwargs) -> None:
    base = Validator(validator=contract, exception=deal.ContractError)
    if func is not Ellipsis:
        base.function = func
    old_name = None
    for _ in range(10):
        try:
            base.validate(args, kwargs)
            return
        except NameError as err:
            name = err.args[0].split("'")[1]
            if name == old_name:
                raise
            old_name = name  # remember this name; a second failure on it aborts
            ok = inject(name)
            if not ok:
                raise
            continue
Run validator, trying to fix missed imports on the way.
https://github.com/life4/deal/blob/d826d060740429575cf6675396ea3a61ab99de2a/deal/linter/_template.py#L24-L50
from importlib import import_module

import deal
from deal._runtime import Validator

contract = ...
func = ...


def inject(name: str) -> bool:
    try:
        globals()[name] = import_module(name)
    except ImportError:
        return False
    return True
MIT License
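A standalone sketch of the retry-on-NameError pattern that validate() and inject() implement above; eval_with_imports and its arguments are illustrative names, not part of deal's API:

from importlib import import_module

def eval_with_imports(expr, namespace, max_tries=10):
    # Retry evaluation, importing each missing name as a module on the way,
    # mirroring the inject()/validate() loop above.
    for _ in range(max_tries):
        try:
            return eval(expr, namespace)
        except NameError as err:
            name = err.args[0].split("'")[1]
            try:
                namespace[name] = import_module(name)
            except ImportError:
                raise err

print(eval_with_imports("math.sqrt(9.0)", {}))  # imports math on demand -> 3.0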
docusign/docusign-python-client
docusign_esign/models/sign_here.py
SignHere.anchor_tab_processor_version_metadata
python
def anchor_tab_processor_version_metadata(self, anchor_tab_processor_version_metadata):
    self._anchor_tab_processor_version_metadata = anchor_tab_processor_version_metadata
Sets the anchor_tab_processor_version_metadata of this SignHere.

:param anchor_tab_processor_version_metadata: The anchor_tab_processor_version_metadata of this SignHere.  # noqa: E501
:type: PropertyMetadata
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/sign_here.py#L676-L684
import pprint import re import six from docusign_esign.client.configuration import Configuration class SignHere(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'anchor_allow_white_space_in_characters': 'str', 'anchor_allow_white_space_in_characters_metadata': 'PropertyMetadata', 'anchor_case_sensitive': 'str', 'anchor_case_sensitive_metadata': 'PropertyMetadata', 'anchor_horizontal_alignment': 'str', 'anchor_horizontal_alignment_metadata': 'PropertyMetadata', 'anchor_ignore_if_not_present': 'str', 'anchor_ignore_if_not_present_metadata': 'PropertyMetadata', 'anchor_match_whole_word': 'str', 'anchor_match_whole_word_metadata': 'PropertyMetadata', 'anchor_string': 'str', 'anchor_string_metadata': 'PropertyMetadata', 'anchor_tab_processor_version': 'str', 'anchor_tab_processor_version_metadata': 'PropertyMetadata', 'anchor_units': 'str', 'anchor_units_metadata': 'PropertyMetadata', 'anchor_x_offset': 'str', 'anchor_x_offset_metadata': 'PropertyMetadata', 'anchor_y_offset': 'str', 'anchor_y_offset_metadata': 'PropertyMetadata', 'conditional_parent_label': 'str', 'conditional_parent_label_metadata': 'PropertyMetadata', 'conditional_parent_value': 'str', 'conditional_parent_value_metadata': 'PropertyMetadata', 'custom_tab_id': 'str', 'custom_tab_id_metadata': 'PropertyMetadata', 'document_id': 'str', 'document_id_metadata': 'PropertyMetadata', 'error_details': 'ErrorDetails', 'form_order': 'str', 'form_order_metadata': 'PropertyMetadata', 'form_page_label': 'str', 'form_page_label_metadata': 'PropertyMetadata', 'form_page_number': 'str', 'form_page_number_metadata': 'PropertyMetadata', 'hand_draw_required': 'str', 'height': 'str', 'height_metadata': 'PropertyMetadata', 'is_seal_sign_tab': 'str', 'merge_field': 'MergeField', 'merge_field_xml': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'optional': 'str', 'optional_metadata': 'PropertyMetadata', 'page_number': 'str', 'page_number_metadata': 'PropertyMetadata', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_id_guid_metadata': 'PropertyMetadata', 'recipient_id_metadata': 'PropertyMetadata', 'scale_value': 'str', 'scale_value_metadata': 'PropertyMetadata', 'smart_contract_information': 'SmartContractInformation', 'source': 'str', 'stamp': 'Stamp', 'stamp_type': 'str', 'stamp_type_metadata': 'PropertyMetadata', 'status': 'str', 'status_metadata': 'PropertyMetadata', 'tab_group_labels': 'list[str]', 'tab_group_labels_metadata': 'PropertyMetadata', 'tab_id': 'str', 'tab_id_metadata': 'PropertyMetadata', 'tab_label': 'str', 'tab_label_metadata': 'PropertyMetadata', 'tab_order': 'str', 'tab_order_metadata': 'PropertyMetadata', 'tab_type': 'str', 'tab_type_metadata': 'PropertyMetadata', 'template_locked': 'str', 'template_locked_metadata': 'PropertyMetadata', 'template_required': 'str', 'template_required_metadata': 'PropertyMetadata', 'tooltip': 'str', 'tool_tip_metadata': 'PropertyMetadata', 'width': 'str', 'width_metadata': 'PropertyMetadata', 'x_position': 'str', 'x_position_metadata': 'PropertyMetadata', 'y_position': 'str', 'y_position_metadata': 'PropertyMetadata' } attribute_map = { 'anchor_allow_white_space_in_characters': 'anchorAllowWhiteSpaceInCharacters', 'anchor_allow_white_space_in_characters_metadata': 'anchorAllowWhiteSpaceInCharactersMetadata', 'anchor_case_sensitive': 'anchorCaseSensitive', 'anchor_case_sensitive_metadata': 
'anchorCaseSensitiveMetadata', 'anchor_horizontal_alignment': 'anchorHorizontalAlignment', 'anchor_horizontal_alignment_metadata': 'anchorHorizontalAlignmentMetadata', 'anchor_ignore_if_not_present': 'anchorIgnoreIfNotPresent', 'anchor_ignore_if_not_present_metadata': 'anchorIgnoreIfNotPresentMetadata', 'anchor_match_whole_word': 'anchorMatchWholeWord', 'anchor_match_whole_word_metadata': 'anchorMatchWholeWordMetadata', 'anchor_string': 'anchorString', 'anchor_string_metadata': 'anchorStringMetadata', 'anchor_tab_processor_version': 'anchorTabProcessorVersion', 'anchor_tab_processor_version_metadata': 'anchorTabProcessorVersionMetadata', 'anchor_units': 'anchorUnits', 'anchor_units_metadata': 'anchorUnitsMetadata', 'anchor_x_offset': 'anchorXOffset', 'anchor_x_offset_metadata': 'anchorXOffsetMetadata', 'anchor_y_offset': 'anchorYOffset', 'anchor_y_offset_metadata': 'anchorYOffsetMetadata', 'conditional_parent_label': 'conditionalParentLabel', 'conditional_parent_label_metadata': 'conditionalParentLabelMetadata', 'conditional_parent_value': 'conditionalParentValue', 'conditional_parent_value_metadata': 'conditionalParentValueMetadata', 'custom_tab_id': 'customTabId', 'custom_tab_id_metadata': 'customTabIdMetadata', 'document_id': 'documentId', 'document_id_metadata': 'documentIdMetadata', 'error_details': 'errorDetails', 'form_order': 'formOrder', 'form_order_metadata': 'formOrderMetadata', 'form_page_label': 'formPageLabel', 'form_page_label_metadata': 'formPageLabelMetadata', 'form_page_number': 'formPageNumber', 'form_page_number_metadata': 'formPageNumberMetadata', 'hand_draw_required': 'handDrawRequired', 'height': 'height', 'height_metadata': 'heightMetadata', 'is_seal_sign_tab': 'isSealSignTab', 'merge_field': 'mergeField', 'merge_field_xml': 'mergeFieldXml', 'name': 'name', 'name_metadata': 'nameMetadata', 'optional': 'optional', 'optional_metadata': 'optionalMetadata', 'page_number': 'pageNumber', 'page_number_metadata': 'pageNumberMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_id_guid_metadata': 'recipientIdGuidMetadata', 'recipient_id_metadata': 'recipientIdMetadata', 'scale_value': 'scaleValue', 'scale_value_metadata': 'scaleValueMetadata', 'smart_contract_information': 'smartContractInformation', 'source': 'source', 'stamp': 'stamp', 'stamp_type': 'stampType', 'stamp_type_metadata': 'stampTypeMetadata', 'status': 'status', 'status_metadata': 'statusMetadata', 'tab_group_labels': 'tabGroupLabels', 'tab_group_labels_metadata': 'tabGroupLabelsMetadata', 'tab_id': 'tabId', 'tab_id_metadata': 'tabIdMetadata', 'tab_label': 'tabLabel', 'tab_label_metadata': 'tabLabelMetadata', 'tab_order': 'tabOrder', 'tab_order_metadata': 'tabOrderMetadata', 'tab_type': 'tabType', 'tab_type_metadata': 'tabTypeMetadata', 'template_locked': 'templateLocked', 'template_locked_metadata': 'templateLockedMetadata', 'template_required': 'templateRequired', 'template_required_metadata': 'templateRequiredMetadata', 'tooltip': 'tooltip', 'tool_tip_metadata': 'toolTipMetadata', 'width': 'width', 'width_metadata': 'widthMetadata', 'x_position': 'xPosition', 'x_position_metadata': 'xPositionMetadata', 'y_position': 'yPosition', 'y_position_metadata': 'yPositionMetadata' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._anchor_allow_white_space_in_characters = None self._anchor_allow_white_space_in_characters_metadata = None self._anchor_case_sensitive = None 
self._anchor_case_sensitive_metadata = None self._anchor_horizontal_alignment = None self._anchor_horizontal_alignment_metadata = None self._anchor_ignore_if_not_present = None self._anchor_ignore_if_not_present_metadata = None self._anchor_match_whole_word = None self._anchor_match_whole_word_metadata = None self._anchor_string = None self._anchor_string_metadata = None self._anchor_tab_processor_version = None self._anchor_tab_processor_version_metadata = None self._anchor_units = None self._anchor_units_metadata = None self._anchor_x_offset = None self._anchor_x_offset_metadata = None self._anchor_y_offset = None self._anchor_y_offset_metadata = None self._conditional_parent_label = None self._conditional_parent_label_metadata = None self._conditional_parent_value = None self._conditional_parent_value_metadata = None self._custom_tab_id = None self._custom_tab_id_metadata = None self._document_id = None self._document_id_metadata = None self._error_details = None self._form_order = None self._form_order_metadata = None self._form_page_label = None self._form_page_label_metadata = None self._form_page_number = None self._form_page_number_metadata = None self._hand_draw_required = None self._height = None self._height_metadata = None self._is_seal_sign_tab = None self._merge_field = None self._merge_field_xml = None self._name = None self._name_metadata = None self._optional = None self._optional_metadata = None self._page_number = None self._page_number_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_id_guid_metadata = None self._recipient_id_metadata = None self._scale_value = None self._scale_value_metadata = None self._smart_contract_information = None self._source = None self._stamp = None self._stamp_type = None self._stamp_type_metadata = None self._status = None self._status_metadata = None self._tab_group_labels = None self._tab_group_labels_metadata = None self._tab_id = None self._tab_id_metadata = None self._tab_label = None self._tab_label_metadata = None self._tab_order = None self._tab_order_metadata = None self._tab_type = None self._tab_type_metadata = None self._template_locked = None self._template_locked_metadata = None self._template_required = None self._template_required_metadata = None self._tooltip = None self._tool_tip_metadata = None self._width = None self._width_metadata = None self._x_position = None self._x_position_metadata = None self._y_position = None self._y_position_metadata = None self.discriminator = None setattr(self, "_{}".format('anchor_allow_white_space_in_characters'), kwargs.get('anchor_allow_white_space_in_characters', None)) setattr(self, "_{}".format('anchor_allow_white_space_in_characters_metadata'), kwargs.get('anchor_allow_white_space_in_characters_metadata', None)) setattr(self, "_{}".format('anchor_case_sensitive'), kwargs.get('anchor_case_sensitive', None)) setattr(self, "_{}".format('anchor_case_sensitive_metadata'), kwargs.get('anchor_case_sensitive_metadata', None)) setattr(self, "_{}".format('anchor_horizontal_alignment'), kwargs.get('anchor_horizontal_alignment', None)) setattr(self, "_{}".format('anchor_horizontal_alignment_metadata'), kwargs.get('anchor_horizontal_alignment_metadata', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present'), kwargs.get('anchor_ignore_if_not_present', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present_metadata'), kwargs.get('anchor_ignore_if_not_present_metadata', None)) setattr(self, "_{}".format('anchor_match_whole_word'), 
kwargs.get('anchor_match_whole_word', None)) setattr(self, "_{}".format('anchor_match_whole_word_metadata'), kwargs.get('anchor_match_whole_word_metadata', None)) setattr(self, "_{}".format('anchor_string'), kwargs.get('anchor_string', None)) setattr(self, "_{}".format('anchor_string_metadata'), kwargs.get('anchor_string_metadata', None)) setattr(self, "_{}".format('anchor_tab_processor_version'), kwargs.get('anchor_tab_processor_version', None)) setattr(self, "_{}".format('anchor_tab_processor_version_metadata'), kwargs.get('anchor_tab_processor_version_metadata', None)) setattr(self, "_{}".format('anchor_units'), kwargs.get('anchor_units', None)) setattr(self, "_{}".format('anchor_units_metadata'), kwargs.get('anchor_units_metadata', None)) setattr(self, "_{}".format('anchor_x_offset'), kwargs.get('anchor_x_offset', None)) setattr(self, "_{}".format('anchor_x_offset_metadata'), kwargs.get('anchor_x_offset_metadata', None)) setattr(self, "_{}".format('anchor_y_offset'), kwargs.get('anchor_y_offset', None)) setattr(self, "_{}".format('anchor_y_offset_metadata'), kwargs.get('anchor_y_offset_metadata', None)) setattr(self, "_{}".format('conditional_parent_label'), kwargs.get('conditional_parent_label', None)) setattr(self, "_{}".format('conditional_parent_label_metadata'), kwargs.get('conditional_parent_label_metadata', None)) setattr(self, "_{}".format('conditional_parent_value'), kwargs.get('conditional_parent_value', None)) setattr(self, "_{}".format('conditional_parent_value_metadata'), kwargs.get('conditional_parent_value_metadata', None)) setattr(self, "_{}".format('custom_tab_id'), kwargs.get('custom_tab_id', None)) setattr(self, "_{}".format('custom_tab_id_metadata'), kwargs.get('custom_tab_id_metadata', None)) setattr(self, "_{}".format('document_id'), kwargs.get('document_id', None)) setattr(self, "_{}".format('document_id_metadata'), kwargs.get('document_id_metadata', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('form_order'), kwargs.get('form_order', None)) setattr(self, "_{}".format('form_order_metadata'), kwargs.get('form_order_metadata', None)) setattr(self, "_{}".format('form_page_label'), kwargs.get('form_page_label', None)) setattr(self, "_{}".format('form_page_label_metadata'), kwargs.get('form_page_label_metadata', None)) setattr(self, "_{}".format('form_page_number'), kwargs.get('form_page_number', None)) setattr(self, "_{}".format('form_page_number_metadata'), kwargs.get('form_page_number_metadata', None)) setattr(self, "_{}".format('hand_draw_required'), kwargs.get('hand_draw_required', None)) setattr(self, "_{}".format('height'), kwargs.get('height', None)) setattr(self, "_{}".format('height_metadata'), kwargs.get('height_metadata', None)) setattr(self, "_{}".format('is_seal_sign_tab'), kwargs.get('is_seal_sign_tab', None)) setattr(self, "_{}".format('merge_field'), kwargs.get('merge_field', None)) setattr(self, "_{}".format('merge_field_xml'), kwargs.get('merge_field_xml', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('optional'), kwargs.get('optional', None)) setattr(self, "_{}".format('optional_metadata'), kwargs.get('optional_metadata', None)) setattr(self, "_{}".format('page_number'), kwargs.get('page_number', None)) setattr(self, "_{}".format('page_number_metadata'), kwargs.get('page_number_metadata', None)) setattr(self, "_{}".format('recipient_id'), 
kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_id_guid_metadata'), kwargs.get('recipient_id_guid_metadata', None)) setattr(self, "_{}".format('recipient_id_metadata'), kwargs.get('recipient_id_metadata', None)) setattr(self, "_{}".format('scale_value'), kwargs.get('scale_value', None)) setattr(self, "_{}".format('scale_value_metadata'), kwargs.get('scale_value_metadata', None)) setattr(self, "_{}".format('smart_contract_information'), kwargs.get('smart_contract_information', None)) setattr(self, "_{}".format('source'), kwargs.get('source', None)) setattr(self, "_{}".format('stamp'), kwargs.get('stamp', None)) setattr(self, "_{}".format('stamp_type'), kwargs.get('stamp_type', None)) setattr(self, "_{}".format('stamp_type_metadata'), kwargs.get('stamp_type_metadata', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_metadata'), kwargs.get('status_metadata', None)) setattr(self, "_{}".format('tab_group_labels'), kwargs.get('tab_group_labels', None)) setattr(self, "_{}".format('tab_group_labels_metadata'), kwargs.get('tab_group_labels_metadata', None)) setattr(self, "_{}".format('tab_id'), kwargs.get('tab_id', None)) setattr(self, "_{}".format('tab_id_metadata'), kwargs.get('tab_id_metadata', None)) setattr(self, "_{}".format('tab_label'), kwargs.get('tab_label', None)) setattr(self, "_{}".format('tab_label_metadata'), kwargs.get('tab_label_metadata', None)) setattr(self, "_{}".format('tab_order'), kwargs.get('tab_order', None)) setattr(self, "_{}".format('tab_order_metadata'), kwargs.get('tab_order_metadata', None)) setattr(self, "_{}".format('tab_type'), kwargs.get('tab_type', None)) setattr(self, "_{}".format('tab_type_metadata'), kwargs.get('tab_type_metadata', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_locked_metadata'), kwargs.get('template_locked_metadata', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('template_required_metadata'), kwargs.get('template_required_metadata', None)) setattr(self, "_{}".format('tooltip'), kwargs.get('tooltip', None)) setattr(self, "_{}".format('tool_tip_metadata'), kwargs.get('tool_tip_metadata', None)) setattr(self, "_{}".format('width'), kwargs.get('width', None)) setattr(self, "_{}".format('width_metadata'), kwargs.get('width_metadata', None)) setattr(self, "_{}".format('x_position'), kwargs.get('x_position', None)) setattr(self, "_{}".format('x_position_metadata'), kwargs.get('x_position_metadata', None)) setattr(self, "_{}".format('y_position'), kwargs.get('y_position', None)) setattr(self, "_{}".format('y_position_metadata'), kwargs.get('y_position_metadata', None)) @property def anchor_allow_white_space_in_characters(self): return self._anchor_allow_white_space_in_characters @anchor_allow_white_space_in_characters.setter def anchor_allow_white_space_in_characters(self, anchor_allow_white_space_in_characters): self._anchor_allow_white_space_in_characters = anchor_allow_white_space_in_characters @property def anchor_allow_white_space_in_characters_metadata(self): return self._anchor_allow_white_space_in_characters_metadata @anchor_allow_white_space_in_characters_metadata.setter def anchor_allow_white_space_in_characters_metadata(self, anchor_allow_white_space_in_characters_metadata): 
self._anchor_allow_white_space_in_characters_metadata = anchor_allow_white_space_in_characters_metadata @property def anchor_case_sensitive(self): return self._anchor_case_sensitive @anchor_case_sensitive.setter def anchor_case_sensitive(self, anchor_case_sensitive): self._anchor_case_sensitive = anchor_case_sensitive @property def anchor_case_sensitive_metadata(self): return self._anchor_case_sensitive_metadata @anchor_case_sensitive_metadata.setter def anchor_case_sensitive_metadata(self, anchor_case_sensitive_metadata): self._anchor_case_sensitive_metadata = anchor_case_sensitive_metadata @property def anchor_horizontal_alignment(self): return self._anchor_horizontal_alignment @anchor_horizontal_alignment.setter def anchor_horizontal_alignment(self, anchor_horizontal_alignment): self._anchor_horizontal_alignment = anchor_horizontal_alignment @property def anchor_horizontal_alignment_metadata(self): return self._anchor_horizontal_alignment_metadata @anchor_horizontal_alignment_metadata.setter def anchor_horizontal_alignment_metadata(self, anchor_horizontal_alignment_metadata): self._anchor_horizontal_alignment_metadata = anchor_horizontal_alignment_metadata @property def anchor_ignore_if_not_present(self): return self._anchor_ignore_if_not_present @anchor_ignore_if_not_present.setter def anchor_ignore_if_not_present(self, anchor_ignore_if_not_present): self._anchor_ignore_if_not_present = anchor_ignore_if_not_present @property def anchor_ignore_if_not_present_metadata(self): return self._anchor_ignore_if_not_present_metadata @anchor_ignore_if_not_present_metadata.setter def anchor_ignore_if_not_present_metadata(self, anchor_ignore_if_not_present_metadata): self._anchor_ignore_if_not_present_metadata = anchor_ignore_if_not_present_metadata @property def anchor_match_whole_word(self): return self._anchor_match_whole_word @anchor_match_whole_word.setter def anchor_match_whole_word(self, anchor_match_whole_word): self._anchor_match_whole_word = anchor_match_whole_word @property def anchor_match_whole_word_metadata(self): return self._anchor_match_whole_word_metadata @anchor_match_whole_word_metadata.setter def anchor_match_whole_word_metadata(self, anchor_match_whole_word_metadata): self._anchor_match_whole_word_metadata = anchor_match_whole_word_metadata @property def anchor_string(self): return self._anchor_string @anchor_string.setter def anchor_string(self, anchor_string): self._anchor_string = anchor_string @property def anchor_string_metadata(self): return self._anchor_string_metadata @anchor_string_metadata.setter def anchor_string_metadata(self, anchor_string_metadata): self._anchor_string_metadata = anchor_string_metadata @property def anchor_tab_processor_version(self): return self._anchor_tab_processor_version @anchor_tab_processor_version.setter def anchor_tab_processor_version(self, anchor_tab_processor_version): self._anchor_tab_processor_version = anchor_tab_processor_version @property def anchor_tab_processor_version_metadata(self): return self._anchor_tab_processor_version_metadata @anchor_tab_processor_version_metadata.setter
MIT License
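A short usage sketch for the generated model above, assuming the docusign_esign package is installed; the anchor values are placeholders:

from docusign_esign.models.sign_here import SignHere

tab = SignHere(anchor_string='/sign_here/', anchor_units='pixels',
               anchor_x_offset='10', anchor_y_offset='20')
tab.anchor_tab_processor_version_metadata = None  # the setter shown above
print(tab.anchor_string)  # '/sign_here/'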
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/scripts/credstash.py
run
python
def run(args):
    parser = argparse.ArgumentParser(
        description=("Modify Home Assistant secrets in credstash."
                     "Use the secrets in configuration files with: "
                     "!secret <name>"))
    parser.add_argument('--script', choices=['credstash'])
    parser.add_argument(
        'action', choices=['get', 'put', 'del', 'list'],
        help="Get, put or delete a secret, or list all available secrets")
    parser.add_argument('name', help="Name of the secret", nargs='?',
                        default=None)
    parser.add_argument('value', help="The value to save when putting a secret",
                        nargs='?', default=None)

    import credstash
    import botocore

    args = parser.parse_args(args)
    table = _SECRET_NAMESPACE

    try:
        credstash.listSecrets(table=table)
    except botocore.errorfactory.ClientError:
        credstash.createDdbTable(table=table)

    if args.action == 'list':
        secrets = [i['name'] for i in credstash.listSecrets(table=table)]
        deduped_secrets = sorted(set(secrets))
        print('Saved secrets:')
        for secret in deduped_secrets:
            print(secret)
        return 0

    if args.name is None:
        parser.print_help()
        return 1

    if args.action == 'put':
        if args.value:
            the_secret = args.value
        else:
            the_secret = getpass.getpass(
                'Please enter the secret for {}: '.format(args.name))
        current_version = credstash.getHighestVersion(args.name, table=table)
        credstash.putSecret(args.name, the_secret,
                            version=int(current_version) + 1, table=table)
        print('Secret {} put successfully'.format(args.name))
    elif args.action == 'get':
        the_secret = credstash.getSecret(args.name, table=table)
        if the_secret is None:
            print('Secret {} not found'.format(args.name))
        else:
            print('Secret {}={}'.format(args.name, the_secret))
    elif args.action == 'del':
        credstash.deleteSecrets(args.name, table=table)
        print('Deleted secret {}'.format(args.name))
Handle credstash script.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/scripts/credstash.py#L10-L72
import argparse
import getpass

from homeassistant.util.yaml import _SECRET_NAMESPACE

REQUIREMENTS = ['credstash==1.14.0', 'botocore==1.7.34']
MIT License
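A usage sketch for the entry point above, assuming AWS credentials and the credstash/botocore requirements are available; the secret name and value are placeholders:

# List all stored secrets (the DynamoDB table is created on first use):
run(['list'])

# Store a secret non-interactively, then read it back:
run(['put', 'db_password', 'hunter2'])
run(['get', 'db_password'])  # prints "Secret db_password=hunter2"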
jdkandersson/openalchemy
open_alchemy/schemas/artifacts/model.py
get
python
def get(
    schemas: oa_types.Schemas, schema: oa_types.Schema
) -> types.ModelExPropertiesArtifacts:
    tablename = peek.prefer_local(
        get_value=peek.tablename, schema=schema, schemas=schemas
    )
    assert tablename is not None

    inherits = schema_helper.inherits(schema=schema, schemas=schemas)
    parent: typing.Optional[str] = None
    if inherits is True:
        parent = inheritance.get_parent(schema=schema, schemas=schemas)

    description = peek.prefer_local(
        get_value=peek.description, schema=schema, schemas=schemas
    )
    mixins = peek.prefer_local(get_value=peek.mixins, schema=schema, schemas=schemas)
    kwargs = peek.prefer_local(get_value=peek.kwargs, schema=schema, schemas=schemas)

    composite_index: typing.Optional[oa_types.IndexList] = None
    composite_index_value = peek.prefer_local(
        get_value=peek.composite_index, schema=schema, schemas=schemas
    )
    if composite_index_value is not None:
        composite_index = factory.map_index(spec=composite_index_value)

    composite_unique: typing.Optional[oa_types.UniqueList] = None
    composite_unique_value = peek.prefer_local(
        get_value=peek.composite_unique, schema=schema, schemas=schemas
    )
    if composite_unique_value is not None:
        composite_unique = factory.map_unique(spec=composite_unique_value)

    backrefs = iterate.backrefs_items(schema=schema, schemas=schemas)
    backrefs_artifacts = map(
        lambda args: (args[0], _calculate_backref(args[1])), backrefs
    )

    return types.ModelExPropertiesArtifacts(
        tablename=tablename,
        inherits=inherits,
        parent=parent,
        description=description,
        mixins=mixins,
        kwargs=kwargs,
        composite_index=composite_index,
        composite_unique=composite_unique,
        backrefs=list(backrefs_artifacts),
    )
Retrieve the artifacts for the model.

Assume that the schema is valid.

Args:
    schema: The model schema.
    schemas: All the defined schemas used to resolve any $ref.

Returns:
    The artifacts for the model.
https://github.com/jdkandersson/openalchemy/blob/40f52d003e40ad79e67dcb305aef3dd4debefcc9/open_alchemy/schemas/artifacts/model.py#L32-L94
import typing

from ... import types as oa_types
from ...helpers import ext_prop
from ...helpers import inheritance
from ...helpers import peek
from ...helpers import schema as schema_helper
from ...table_args import factory
from ..helpers import iterate
from . import types


def _calculate_backref(schema: oa_types.Schema) -> types.ModelBackrefArtifacts:
    type_ = peek.type_(schema=schema, schemas={})
    assert type_ in {"object", "array"}

    if type_ == "object":
        parent = ext_prop.get(source=schema, name=oa_types.ExtensionProperties.DE_REF)
        assert isinstance(parent, str)
        return types.ModelBackrefArtifacts(types.BackrefSubType.OBJECT, parent)

    items_schema = peek.items(schema=schema, schemas={})
    assert items_schema is not None
    parent = ext_prop.get(source=items_schema, name=oa_types.ExtensionProperties.DE_REF)
    assert isinstance(parent, str)
    return types.ModelBackrefArtifacts(types.BackrefSubType.ARRAY, parent)
Apache License 2.0
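A hypothetical input illustrating what get() consumes; the model name and fields below are invented, built around OpenAlchemy's x-tablename extension property:

schemas = {
    'Division': {
        'type': 'object',
        'x-tablename': 'division',
        'description': 'A company division.',
        'properties': {'id': {'type': 'integer'}},
    },
}
artifacts = get(schemas=schemas, schema=schemas['Division'])
print(artifacts.tablename)    # 'division'
print(artifacts.description)  # 'A company division.'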
dr-guangtou/unagi
unagi/sky.py
AperPhot.flux
python
def flux(self, band=None, rerun='s18a'):
    if rerun == 's18a':
        if band is not None:
            return "{0}_apertureflux_{1}_flux".format(band.strip(), self.aper_id)
        return "apertureflux_{0}_flux".format(self.aper_id)
    else:
        raise NotImplementedError("# Only S18A data are available.")
Aperture flux column name in S18A.
https://github.com/dr-guangtou/unagi/blob/18ba250ef4d13a448183110dd6b76fb4cedb36e3/unagi/sky.py#L41-L48
import os
import warnings

import numpy as np

from astropy.table import Table
from scipy.stats import sigmaclip
from scipy.stats import binned_statistic_2d

from . import utils
from . import plotting

__all__ = ['SkyObjs', 'AperPhot', 'S18A_APER']


class AperPhot():
    PIX = 0.168

    def __init__(self, name, rad, rerun='s18a'):
        self.aper_id = name
        self.name = "aper{0}".format(self.aper_id)
        self.r_pix = rad
        self.area_pix = np.pi * (rad ** 2.0)
        self.r_arcsec = rad * self.PIX
        self.area_arcsec = np.pi * (self.r_arcsec ** 2.0)
        self.rerun = rerun
        self.flux_col = self.flux(rerun=self.rerun)
        self.err_col = self.err(rerun=self.rerun)
MIT License
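A small usage sketch for AperPhot, assuming the full class from the repository (which also defines the err() column helper called in __init__):

aper = AperPhot('10', 5.0)
print(aper.flux())          # 'apertureflux_10_flux'
print(aper.flux(band='i'))  # 'i_apertureflux_10_flux'
print(aper.r_arcsec)        # ~0.84 arcsec at 0.168 arcsec/pixel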
cidles/poio-api
src/poioapi/annotationgraph.py
AnnotationGraph._node_as_html_table
python
def _node_as_html_table(self, node, hierarchy):
    table = "<table style=\"margin:0;padding:0;float:left;border-collapse:collapse;\">"
    inserted = 0
    for i, t in enumerate(hierarchy):
        table += "<tr style=\"margin:0;padding:0;\">"
        if type(t) is list:
            table += "<td style=\"margin:0;padding:0px;\">"
            node_list = self.nodes_for_tier(t[0], node)
            for i, n in enumerate(node_list):
                table += self._node_as_html_table(n, t)
            table += "</td>"
        else:
            a_list = self.annotations_for_tier(t, node)
            a = ""
            if len(a_list) > 0:
                a = self.annotation_value_for_annotation(a_list[0])
            if a == "":
                a = "&nbsp;"
            table += "<td style=\"margin:0;padding:3px;\">{0}</td>".format(a)
        table += "</tr>"
    table += "</table>"
    return table
Create an html table for a node.

Parameters
----------
node : array_like
    The root node to start the traversal.
hierarchy : array_like
    An array with the data structure hierarchy.

Returns
-------
html : str
    An html table of the node.
https://github.com/cidles/poio-api/blob/0f66dff5531a4648e617343e3cfdb7e57f20881a/src/poioapi/annotationgraph.py#L385-L436
from __future__ import absolute_import, unicode_literals import sys import os.path import re import codecs from xml.dom import minidom from xml.etree.ElementTree import Element, SubElement, tostring import poioapi.io.elan import poioapi.io.mandinka import poioapi.io.obt import poioapi.io.graf import poioapi.io.toolbox import poioapi.io.toolboxxml import poioapi.io.shoebox import poioapi.io.typecraft import poioapi.io.odin import poioapi.data import poioapi.mapper import graf class AnnotationGraph(): def __init__(self, data_structure_type = None): if data_structure_type is None: self.structure_type_handler = None elif isinstance(data_structure_type, poioapi.data.DataStructureType): self.structure_type_handler = data_structure_type else: raise( poioapi.data.DataStructureTypeNotSupportedError( "Data structure type {0} not supported".format( data_structure_type))) self.graf = None self.tier_hierarchies = None self.meta_information = None self.root_tiers = [] self.primary_data = None self.source_type = None self.filters = [] self.filtered_node_ids = [] self.tier_mapper = poioapi.mapper.TierMapper() @classmethod def from_elan(cls, stream): return cls._from_file(stream, poioapi.data.EAF) @classmethod def from_mandinka(cls, stream, tier_map_file_path=''): cls.tier_mapper = poioapi.io.mandinka.tier_mapping() return cls._from_file(stream, poioapi.data.MANDINKA, tier_map_file_path=tier_map_file_path) @classmethod def from_obt(cls, stream): return cls._from_file(stream, poioapi.data.OBT) @classmethod def from_typecraft(cls, stream): return cls._from_file(stream, poioapi.data.TYPECRAFT) @classmethod def from_shoebox(cls, stream): return cls._from_file(stream, poioapi.data.SHOEBOX) @classmethod def from_toolboxxml(cls, stream): return cls._from_file(stream, poioapi.data.TOOLBOXXML) @classmethod def from_toolbox(cls, stream, tier_map_file_path=''): cls.tier_mapper = poioapi.io.toolbox.tier_mapping() return cls._from_file(stream, poioapi.data.TOOLBOX, tier_map_file_path=tier_map_file_path) @classmethod def from_graf(cls, stream): ag = cls() if not hasattr(stream, 'read'): stream = ag._open_file_(stream) parser = graf.GraphParser() ag.graf = parser.parse(stream) ag.from_file_type = poioapi.data.GRAF return ag @classmethod def from_odin(cls, stream, tier_map_file_path=''): return cls._from_file(stream, poioapi.data.ODIN, tier_map_file_path=tier_map_file_path) def _open_file_(self, filename): if sys.version_info[:2] < (3, 0): return codecs.open(filename, "rb") return codecs.open(filename, "r", "utf-8") @classmethod def _from_file(cls, stream, stream_type, tier_labels_file_path='', **kwargs): ag = cls() if tier_labels_file_path != '' and tier_labels_file_path is not None: ag.tier_mapper.load_mapping(tier_labels_file_path) if stream_type != poioapi.data.TOOLBOX and stream_type != poioapi.data.MANDINKA: if not hasattr(stream, 'read'): stream = ag._open_file_(stream) parser = None if stream_type == poioapi.data.EAF: parser = poioapi.io.elan.Parser(stream) elif stream_type == poioapi.data.MANDINKA: if not hasattr(stream, 'read'): stream = codecs.open(stream, "rb") parser = poioapi.io.mandinka.Parser(stream, tier_label_map=ag.tier_mapper) elif stream_type == poioapi.data.OBT: parser = poioapi.io.obt.Parser(stream) elif stream_type == poioapi.data.TYPECRAFT: parser = poioapi.io.typecraft.Parser(stream) elif stream_type == poioapi.data.TOOLBOXXML: parser = poioapi.io.toolboxxml.Parser(stream) elif stream_type == poioapi.data.SHOEBOX: parser = poioapi.io.shoebox.Parser(stream) elif stream_type == 
poioapi.data.TOOLBOX: if not hasattr(stream, 'read'): stream = codecs.open(stream, "rb") parser = poioapi.io.toolbox.Parser(stream, mapper=ag.tier_mapper) elif stream_type == poioapi.data.ODIN: parser = poioapi.io.odin.Parser(stream) converter = poioapi.io.graf.GrAFConverter(parser) converter.parse() if stream_type == poioapi.data.ODIN: converter.meta_information = parser.metadata ag.tier_hierarchies = converter.tier_hierarchies ag.meta_information = converter.meta_information ag.root_tiers = converter.root_tiers ag.graf = converter.graf ag.primary_data = converter.primary_data ag.source_type = stream_type ag.structure_type_handler = poioapi.data.DataStructureType(ag.tier_hierarchies[0]) return ag def root_nodes(self): res = list() base_tier_name = self.structure_type_handler.flat_data_hierarchy[0] res = self.nodes_for_tier(base_tier_name) try: return sorted(res, key=lambda node: node.links[0][0].start) except IndexError as indexError: return res def nodes_for_tier(self, tier_name, parent_node = None): res = [] if parent_node: for target_node in parent_node.iter_children(): if target_node.id.startswith(tier_name + poioapi.io.graf.GRAFSEPARATOR): res.append(target_node) else: if tier_name in self.root_tiers: for target_node_id in self.graf.header.roots: n = self.graf.nodes[target_node_id] if n.id.startswith(tier_name + poioapi.io.graf.GRAFSEPARATOR): res.append(self.graf.nodes[target_node_id]) if len(res) == 0: for target_node in self.graf.nodes: if target_node.id.startswith(tier_name + poioapi.io.graf.GRAFSEPARATOR): res.append(target_node) return res def annotations_for_tier(self, tier_name, node=None): res = [] if node is not None and node.id.startswith(tier_name): for a in node.annotations: res.append(a) else: nodes = self.nodes_for_tier(tier_name, node) for n in nodes: for a in n.annotations: res.append(a) return res def annotation_value_for_annotation(self, annotation): annotation_value = "" try: annotation_value = annotation.features.get_value("annotation_value") except KeyError: pass return annotation_value def annotation_value_for_node(self, node): return self.annotation_value_for_annotation( node.annotations.get_first()) def as_html_table(self, filtered = False, full_html = True): html = "" if full_html: html = "<html><head><meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" /></head><body>\n" for i, root_node in enumerate(self.root_nodes()): if filtered and (len(self.filtered_node_ids) == 0 or root_node.id not in self.filtered_node_ids[-1]): continue html += "<table style=\"border-collapse:collapse;border:1px solid black;margin-bottom:20px;\">" html += "<tr><td style=\"padding:4px;border:1px solid black;\">{0}</td>".format(i) html += "<td style=\"border:1px solid black;\">" html += self._node_as_html_table( root_node, self.structure_type_handler.data_hierarchy) html += "</td></tr></table>" if full_html: html += "</body></html>" return html
Apache License 2.0
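A toy standalone sketch of the recursion in _node_as_html_table; the dict and list stand-ins below are not poio-api types:

def as_table(values, hierarchy):
    # One row per tier; a nested list recurses into a child table,
    # just like the node_list loop in _node_as_html_table above.
    rows = []
    for tier in hierarchy:
        if isinstance(tier, list):
            rows.append('<tr><td>{}</td></tr>'.format(as_table(values, tier)))
        else:
            rows.append('<tr><td>{}</td></tr>'.format(values.get(tier) or '&nbsp;'))
    return '<table>{}</table>'.format(''.join(rows))

print(as_table({'utterance': 'hello world', 'word': 'hello'},
               ['utterance', ['word']]))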
threatconnect-inc/tcex
tcex/testing/validate_data.py
Validator.operator_keyvalue_eq
python
def operator_keyvalue_eq(self, app_data, test_data, **kwargs):
    if kwargs.get('exclude_keys') is not None:
        app_data = [
            kv for kv in app_data if kv.get('key') not in kwargs.get('exclude_keys', [])
        ]
        test_data = [
            kv for kv in test_data if kv.get('key') not in kwargs.get('exclude_keys', [])
        ]
        del kwargs['exclude_keys']
    return self.operator_deep_diff(app_data, test_data, **kwargs)
Compare that app data equals test data.

Args:
    app_data (dict|str|list): The data created by the App.
    test_data (dict|str|list): The data provided in the test case.

Returns:
    bool: The results of the operator.
https://github.com/threatconnect-inc/tcex/blob/dae37b73d8b33cf26360f6d25c6b305a68f2f0e2/tcex/testing/validate_data.py#L541-L561
import datetime import difflib import gzip import hashlib import json import math import numbers import operator import os import random import re from collections import OrderedDict from typing import Optional, Union from urllib.parse import quote, unquote from ..utils import Utils class Validator: def __init__(self, tcex: object, log: object): self.log = log self.tcex = tcex self.tcex.logger.update_handler_level('error') self._redis = None self._threatconnect = None self.max_diff = 10 self.truncate = 500 self.utils = Utils() @staticmethod def _string_to_int_float(x: str) -> Union[float, int]: if isinstance(x, bytes): x = x.decode('utf-8') try: f = float(x) i = int(f) except TypeError: return x except ValueError: return x else: if f != i: return f return i def compare( self, app_data: Union[dict, list, str], test_data: Union[dict, list, str], op: Optional[str] = None, **kwargs, ) -> tuple: try: del kwargs['comment'] except KeyError: pass op = op or 'eq' if not self.get_operator(op): self.log.data( 'validate', 'Invalid Operator', f'Provided operator of {op} is invalid', 'error' ) return False title = kwargs.pop('title', app_data) self.log.title(title, '=') log_app_data = kwargs.pop('log_app_data', app_data) log_test_data = kwargs.pop('log_test_data', test_data) passed, details = self.get_operator(op)(app_data, test_data, **kwargs) self.validate_log_output(passed, log_app_data, log_test_data, details.strip(), op) assert_error = ( f'\n App Data : {app_data}\n' f' Operator : {op}\n' f' Expected Data: {test_data}\n' f' Details : {details}\n' ) return passed, assert_error def details(self, app_data, test_data, op): details = '' if app_data is not None and test_data is not None and op in ['eq', 'ne']: try: diff_count = 0 for i, diff in enumerate(difflib.ndiff(app_data, test_data)): if diff[0] == ' ': continue if diff[0] == '-': details += f'\n * Missing data at index {i}' elif diff[0] == '+': details += f'\n * Extra data at index {i}' if diff_count > self.max_diff: details += '\n * Max number of differences reached.' self.log.data( 'validate', 'Maximum Reached', 'Max number of differences reached.' ) break diff_count += 1 except TypeError: pass except KeyError: pass return details def get_operator(self, op): operators = { 'date_format': self.operator_date_format, 'df': self.operator_date_format, 'dd': self.operator_deep_diff, 'is_url': self.operator_is_url, 'is_date': self.operator_is_date, 'is_number': self.operator_is_number, 'is_json': self.operator_is_json, 'length_eq': self.operator_length_eq, 'leq': self.operator_length_eq, 'eq': self.operator_eq, '=': self.operator_eq, 'le': self.operator_le, '<=': self.operator_le, 'lt': self.operator_lt, '<': self.operator_lt, 'ge': self.operator_ge, '>=': self.operator_ge, 'gt': self.operator_gt, '>': self.operator_gt, 'heq': self.operator_hash_eq, 'hash_eq': self.operator_hash_eq, 'jeq': self.operator_json_eq, 'json_eq': self.operator_json_eq, 'kveq': self.operator_keyvalue_eq, 'keyvalue_eq': self.operator_keyvalue_eq, 'ne': self.operator_ne, '!=': self.operator_ne, 'rex': self.operator_regex_match, 'skip': self.operator_skip, } return operators.get(op, None) def operator_date_format(self, app_data, test_data): if self.check_null(test_data): return False, f'Invalid test_data: {test_data}. One or more values in test_data is null' if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. 
One or more values in app_data is null' if not isinstance(app_data, list): app_data = [app_data] bad_data = [] passed = True for data in app_data: try: datetime.datetime.strptime(data, test_data) except ValueError: bad_data.append(data) passed = False return passed, ','.join(bad_data) @staticmethod def operator_deep_diff( app_data, test_data, **kwargs ): try: from deepdiff import DeepDiff except ImportError: return False, 'Could not import DeepDiff module (try "pip install deepdiff").' if (test_data is None or app_data is None) and app_data != test_data: return False, f'App Data {app_data} does not match Test Data {test_data}' safe_app_data = app_data if isinstance(app_data, OrderedDict): safe_app_data = json.loads(json.dumps(app_data)) elif isinstance(app_data, list): safe_app_data = [] for ad in app_data: if isinstance(ad, OrderedDict): ad = json.loads(json.dumps(ad)) safe_app_data.append(ad) safe_test_data = test_data if isinstance(test_data, OrderedDict): safe_test_data = json.loads(json.dumps(test_data)) elif isinstance(test_data, list): safe_test_data = [] for td in test_data: if isinstance(td, OrderedDict): td = json.loads(json.dumps(td)) safe_test_data.append(td) try: ddiff = DeepDiff(safe_app_data, safe_test_data, **kwargs) except KeyError: return False, 'Encountered KeyError when running deepdiff' except NameError: return False, 'Encountered NameError when running deepdiff' if ddiff: return False, str(ddiff) return True, '' def operator_eq(self, app_data, test_data): results = operator.eq(app_data, test_data) return results, self.details(app_data, test_data, 'eq') def operator_is_url(self, app_data, test_data): if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' if not isinstance(app_data, list): app_data = [app_data] bad_data = [] passed = True regex = re.compile( r'^(?:http|ftp)s?://' r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' r'localhost|' r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' r'(?::\d+)?' r'(?:/?|[/?]\S+)$', re.IGNORECASE, ) for data in app_data: try: matched = re.match(regex, data) if not matched: bad_data.append(data) passed = False except RuntimeError: bad_data.append(data) passed = False return passed, ','.join(bad_data) def operator_ge(self, app_data, test_data): if self.check_null(test_data): return False, f'Invalid test_data: {test_data}. One or more values in test_data is null' if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' app_data = self._string_to_int_float(app_data) test_data = self._string_to_int_float(test_data) results = operator.ge(app_data, test_data) details = '' if not results: details = f'{app_data} {type(app_data)} !(>=) {test_data} {type(test_data)}' return results, details def operator_gt(self, app_data, test_data): if self.check_null(test_data): return False, f'Invalid test_data: {test_data}. One or more values in test_data is null' if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' app_data = self._string_to_int_float(app_data) test_data = self._string_to_int_float(test_data) results = operator.gt(app_data, test_data) details = '' if not results: details = f'{app_data} {type(app_data)} !(>) {test_data} {type(test_data)}' return results, details def operator_is_date(self, app_data, test_data): if self.check_null(test_data): return False, f'Invalid test_data: {test_data}. 
One or more values in test_data is null' if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' if not isinstance(app_data, list): app_data = [app_data] bad_data = [] passed = True for data in app_data: try: self.tcex.utils.datetime.any_to_datetime(data) except RuntimeError: bad_data.append(data) passed = False return passed, ','.join(bad_data) @staticmethod def check_null(data_list): if not isinstance(data_list, list): data_list = [data_list] for data in data_list: if data is None: return True return False def operator_is_number(self, app_data, test_data): if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' if not isinstance(app_data, list): app_data = [app_data] bad_data = [] passed = True for data in app_data: if isinstance(data, str) and isinstance(self._string_to_int_float(data), (int, float)): continue if isinstance(data, numbers.Number): continue bad_data.append(data) passed = False return passed, ','.join(bad_data) def operator_is_json(self, app_data, test_data): if self.check_null(app_data): return False, f'Invalid app_data: {app_data}. One or more values in app_data is null' if not isinstance(app_data, list): app_data = [app_data] bad_data = [] for data in app_data: if isinstance(data, str): try: data = json.loads(data) if not isinstance(data, list): data = [data] for item in data: if not isinstance(item, dict): bad_data.append(f'Invalid JSON data provide ({item}).') except ValueError: print('failed to load data') bad_data.append(f'Invalid JSON data provide ({data}).') elif isinstance(data, (OrderedDict, dict)): try: data = json.dumps(data) except ValueError: bad_data.append(f'Invalid JSON data provide ({data}).') else: bad_data.append(f'Invalid JSON data provide ({data}).') if bad_data: return False, ','.join(bad_data) return True, ','.join(bad_data) @staticmethod def operator_hash_eq(app_data, test_data, **kwargs): if isinstance(app_data, (bytes, bytearray)): app_data_hash = hashlib.sha256(app_data).hexdigest() elif isinstance(app_data, str): encoding = kwargs.get('encoding', 'utf-8') app_data_hash = hashlib.sha256(app_data.encode(encoding)).hexdigest() else: return ( False, f'heq only supports Binary and String outputs, but app data was {type(app_data)}', ) if test_data != app_data_hash: return False, f'App Data is {app_data_hash} but Test Data is {test_data}' return True, '' @staticmethod def _load_json_data(data): if isinstance(data, (str)): return json.loads(data) if isinstance(data, (list)): data_updated = [] for ad in data: if isinstance(ad, (OrderedDict, dict)): ad = json.dumps(ad) try: ad = json.loads(ad) except Exception: pass data_updated.append(ad) return data_updated return data def operator_json_eq(self, app_data, test_data, **kwargs): if (test_data is None or app_data is None) and app_data != test_data: return False, f'App Data {app_data} does not match Test Data {test_data}' try: app_data = self._load_json_data(app_data) except ValueError: return False, f'Invalid JSON data provide ({app_data}).' try: test_data = self._load_json_data(test_data) except ValueError: return False, f'Invalid JSON data provide ({test_data}).' 
exclude = [] if 'exclude' in kwargs: exclude = kwargs.pop('exclude') if isinstance(app_data, list) and isinstance(test_data, list): app_data = [self.operator_json_eq_exclude(ad, exclude) for ad in app_data] test_data = [self.operator_json_eq_exclude(td, exclude) for td in test_data] elif isinstance(app_data, dict) and isinstance(test_data, dict): app_data = self.operator_json_eq_exclude(app_data, exclude) test_data = self.operator_json_eq_exclude(test_data, exclude) return self.operator_deep_diff(app_data, test_data, **kwargs) def operator_json_eq_exclude(self, data, exclude): for e in exclude: try: es = e.split('.') data = self.remove_excludes(data, es) except (AttributeError, KeyError, TypeError) as err: self.log.data( 'validate', 'Invalid Config', f'Invalid validation configuration: ({err})', 'error', ) return data
Apache License 2.0
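A toy illustration of the exclude_keys pre-filtering that operator_keyvalue_eq performs before handing off to the deep diff (plain dicts here, not TcEx playbook types):

app_data = [{'key': 'color', 'value': 'red'}, {'key': 'ts', 'value': '1'}]
test_data = [{'key': 'color', 'value': 'red'}, {'key': 'ts', 'value': '2'}]
exclude = ['ts']
app_filtered = [kv for kv in app_data if kv.get('key') not in exclude]
test_filtered = [kv for kv in test_data if kv.get('key') not in exclude]
assert app_filtered == test_filtered  # equal once the volatile 'ts' pair is dropped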
geopython/pygeoapi
pygeoapi/api.py
APIRequest.with_data
python
def with_data(cls, request, supported_locales) -> 'APIRequest':
    api_req = cls(request, supported_locales)
    if hasattr(request, 'data'):
        # Flask-style request: body is available synchronously
        api_req._data = request.data
    elif hasattr(request, 'body'):
        # Starlette-style request: body() is a coroutine and must be awaited
        try:
            import nest_asyncio
            nest_asyncio.apply()
            loop = asyncio.get_event_loop()
            api_req._data = loop.run_until_complete(request.body())
        except ModuleNotFoundError:
            LOGGER.error("Module nest-asyncio not found")
    return api_req
Factory class method to create an `APIRequest` instance with data.

If the request body is required, an `APIRequest` should always be
instantiated using this class method. The reason for this is that the
Starlette request body needs to be awaited (async), which cannot be
achieved in the :meth:`__init__` method of the `APIRequest`.
However, `APIRequest` can still be initialized using :meth:`__init__`,
but then the :attr:`data` property value will always be empty.

:param request: The web platform specific Request instance.
:param supported_locales: List or set of supported Locale instances.

:returns: An `APIRequest` instance with data.
https://github.com/geopython/pygeoapi/blob/90e43adf2b67ebfebfba65a8f073c105ad80019e/pygeoapi/api.py#L243-L275
import asyncio from collections import OrderedDict from copy import deepcopy from datetime import datetime, timezone from functools import partial import json import logging import os import re from typing import Any, Tuple, Union import urllib.parse import uuid from dateutil.parser import parse as dateparse import pytz from shapely.errors import WKTReadingError from shapely.wkt import loads as shapely_loads from pygeoapi import __version__, l10n from pygeoapi.formatter.base import FormatterSerializationError from pygeoapi.linked_data import (geojson2jsonld, jsonldify, jsonldify_collection) from pygeoapi.log import setup_logger from pygeoapi.process.base import ProcessorExecuteError from pygeoapi.plugin import load_plugin, PLUGINS from pygeoapi.provider.base import ( ProviderGenericError, ProviderConnectionError, ProviderNotFoundError, ProviderInvalidQueryError, ProviderNoDataError, ProviderQueryError, ProviderItemNotFoundError, ProviderTypeError) from pygeoapi.provider.tile import (ProviderTileNotFoundError, ProviderTileQueryError, ProviderTilesetIdNotFoundError) from pygeoapi.models.cql import CQLModel from pygeoapi.util import (dategetter, DATETIME_FORMAT, filter_dict_by_key_value, get_provider_by_type, get_provider_default, get_typed_value, JobStatus, json_serial, render_j2_template, str2bool, TEMPLATES, to_json) LOGGER = logging.getLogger(__name__) HEADERS = { 'Content-Type': 'application/json', 'X-Powered-By': 'pygeoapi {}'.format(__version__) } F_JSON = 'json' F_HTML = 'html' F_JSONLD = 'jsonld' FORMAT_TYPES = OrderedDict(( (F_HTML, 'text/html'), (F_JSONLD, 'application/ld+json'), (F_JSON, 'application/json'), )) SYSTEM_LOCALE = l10n.Locale('en', 'US') CONFORMANCE = [ 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/collections', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/html', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson', 'http://www.opengis.net/spec/ogcapi_coverages-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/oas30', 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/html', 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/geodata-coverage', 'http://www.opengis.net/spec/ogcapi-tiles-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/sorting', 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/opensearch', 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/json', 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/html', 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/ogc-process-description', 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/json', 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/oas30', 'http://www.opengis.net/spec/ogcapi-edr-1/1.0/conf/core' ] OGC_RELTYPES_BASE = 'http://www.opengis.net/def/rel/ogc/1.0' def pre_process(func): def inner(*args): cls, req_in = args[:2] req_out = APIRequest.with_data(req_in, getattr(cls, 'locales', set())) if len(args) > 2: return func(cls, req_out, *args[2:]) else: return func(cls, req_out) return inner class APIRequest: def __init__(self, request, supported_locales): self._data = b'' self._args = self._get_params(request) if hasattr(request, 'scope'): self._path_info = 
request.scope['path'].strip('/') elif hasattr(request.headers, 'environ'): self._path_info = request.headers.environ['PATH_INFO'].strip('/') self._raw_locale, self._locale = self._get_locale(request.headers, supported_locales) self._format = self._get_format(request.headers) self._headers = self.get_request_headers(request.headers) @classmethod
MIT License
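A usage sketch for the factory method above; request stands for any Flask- or Starlette-style request object, and _data is read directly here since the public accessor is defined elsewhere in the class:

# Handlers wrapped with @pre_process receive an APIRequest built this way,
# so the (possibly async) body is already resolved.
api_req = APIRequest.with_data(request, {SYSTEM_LOCALE})
payload = api_req._data  # bytes, whichever web framework produced the request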
timercrack/pydatacoll
pydatacoll/utils/asteval/asteval.py
Interpreter.node_assign
python
def node_assign(self, node, val):
    if node.__class__ == ast.Name:
        if not valid_symbol_name(node.id):
            errmsg = "invalid symbol name (reserved word?) %s" % node.id
            self.raise_exception(node, exc=NameError, msg=errmsg)
        self.symtable[node.id] = val
        if node.id in self.no_deepcopy:
            self.no_deepcopy.remove(node.id)
    elif node.__class__ == ast.Attribute:
        if node.ctx.__class__ == ast.Load:
            msg = "cannot assign to attribute %s" % node.attr
            self.raise_exception(node, exc=AttributeError, msg=msg)
        setattr(self.run(node.value), node.attr, val)
    elif node.__class__ == ast.Subscript:
        sym = self.run(node.value)
        xslice = self.run(node.slice)
        if isinstance(node.slice, ast.Index):
            sym[xslice] = val
        elif isinstance(node.slice, ast.Slice):
            sym[slice(xslice.start, xslice.stop)] = val
        elif isinstance(node.slice, ast.ExtSlice):
            sym[xslice] = val
    elif node.__class__ in (ast.Tuple, ast.List):
        if len(val) == len(node.elts):
            for telem, tval in zip(node.elts, val):
                self.node_assign(telem, tval)
        else:
            raise ValueError('too many values to unpack')
Here we assign a value (not the node.value object) to a node. This is used by on_assign, but also by for loops, list comprehensions, etc.
https://github.com/timercrack/pydatacoll/blob/fa5c221e8071ec8222eff9e11573d91cdc4b80d4/pydatacoll/utils/asteval/asteval.py#L375-L409
from __future__ import division, print_function from sys import exc_info, stdout, stderr, version_info import ast import math from time import time import sys from .astutils import (FROM_PY, FROM_MATH, FROM_NUMPY, UNSAFE_ATTRS, LOCALFUNCS, NUMPY_RENAMES, op2func, RECURSION_LIMIT, ExceptionHolder, ReturnedNone, valid_symbol_name) HAS_NUMPY = False try: import numpy HAS_NUMPY = True except ImportError: pass builtins = __builtins__ if not isinstance(builtins, dict): builtins = builtins.__dict__ MAX_EXEC_TIME = 2 class Interpreter: supported_nodes = ('arg', 'assert', 'assign', 'attribute', 'augassign', 'binop', 'boolop', 'break', 'call', 'compare', 'continue', 'delete', 'dict', 'ellipsis', 'excepthandler', 'expr', 'extslice', 'for', 'functiondef', 'if', 'ifexp', 'index', 'interrupt', 'list', 'listcomp', 'module', 'name', 'nameconstant', 'num', 'pass', 'print', 'raise', 'repr', 'return', 'slice', 'str', 'subscript', 'try', 'tuple', 'unaryop', 'while') def __init__(self, symtable=None, writer=None, use_numpy=True, err_writer=None, max_time=MAX_EXEC_TIME): self.writer = writer or stdout self.err_writer = err_writer or stderr self.start = 0 self.max_time = max_time self.old_recursion_limit = sys.getrecursionlimit() if symtable is None: symtable = {} self.symtable = symtable self._interrupt = None self.error = [] self.error_msg = None self.expr = None self.retval = None self.lineno = 0 self.use_numpy = HAS_NUMPY and use_numpy symtable['print'] = self._printer for sym in FROM_PY: if sym in builtins: symtable[sym] = builtins[sym] for symname, obj in LOCALFUNCS.items(): symtable[symname] = obj for sym in FROM_MATH: if hasattr(math, sym): symtable[sym] = getattr(math, sym) if self.use_numpy: for sym in FROM_NUMPY: if hasattr(numpy, sym): symtable[sym] = getattr(numpy, sym) for name, sym in NUMPY_RENAMES.items(): if hasattr(numpy, sym): symtable[name] = getattr(numpy, sym) self.node_handlers = dict(((node, getattr(self, "on_%s" % node)) for node in self.supported_nodes)) self.node_handlers['tryexcept'] = self.node_handlers['try'] self.node_handlers['tryfinally'] = self.node_handlers['try'] self.no_deepcopy = [] for key, val in symtable.items(): if callable(val) or 'numpy.lib.index_tricks' in repr(val): self.no_deepcopy.append(key) @staticmethod def set_recursion_limit(): sys.setrecursionlimit(RECURSION_LIMIT) def reset_recursion_limit(self): sys.setrecursionlimit(self.old_recursion_limit) def unimplemented(self, node): self.raise_exception(node, exc=NotImplementedError, msg="'%s' not supported" % node.__class__.__name__) def raise_exception(self, node, exc=None, msg='', expr=None, lineno=None): if self.error is None: self.error = [] if expr is None: expr = self.expr if self.error and not isinstance(node, ast.Module): msg = '%s' % msg err = ExceptionHolder(node, exc=exc, msg=msg, expr=expr, lineno=lineno) self._interrupt = ast.Break() self.error.append(err) if self.error_msg is None: self.error_msg = "%s in expr='%s'" % (msg, self.expr) elif msg: self.error_msg = "%s\n %s" % (self.error_msg, msg) if exc is None: try: exc = self.error[0].exc except: exc = RuntimeError raise exc(self.error_msg) def parse(self, text): self.expr = text try: self.set_recursion_limit() return ast.parse(text) except SyntaxError: self.raise_exception(None, msg='Syntax Error', expr=text) except: self.raise_exception(None, msg='Runtime Error', expr=text) finally: self.reset_recursion_limit() def run(self, node, expr=None, lineno=None, with_raise=True): if time() - self.start > self.max_time: raise RuntimeError("Execution exceeded 
time limit, max runtime is {}s".format(MAX_EXEC_TIME)) if self.error: return if node is None: return if isinstance(node, str): node = self.parse(node) if lineno is not None: self.lineno = lineno if expr is not None: self.expr = expr try: handler = self.node_handlers[node.__class__.__name__.lower()] except KeyError: return self.unimplemented(node) try: ret = handler(node) if isinstance(ret, enumerate): ret = list(ret) return ret except: if with_raise: self.raise_exception(node, expr=expr) def __call__(self, expr, **kw): return self.eval(expr, **kw) def eval(self, expr, lineno=0, show_errors=True): self.lineno = lineno self.error = [] self.start = time() try: try: self.set_recursion_limit() node = self.parse(expr) except: errmsg = exc_info()[1] if self.error: errmsg = "\n".join(self.error[0].get_error()) if not show_errors: try: exc = self.error[0].exc except: exc = RuntimeError raise exc(errmsg) print(errmsg, file=self.err_writer) return try: self.set_recursion_limit() return self.run(node, expr=expr, lineno=lineno) except: errmsg = exc_info()[1] if self.error: errmsg = "\n".join(self.error[0].get_error()) if not show_errors: try: exc = self.error[0].exc except: exc = RuntimeError raise exc(errmsg) print(errmsg, file=self.err_writer) return finally: self.reset_recursion_limit() @staticmethod def dump(node, **kw): return ast.dump(node, **kw) def on_expr(self, node): return self.run(node.value) def on_index(self, node): return self.run(node.value) def on_return(self, node): self.retval = self.run(node.value) if self.retval is None: self.retval = ReturnedNone return def on_repr(self, node): return repr(self.run(node.value)) def on_module(self, node): out = None for tnode in node.body: out = self.run(tnode) return out def on_pass(self, node): return None def on_ellipsis(self, node): return Ellipsis def on_interrupt(self, node): self._interrupt = node return node def on_break(self, node): return self.on_interrupt(node) def on_continue(self, node): return self.on_interrupt(node) def on_assert(self, node): if not self.run(node.test): self.raise_exception(node, exc=AssertionError, msg=node.msg) return True def on_list(self, node): return [self.run(e) for e in node.elts] def on_tuple(self, node): return tuple(self.on_list(node)) def on_dict(self, node): return dict([(self.run(k), self.run(v)) for k, v in zip(node.keys, node.values)]) def on_num(self, node): return node.n def on_str(self, node): return node.s def on_name(self, node): ctx = node.ctx.__class__ if ctx in (ast.Param, ast.Del): return str(node.id) else: if node.id in self.symtable: return self.symtable[node.id] else: msg = "name '%s' is not defined" % node.id self.raise_exception(node, exc=NameError, msg=msg) def on_nameconstant(self, node): return node.value
Apache License 2.0
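A short sketch of node_assign in action through the interpreter's public eval API: plain names, subscripts, and tuple unpacking all route through it (assuming the Interpreter class from this module):

interp = Interpreter()
interp.eval('x = [10, 20, 30]')   # ast.Name target
interp.eval('x[1] = 99')          # ast.Subscript target
interp.eval('a, b = 1, 2')        # ast.Tuple target, unpacked element-wise
print(interp.symtable['x'])       # [10, 99, 30]
print(interp.symtable['a'], interp.symtable['b'])  # 1 2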
deniscarriere/geocoder
geocoder/base.py
MultipleResultsQuery._before_initialize
python
def _before_initialize(self, location, **kwargs): pass
Can be overridden to finalize setup before the query.
https://github.com/deniscarriere/geocoder/blob/39b9999ec70e61da9fa52fe9fe82a261ad70fa8b/geocoder/base.py#L451-L453
from __future__ import absolute_import, print_function from builtins import str import requests import sys import json import six import logging from io import StringIO from collections import OrderedDict is_python2 = sys.version_info < (3, 0) if is_python2: from urlparse import urlparse class MutableSequence(object): def index(self, v, **kwargs): return self._list.index(v, **kwargs) def count(self, v): return self._list.count(v) def pop(self, i=-1): return self._list.pop(i) def remove(self, v): self._list.remove(v) def __iter__(self): return iter(self._list) def __contains__(self, v): return self._list.__contains__(v) def __eq__(self, other): return self._list == other else: from collections.abc import MutableSequence from urllib.parse import urlparse from geocoder.distance import Distance LOGGER = logging.getLogger(__name__) class OneResult(object): _TO_EXCLUDE = ['parse', 'json', 'url', 'fieldnames', 'help', 'debug', 'short_name', 'api', 'content', 'params', 'street_number', 'api_key', 'key', 'id', 'x', 'y', 'latlng', 'headers', 'timeout', 'wkt', 'locality', 'province', 'rate_limited_get', 'osm', 'route', 'schema', 'properties', 'geojson', 'tree', 'error', 'proxies', 'road', 'xy', 'northeast', 'northwest', 'southeast', 'southwest', 'road_long', 'city_long', 'state_long', 'country_long', 'postal_town_long', 'province_long', 'road_long', 'street_long', 'interpolated', 'method', 'geometry', 'session'] def __init__(self, json_content): self.raw = json_content self.northeast = [] self.northwest = [] self.southeast = [] self.southwest = [] self.fieldnames = [] self.json = {} self._parse_json_with_fieldnames() @property def lat(self): return '' @property def lng(self): return '' @property def accuracy(self): return '' @property def quality(self): return '' @property def bbox(self): return {} @property def address(self): return '' @property def housenumber(self): return '' @property def street(self): return '' @property def city(self): return '' @property def state(self): return '' @property def country(self): return '' @property def postal(self): return '' def __repr__(self): if self.address: return u'[{0}]'.format(six.text_type(self.address)) else: return u'[{0},{1}]'.format(self.lat, self.lng) def _parse_json_with_fieldnames(self): for key in dir(self): if not key.startswith('_') and key not in self._TO_EXCLUDE: self.fieldnames.append(key) value = getattr(self, key) if value: self.json[key] = value self.json['ok'] = self.ok @property def ok(self): return bool(self.lng and self.lat) @property def status(self): if self.ok: return 'OK' if not self.address: return 'ERROR - No results found' return 'ERROR - No Geometry' def debug(self, verbose=True): with StringIO() as output: print(u'\n', file=output) print(u'From provider\n', file=output) print(u'-----------\n', file=output) print(str(json.dumps(self.raw, indent=4)), file=output) print(u'\n', file=output) print(u'Cleaned json\n', file=output) print(u'-----------\n', file=output) print(str(json.dumps(self.json, indent=4)), file=output) print(u'\n', file=output) print(u'OSM Quality\n', file=output) print(u'-----------\n', file=output) osm_count = 0 for key in self.osm: if 'addr:' in key: if self.json.get(key.replace('addr:', '')): print(u'- [x] {0}\n'.format(key), file=output) osm_count += 1 else: print(u'- [ ] {0}\n'.format(key), file=output) print(u'({0}/{1})\n'.format(osm_count, len(self.osm) - 2), file=output) print(u'\n', file=output) print(u'Fieldnames\n', file=output) print(u'----------\n', file=output) fields_count = 0 for fieldname in 
self.fieldnames: if self.json.get(fieldname): print(u'- [x] {0}\n'.format(fieldname), file=output) fields_count += 1 else: print(u'- [ ] {0}\n'.format(fieldname), file=output) print(u'({0}/{1})\n'.format(fields_count, len(self.fieldnames)), file=output) if verbose: print(output.getvalue()) return [osm_count, fields_count] def _get_bbox(self, south, west, north, east): if all([south, east, north, west]): self.south = float(south) self.west = float(west) self.north = float(north) self.east = float(east) self.northeast = [self.north, self.east] self.northwest = [self.north, self.west] self.southwest = [self.south, self.west] self.southeast = [self.south, self.east] self.westsouth = [self.west, self.south] self.eastnorth = [self.east, self.north] return dict(northeast=self.northeast, southwest=self.southwest) return {} @property def confidence(self): if self.bbox: distance = Distance(self.northeast, self.southwest, units='km') for score, maximum in [(10, 0.25), (9, 0.5), (8, 1), (7, 5), (6, 7.5), (5, 10), (4, 15), (3, 20), (2, 25)]: if distance < maximum: return score if distance >= 25: return 1 return 0 @property def geometry(self): if self.ok: return { 'type': 'Point', 'coordinates': [self.x, self.y]} return {} @property def osm(self): osm = dict() if self.ok: osm['x'] = self.x osm['y'] = self.y if self.housenumber: osm['addr:housenumber'] = self.housenumber if self.road: osm['addr:street'] = self.road if self.city: osm['addr:city'] = self.city if self.state: osm['addr:state'] = self.state if self.country: osm['addr:country'] = self.country if self.postal: osm['addr:postal'] = self.postal if hasattr(self, 'population'): if self.population: osm['population'] = self.population return osm @property def geojson(self): feature = { 'type': 'Feature', 'properties': self.json, } if self.bbox: feature['bbox'] = [self.west, self.south, self.east, self.north] feature['properties']['bbox'] = feature['bbox'] if self.geometry: feature['geometry'] = self.geometry return feature @property def wkt(self): if self.ok: return 'POINT({x} {y})'.format(x=self.x, y=self.y) return '' @property def xy(self): if self.ok: return [self.lng, self.lat] return [] @property def latlng(self): if self.ok: return [self.lat, self.lng] return [] @property def y(self): return self.lat @property def x(self): return self.lng @property def locality(self): return self.city @property def province(self): return self.state @property def street_number(self): return self.housenumber @property def road(self): return self.street @property def route(self): return self.street class MultipleResultsQuery(MutableSequence): _URL = None _RESULT_CLASS = None _KEY = None _KEY_MANDATORY = True _TIMEOUT = 5.0 @staticmethod def _is_valid_url(url): try: parsed = urlparse(url) mandatory_parts = [parsed.scheme, parsed.netloc] return all(mandatory_parts) except: return False @classmethod def _is_valid_result_class(cls): return issubclass(cls._RESULT_CLASS, OneResult) @classmethod def _get_api_key(cls, key=None): key = key or cls._KEY if not key and cls._KEY_MANDATORY: raise ValueError('Provide API Key') return key def __init__(self, location, **kwargs): super(MultipleResultsQuery, self).__init__() self._list = [] if not self._is_valid_url(self._URL): raise ValueError("Subclass must define a valid URL. Got %s", self._URL) self.url = kwargs.get('url', self._URL) or self._URL if not self._is_valid_url(self.url): raise ValueError("url not valid. 
Got %s", self.url) if not self._is_valid_result_class(): raise ValueError( "Subclass must define _RESULT_CLASS from 'OneResult'. Got %s", self._RESULT_CLASS) self.one_result = self._RESULT_CLASS provider_key = self._get_api_key(kwargs.pop('key', '')) self.location = location self.encoding = kwargs.get('encoding', 'utf-8') self.timeout = kwargs.get('timeout', self._TIMEOUT) self.proxies = kwargs.get('proxies', '') self.session = kwargs.get('session', requests.Session()) self.headers = self._build_headers(provider_key, **kwargs).copy() self.headers.update(kwargs.get('headers', {})) self.params = OrderedDict(self._build_params(location, provider_key, **kwargs)) self.params.update(kwargs.get('params', {})) self.status_code = None self.response = None self.error = False self.current_result = None self._before_initialize(location, **kwargs) self._initialize() def __getitem__(self, key): return self._list[key] def __setitem__(self, key, value): self._list[key] = value def __delitem__(self, key): del self._list[key] def __len__(self): return len(self._list) def insert(self, index, value): self._list.insert(index, value) def add(self, value): self._list.append(value) def __repr__(self): base_repr = u'<[{0}] {1} - {2} {{0}}>'.format( self.status, self.provider.title(), self.method.title() ) if len(self) == 0: return base_repr.format(u'[empty]') elif len(self) == 1: return base_repr.format(repr(self[0])) else: return base_repr.format(u'#%s results' % len(self)) def _build_headers(self, provider_key, **kwargs): return {} def _build_params(self, location, provider_key, **kwargs): return {}
MIT License
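A sketch of the hook's intent: a subclass overrides _before_initialize to finish configuring the request just before _initialize fires. The provider class, endpoint URL, and parameter names below are hypothetical:

class ExampleQuery(MultipleResultsQuery):
    _URL = 'https://geocode.example.com/v1/search'  # hypothetical endpoint
    _RESULT_CLASS = OneResult
    _KEY_MANDATORY = False

    def _before_initialize(self, location, **kwargs):
        # last chance to adjust state before the HTTP request is sent
        self.params['q'] = location
        self.params['limit'] = kwargs.get('limit', 1)

Note that instantiating ExampleQuery('some place') triggers the query, since __init__ ends by calling _before_initialize and then _initialize.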
shannonai/fast-knn-nmt
thirdparty/fairseq/fairseq/search.py
Search.init_constraints
python
def init_constraints(self, batch_constraints: Optional[Tensor], beam_size: int): pass
Initialize constraint states for constrained decoding (if supported). Args: batch_constraints: (torch.Tensor, optional) the list of constraints, in packed form beam_size: (int) the beam size
https://github.com/shannonai/fast-knn-nmt/blob/27bbdd967befe06bfbfde11ab9cfa34b4aa46482/thirdparty/fairseq/fairseq/search.py#L65-L76
import math from typing import List, Optional import torch import torch.nn as nn from fairseq.token_generation_constraints import ( ConstraintState, OrderedConstraintState, UnorderedConstraintState, ) from torch import Tensor class Search(nn.Module): def __init__(self, tgt_dict): super().__init__() self.pad = tgt_dict.pad() self.unk = tgt_dict.unk() self.eos = tgt_dict.eos() self.vocab_size = len(tgt_dict) self.src_lengths = torch.tensor(-1) self.supports_constraints = False self.stop_on_max_len = False def step( self, step, lprobs, scores, prev_output_tokens=None, original_batch_idxs=None ): raise NotImplementedError @torch.jit.export def set_src_lengths(self, src_lengths): self.src_lengths = src_lengths @torch.jit.export
Apache License 2.0
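On the base Search class this hook is a no-op; subclasses that set supports_constraints override it to build per-beam constraint states. A minimal illustration (tgt_dict is assumed to be a fairseq dictionary exposing pad(), unk(), eos(), and __len__):

search = Search(tgt_dict)
assert search.supports_constraints is False
# safe to call even when unsupported: the base implementation does nothing
search.init_constraints(batch_constraints=None, beam_size=5)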
mozilla/moztrap
moztrap/model/mtadmin.py
MTInlineFormSet.save
python
def save(self, *args, **kwargs): self.user = kwargs.pop("user", None) return super(MTInlineFormSet, self).save(*args, **kwargs)
Save model instances for each form in the formset.
https://github.com/mozilla/moztrap/blob/93b34a4cd21c9e08f73d3b1a7630cd873f8418a0/moztrap/model/mtadmin.py#L198-L202
from itertools import chain from functools import partial from django.conf import settings from django.forms.models import BaseInlineFormSet from django.shortcuts import redirect from django.views.decorators.cache import never_cache from django.contrib import admin, messages from django.contrib.admin import actions from django.contrib.admin.util import flatten_fieldsets from django.contrib.auth import REDIRECT_FIELD_NAME from django.contrib.auth.views import redirect_to_login from moztrap.view.utils.mtforms import MTModelForm class MTAdminSite(admin.AdminSite): @never_cache def login(self, request, extra_context=None): if request.user.is_authenticated(): messages.warning( request, "Your account does not have permissions to access that page. " "Please log in with a different account, or visit a different " "page. " ) return redirect_to_login( request.get_full_path(), settings.LOGIN_URL, REDIRECT_FIELD_NAME, ) @never_cache def logout(self, request, extra_context=None): return redirect("home") site = MTAdminSite() class MTModelAdmin(admin.ModelAdmin): list_display = ["__unicode__", "deleted_on", "deleted_by"] readonly_fields = [ "created_on", "created_by", "modified_on", "modified_by", "deleted_on", "deleted_by", "cc_version", ] actions = ["delete", "undelete", "delete_selected"] def delete(self, request, queryset): queryset.delete(user=request.user) delete.short_description = ( u"Delete selected %(verbose_name_plural)s") def undelete(self, request, queryset): queryset.undelete(user=request.user) undelete.short_description = ( u"Undelete selected %(verbose_name_plural)s") def delete_selected(self, request, queryset): queryset.delete = partial(queryset.delete, permanent=True) return actions.delete_selected(self, request, queryset) delete_selected.short_description = ( u"PERMANENTLY delete selected %(verbose_name_plural)s") def save_model(self, request, obj, form, change): obj.save(user=request.user) def save_formset(self, request, form, formset, change): if isinstance(formset, MTInlineFormSet): formset.save(user=request.user) else: formset.save() def delete_model(self, request, obj): obj.delete(user=request.user) def get_fieldsets(self, *args, **kwargs): meta_fields = [ ("cc_version",), ("created_on", "created_by"), ("modified_on", "modified_by"), ] delete_fields = [ ("deleted_on", "deleted_by"), ] fieldsets = super(MTModelAdmin, self).get_fieldsets( *args, **kwargs)[:] if not self.declared_fieldsets: metadata_fields = set( chain.from_iterable(chain(meta_fields, delete_fields))) fieldsets[0][1]["fields"] = [ field for field in fieldsets[0][1]["fields"] if field not in metadata_fields ] fieldsets.extend([ ("Deletion", {"fields": delete_fields}), ("Meta", {"fields": meta_fields, "classes": ["collapse"]}) ]) return fieldsets class TeamModelAdmin(MTModelAdmin): def get_fieldsets(self, *args, **kwargs): team_fields = [("has_team", "own_team")] fieldsets = super(TeamModelAdmin, self).get_fieldsets( *args, **kwargs)[:] if not self.declared_fieldsets: metadata_fields = set(chain.from_iterable(team_fields)) fieldsets[0][1]["fields"] = [ field for field in fieldsets[0][1]["fields"] if field not in metadata_fields ] fieldsets.insert(-2, ("Team", {"fields": team_fields})) return fieldsets def get_form(self, *args, **kwargs): if self.declared_fieldsets: kwargs["fields"] = flatten_fieldsets( self.declared_fieldsets) + ["has_team", "own_team"] return super(TeamModelAdmin, self).get_form(*args, **kwargs) class MTInlineFormSet(BaseInlineFormSet):
BSD 2-Clause Simplified License
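A sketch of how the override is consumed: admin code passes the acting user as a keyword, which save pops and stores on the formset before delegating to the stock BaseInlineFormSet.save (mirroring save_formset in the context above):

# inside a Django admin view, with request available:
if isinstance(formset, MTInlineFormSet):
    formset.save(user=request.user)  # user is recorded on the formset before saving
else:
    formset.save()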
kylebebak/questionnaire
questionnaire/prompters.py
many
python
def many(prompt, *args, **kwargs): def get_options(options, chosen): return [options[i] for i, c in enumerate(chosen) if c] def get_verbose_options(verbose_options, chosen): no, yes = ' ', '✔' if sys.version_info < (3, 3): no, yes = ' ', '@' opts = ['{} {}'.format(yes if c else no, verbose_options[i]) for i, c in enumerate(chosen)] return opts + ['{}{}'.format(' ', kwargs.get('done', 'done...'))] options, verbose_options = prepare_options(args) chosen = [False] * len(options) index = kwargs.get('idx', 0) default = kwargs.get('default', None) if isinstance(default, list): for idx in default: chosen[idx] = True if isinstance(default, int): chosen[default] = True while True: try: index = one(prompt, *get_verbose_options(verbose_options, chosen), return_index=True, idx=index) except QuestionnaireGoBack: if any(chosen): raise QuestionnaireGoBack(0) else: raise QuestionnaireGoBack if index == len(options): return get_options(options, chosen) chosen[index] = not chosen[index]
Calls the `one` picker in a while loop to allow the user to pick many options. Returns a list of chosen options.
https://github.com/kylebebak/questionnaire/blob/ed92642e8a2a0198da198acbcde2707f1d528585/questionnaire/prompters.py#L74-L109
from __future__ import print_function import sys import curses import os import getpass from contextlib import contextmanager from pick import Picker prompters = {} class QuestionnaireGoBack(Exception): def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def is_string(thing): if sys.version_info < (3, 0): return isinstance(thing, basestring) return isinstance(thing, str) def register(key='function'): def decorate(func): prompters[key] = func return func return decorate @register(key='one') def one(prompt, *args, **kwargs): indicator = '‣' if sys.version_info < (3, 0): indicator = '>' def go_back(picker): return None, -1 options, verbose_options = prepare_options(args) idx = kwargs.get('idx', 0) picker = Picker(verbose_options, title=prompt, indicator=indicator, default_index=idx) picker.register_custom_handler(ord('h'), go_back) picker.register_custom_handler(curses.KEY_LEFT, go_back) with stdout_redirected(sys.stderr): option, index = picker.start() if index == -1: raise QuestionnaireGoBack if kwargs.get('return_index', False): return index return options[index] @register(key='many')
MIT License
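A sketch of calling many with positional options and a default pre-selection (the option strings are illustrative; this runs an interactive curses picker):

toppings = many(
    'Choose toppings:',
    'cheese', 'mushrooms', 'olives',
    default=[0, 2],      # pre-check the first and third options
    done='confirm...',   # label for the terminating entry
)
# returns e.g. ['cheese', 'olives'] once the user selects "confirm..."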
bitmovin/bitmovin-api-sdk-python
bitmovin_api_sdk/models/watch_folder_input.py
WatchFolderInput.input_id
python
def input_id(self): return self._input_id
Gets the input_id of this WatchFolderInput. Input id (required) :return: The input_id of this WatchFolderInput. :rtype: string_types
https://github.com/bitmovin/bitmovin-api-sdk-python/blob/79dd938804197151af7cbe5501c7ec1d97872c15/bitmovin_api_sdk/models/watch_folder_input.py#L44-L53
from enum import Enum from six import string_types, iteritems from bitmovin_api_sdk.common.poscheck import poscheck_model import pprint import six class WatchFolderInput(object): @poscheck_model def __init__(self, input_id=None, input_path=None): self._input_id = None self._input_path = None self.discriminator = None if input_id is not None: self.input_id = input_id if input_path is not None: self.input_path = input_path @property def openapi_types(self): types = { 'input_id': 'string_types', 'input_path': 'string_types' } return types @property def attribute_map(self): attributes = { 'input_id': 'inputId', 'input_path': 'inputPath' } return attributes @property
MIT License
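Usage is plain property access; a sketch (the id and path are placeholders):

wf_input = WatchFolderInput(
    input_id='c0b8d3a2-0000-0000-0000-000000000000',  # placeholder id
    input_path='watch/incoming',
)
print(wf_input.input_id)  # read back via the property shown above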
hetznercloud/hcloud-python
hcloud/servers/client.py
BoundServer.power_on
python
def power_on(self): return self._client.power_on(self)
Starts a server by turning its power on. :return: :class:`BoundAction <hcloud.actions.client.BoundAction>`
https://github.com/hetznercloud/hcloud-python/blob/fad870790a19621fd130fd28d564a8d6ba7a566c/hcloud/servers/client.py#L170-L176
from hcloud.core.client import ClientEntityBase, BoundModelBase, GetEntityByNameMixin from hcloud.actions.client import BoundAction from hcloud.core.domain import add_meta_to_result from hcloud.firewalls.client import BoundFirewall from hcloud.floating_ips.client import BoundFloatingIP from hcloud.isos.client import BoundIso from hcloud.servers.domain import ( Server, CreateServerResponse, ResetPasswordResponse, EnableRescueResponse, RequestConsoleResponse, PublicNetwork, IPv4Address, IPv6Network, PrivateNet, PublicNetworkFirewall, ) from hcloud.volumes.client import BoundVolume from hcloud.images.domain import CreateImageResponse from hcloud.images.client import BoundImage from hcloud.server_types.client import BoundServerType from hcloud.datacenters.client import BoundDatacenter from hcloud.networks.client import BoundNetwork from hcloud.networks.domain import Network from hcloud.placement_groups.client import BoundPlacementGroup class BoundServer(BoundModelBase): model = Server def __init__(self, client, data, complete=True): datacenter = data.get("datacenter") if datacenter is not None: data["datacenter"] = BoundDatacenter(client._client.datacenters, datacenter) volumes = data.get("volumes", []) if volumes: volumes = [ BoundVolume(client._client.volumes, {"id": volume}, complete=False) for volume in volumes ] data["volumes"] = volumes image = data.get("image", None) if image is not None: data["image"] = BoundImage(client._client.images, image) iso = data.get("iso", None) if iso is not None: data["iso"] = BoundIso(client._client.isos, iso) server_type = data.get("server_type") if server_type is not None: data["server_type"] = BoundServerType( client._client.server_types, server_type ) public_net = data.get("public_net") if public_net: ipv4_address = IPv4Address.from_dict(public_net["ipv4"]) ipv6_network = IPv6Network.from_dict(public_net["ipv6"]) floating_ips = [ BoundFloatingIP( client._client.floating_ips, {"id": floating_ip}, complete=False ) for floating_ip in public_net["floating_ips"] ] firewalls = [ PublicNetworkFirewall( BoundFirewall( client._client.firewalls, {"id": firewall["id"]}, complete=False ), status=firewall["status"], ) for firewall in public_net.get("firewalls", []) ] data["public_net"] = PublicNetwork( ipv4=ipv4_address, ipv6=ipv6_network, floating_ips=floating_ips, firewalls=firewalls, ) private_nets = data.get("private_net") if private_nets: private_nets = [ PrivateNet( network=BoundNetwork( client._client.networks, {"id": private_net["network"]}, complete=False, ), ip=private_net["ip"], alias_ips=private_net["alias_ips"], mac_address=private_net["mac_address"], ) for private_net in private_nets ] data["private_net"] = private_nets placement_group = data.get("placement_group") if placement_group: placement_group = BoundPlacementGroup( client._client.placement_groups, placement_group ) data["placement_group"] = placement_group super(BoundServer, self).__init__(client, data, complete) def get_actions_list(self, status=None, sort=None, page=None, per_page=None): return self._client.get_actions_list(self, status, sort, page, per_page) def get_actions(self, status=None, sort=None): return self._client.get_actions(self, status, sort) def update(self, name=None, labels=None): return self._client.update(self, name, labels) def delete(self): return self._client.delete(self) def power_off(self): return self._client.power_off(self)
MIT License
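A sketch of powering a server on through the client. The top-level Client import and get_by_id call are assumptions based on the package's public API; the token and server id are placeholders:

from hcloud import Client  # assumption: top-level client entry point

client = Client(token='PROJECT_API_TOKEN')  # placeholder token
server = client.servers.get_by_id(42)       # returns a BoundServer
action = server.power_on()                  # BoundAction tracking the power-on
print(action.status)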
osmr/imgclsmob
chainer_/chainercv2/models/vgg.py
bn_vgg19b
python
def bn_vgg19b(**kwargs): return get_vgg(blocks=19, use_bias=True, use_bn=True, model_name="bn_vgg19b", **kwargs)
VGG-19 model with batch normalization and biases in convolution layers from 'Very Deep Convolutional Networks for Large-Scale Image Recognition,' https://arxiv.org/abs/1409.1556. Parameters: ---------- pretrained : bool, default False Whether to load the pretrained weights for model. root : str, default '~/.chainer/models' Location for keeping the model parameters.
https://github.com/osmr/imgclsmob/blob/ea5f784eea865ce830f3f97c5c1d1f6491d9cbb2/chainer_/chainercv2/models/vgg.py#L376-L388
__all__ = ['VGG', 'vgg11', 'vgg13', 'vgg16', 'vgg19', 'bn_vgg11', 'bn_vgg13', 'bn_vgg16', 'bn_vgg19', 'bn_vgg11b', 'bn_vgg13b', 'bn_vgg16b', 'bn_vgg19b'] import os import chainer.functions as F import chainer.links as L from chainer import Chain from functools import partial from chainer.serializers import load_npz from .common import conv3x3_block, SimpleSequential class VGGDense(Chain): def __init__(self, in_channels, out_channels): super(VGGDense, self).__init__() with self.init_scope(): self.fc = L.Linear( in_size=in_channels, out_size=out_channels) self.activ = F.relu self.dropout = partial( F.dropout, ratio=0.5) def __call__(self, x): x = self.fc(x) x = self.activ(x) x = self.dropout(x) return x class VGGOutputBlock(Chain): def __init__(self, in_channels, classes): super(VGGOutputBlock, self).__init__() mid_channels = 4096 with self.init_scope(): self.fc1 = VGGDense( in_channels=in_channels, out_channels=mid_channels) self.fc2 = VGGDense( in_channels=mid_channels, out_channels=mid_channels) self.fc3 = L.Linear( in_size=mid_channels, out_size=classes) def __call__(self, x): x = self.fc1(x) x = self.fc2(x) x = self.fc3(x) return x class VGG(Chain): def __init__(self, channels, use_bias=True, use_bn=False, in_channels=3, in_size=(224, 224), classes=1000): super(VGG, self).__init__() self.in_size = in_size self.classes = classes with self.init_scope(): self.features = SimpleSequential() with self.features.init_scope(): for i, channels_per_stage in enumerate(channels): stage = SimpleSequential() with stage.init_scope(): for j, out_channels in enumerate(channels_per_stage): setattr(stage, "unit{}".format(j + 1), conv3x3_block( in_channels=in_channels, out_channels=out_channels, use_bias=use_bias, use_bn=use_bn)) in_channels = out_channels setattr(stage, "pool{}".format(i + 1), partial( F.max_pooling_2d, ksize=2, stride=2, pad=0)) setattr(self.features, "stage{}".format(i + 1), stage) in_channels = in_channels * 7 * 7 self.output = SimpleSequential() with self.output.init_scope(): setattr(self.output, "flatten", partial( F.reshape, shape=(-1, in_channels))) setattr(self.output, "classifier", VGGOutputBlock( in_channels=in_channels, classes=classes)) def __call__(self, x): x = self.features(x) x = self.output(x) return x def get_vgg(blocks, use_bias=True, use_bn=False, model_name=None, pretrained=False, root=os.path.join("~", ".chainer", "models"), **kwargs): if blocks == 11: layers = [1, 1, 2, 2, 2] elif blocks == 13: layers = [2, 2, 2, 2, 2] elif blocks == 16: layers = [2, 2, 3, 3, 3] elif blocks == 19: layers = [2, 2, 4, 4, 4] else: raise ValueError("Unsupported VGG with number of blocks: {}".format(blocks)) channels_per_layers = [64, 128, 256, 512, 512] channels = [[ci] * li for (ci, li) in zip(channels_per_layers, layers)] net = VGG( channels=channels, use_bias=use_bias, use_bn=use_bn, **kwargs) if pretrained: if (model_name is None) or (not model_name): raise ValueError("Parameter `model_name` should be properly initialized for loading pretrained model.") from .model_store import get_model_file load_npz( file=get_model_file( model_name=model_name, local_model_store_dir_path=root), obj=net) return net def vgg11(**kwargs): return get_vgg(blocks=11, model_name="vgg11", **kwargs) def vgg13(**kwargs): return get_vgg(blocks=13, model_name="vgg13", **kwargs) def vgg16(**kwargs): return get_vgg(blocks=16, model_name="vgg16", **kwargs) def vgg19(**kwargs): return get_vgg(blocks=19, model_name="vgg19", **kwargs) def bn_vgg11(**kwargs): return get_vgg(blocks=11, use_bias=False, use_bn=True, 
model_name="bn_vgg11", **kwargs) def bn_vgg13(**kwargs): return get_vgg(blocks=13, use_bias=False, use_bn=True, model_name="bn_vgg13", **kwargs) def bn_vgg16(**kwargs): return get_vgg(blocks=16, use_bias=False, use_bn=True, model_name="bn_vgg16", **kwargs) def bn_vgg19(**kwargs): return get_vgg(blocks=19, use_bias=False, use_bn=True, model_name="bn_vgg19", **kwargs) def bn_vgg11b(**kwargs): return get_vgg(blocks=11, use_bias=True, use_bn=True, model_name="bn_vgg11b", **kwargs) def bn_vgg13b(**kwargs): return get_vgg(blocks=13, use_bias=True, use_bn=True, model_name="bn_vgg13b", **kwargs) def bn_vgg16b(**kwargs): return get_vgg(blocks=16, use_bias=True, use_bn=True, model_name="bn_vgg16b", **kwargs)
MIT License
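A sketch of instantiating the network and running a dummy forward pass (Chainer, NCHW input at the default 224x224 in_size and 1000 classes from the context above):

import numpy as np

net = bn_vgg19b(pretrained=False)
x = np.zeros((1, 3, 224, 224), dtype=np.float32)
y = net(x)       # logits for the 1000 default classes
print(y.shape)   # (1, 1000)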
openvinotoolkit/datumaro
datumaro/cli/util/project.py
generate_next_file_name
python
def generate_next_file_name(basename, basedir='.', sep='.', ext=''): return generate_next_name(os.listdir(basedir), basename, sep, ext)
If basedir does not contain basename, returns basename; otherwise generates a name by appending sep and the next number after the last one used in basedir for files with the basename prefix. Optionally, appends ext.
https://github.com/openvinotoolkit/datumaro/blob/e3d011a9fb1dcdc660fff0ff4fdf56206c1cd4bf/datumaro/cli/util/project.py#L21-L29
from typing import Optional, Tuple import os import re from datumaro.cli.util.errors import WrongRevpathError from datumaro.components.dataset import Dataset from datumaro.components.environment import Environment from datumaro.components.errors import DatumaroError, ProjectNotFoundError from datumaro.components.project import Project, Revision from datumaro.util.os_util import generate_next_name from datumaro.util.scope import on_error_do, scoped def load_project(project_dir, readonly=False): return Project(project_dir, readonly=readonly)
MIT License
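A sketch of the naming behavior, assuming generate_next_name numbers files as the docstring describes:

# with basedir '.' containing: report.json, report.1.json
name = generate_next_file_name('report', basedir='.', ext='.json')
# -> 'report.2.json'; with no 'report*' files present, just 'report.json'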
icb-dcm/pyabc
pyabc/transition/util.py
smart_cov
python
def smart_cov(X_arr: np.ndarray, w: np.ndarray) -> np.ndarray: if X_arr.shape[0] == 1: cov_diag = X_arr[0] cov = np.diag(np.absolute(cov_diag)) return cov cov = np.cov(X_arr, aweights=w, rowvar=False) cov = np.atleast_2d(cov) return cov
Create a sample covariance matrix. Also returns a covariance if X_arr consists of only a single sample.
https://github.com/icb-dcm/pyabc/blob/3cef3237a819caba40efe6eb4f775822b4d66955/pyabc/transition/util.py#L4-L16
import numpy as np
BSD 3-Clause New or Revised License
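A quick numeric sketch of both branches:

import numpy as np

X = np.array([[1.0, 2.0], [2.0, 3.0], [4.0, 6.0]])
w = np.array([0.2, 0.3, 0.5])
print(smart_cov(X, w))  # 2x2 weighted sample covariance via np.cov(aweights=w)

single = np.array([[1.5, -2.0]])
print(smart_cov(single, w[:1]))  # one sample: falls back to diag(|sample|)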
nuagenetworks/vspk-python
vspk/v5_0/nutca.py
NUTCA.status
python
def status(self, value): self._status = value
Set status value. Notes: This flag is used to indicate the status of the TCA.
https://github.com/nuagenetworks/vspk-python/blob/375cce10ae144ad6017104e57fcd3630898cc2a6/vspk/v5_0/nutca.py#L629-L637
from .fetchers import NUMetadatasFetcher from .fetchers import NUAlarmsFetcher from .fetchers import NUGlobalMetadatasFetcher from .fetchers import NUEventLogsFetcher from bambou import NURESTObject class NUTCA(NURESTObject): __rest_name__ = "tca" __resource_name__ = "tcas" CONST_METRIC_PACKETS_OUT_DROPPED = "PACKETS_OUT_DROPPED" CONST_METRIC_Q4_BYTES = "Q4_BYTES" CONST_METRIC_FIP_RATE_LIMIT_DROPPED_PACKETS = "FIP_RATE_LIMIT_DROPPED_PACKETS" CONST_METRIC_TX_ERRORS = "TX_ERRORS" CONST_METRIC_RX_BYTES = "RX_BYTES" CONST_TYPE_BREACH = "BREACH" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" CONST_METRIC_TCP_FLAG_RST_IN = "TCP_FLAG_RST_IN" CONST_METRIC_RX_DROPPED = "RX_DROPPED" CONST_METRIC_Q3_PKT_COUNT = "Q3_PKT_COUNT" CONST_METRIC_TCP_FLAG_ACK_OUT = "TCP_FLAG_ACK_OUT" CONST_METRIC_TCP_FLAG_ACK_IN = "TCP_FLAG_ACK_IN" CONST_METRIC_EGRESS_PACKET_COUNT = "EGRESS_PACKET_COUNT" CONST_METRIC_Q2_DROPPED = "Q2_DROPPED" CONST_METRIC_PACKETS_IN_DROPPED = "PACKETS_IN_DROPPED" CONST_METRIC_L7_BYTES_OUT = "L7_BYTES_OUT" CONST_TYPE_ROLLING_AVERAGE = "ROLLING_AVERAGE" CONST_METRIC_FIP_RATE_LIMIT_DROPPED_BYTES = "FIP_RATE_LIMIT_DROPPED_BYTES" CONST_METRIC_PACKETS_DROPPED_BY_RATE_LIMIT = "PACKETS_DROPPED_BY_RATE_LIMIT" CONST_METRIC_Q1_BYTES = "Q1_BYTES" CONST_METRIC_Q3_BYTES = "Q3_BYTES" CONST_METRIC_EGRESS_BYTE_COUNT = "EGRESS_BYTE_COUNT" CONST_METRIC_INGRESS_PACKET_COUNT = "INGRESS_PACKET_COUNT" CONST_METRIC_TX_DROPPED = "TX_DROPPED" CONST_METRIC_Q1_DROPPED = "Q1_DROPPED" CONST_METRIC_ADDRESS_MAP_INGRESS_BYTE_CNT = "ADDRESS_MAP_INGRESS_BYTE_CNT" CONST_METRIC_PACKETS_IN = "PACKETS_IN" CONST_METRIC_Q2_PKT_COUNT = "Q2_PKT_COUNT" CONST_METRIC_RX_PKT_COUNT = "RX_PKT_COUNT" CONST_METRIC_TX_BYTES = "TX_BYTES" CONST_METRIC_ANTI_SPOOF_EVENT_COUNT = "ANTI_SPOOF_EVENT_COUNT" CONST_METRIC_ACL_DENY_EVENT_COUNT = "ACL_DENY_EVENT_COUNT" CONST_METRIC_TCP_FLAG_NULL_IN = "TCP_FLAG_NULL_IN" CONST_METRIC_Q3_DROPPED = "Q3_DROPPED" CONST_METRIC_TCP_FLAG_NULL_OUT = "TCP_FLAG_NULL_OUT" CONST_METRIC_Q10_BYTES = "Q10_BYTES" CONST_METRIC_BYTES_IN = "BYTES_IN" CONST_METRIC_Q10_PKT_COUNT = "Q10_PKT_COUNT" CONST_METRIC_FIP_PRE_RATE_LIMIT_BYTES = "FIP_PRE_RATE_LIMIT_BYTES" CONST_METRIC_Q4_PKT_COUNT = "Q4_PKT_COUNT" CONST_METRIC_ADDRESS_MAP_EGRESS_BYTE_CNT = "ADDRESS_MAP_EGRESS_BYTE_CNT" CONST_METRIC_INGRESS_BYTE_COUNT = "INGRESS_BYTE_COUNT" CONST_ACTION_ALERT_POLICYGROUPCHANGE = "Alert_PolicyGroupChange" CONST_METRIC_PACKETS_IN_ERROR = "PACKETS_IN_ERROR" CONST_METRIC_TX_PKT_COUNT = "TX_PKT_COUNT" CONST_METRIC_TCP_FLAG_RST_OUT = "TCP_FLAG_RST_OUT" CONST_METRIC_Q1_PKT_COUNT = "Q1_PKT_COUNT" CONST_METRIC_ADDRESS_MAP_EGRESS_PKT_CNT = "ADDRESS_MAP_EGRESS_PKT_CNT" CONST_METRIC_ADDRESS_MAP_INGRESS_PKT_CNT = "ADDRESS_MAP_INGRESS_PKT_CNT" CONST_METRIC_RX_ERRORS = "RX_ERRORS" CONST_METRIC_Q0_BYTES = "Q0_BYTES" CONST_METRIC_PACKETS_OUT_ERROR = "PACKETS_OUT_ERROR" CONST_METRIC_BYTES_OUT = "BYTES_OUT" CONST_METRIC_L7_PACKETS_OUT = "L7_PACKETS_OUT" CONST_METRIC_Q0_PKT_COUNT = "Q0_PKT_COUNT" CONST_METRIC_L7_BYTES_IN = "L7_BYTES_IN" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_METRIC_TCP_FLAG_SYN_OUT = "TCP_FLAG_SYN_OUT" CONST_METRIC_PACKETS_OUT = "PACKETS_OUT" CONST_METRIC_Q10_DROPPED = "Q10_DROPPED" CONST_METRIC_Q4_DROPPED = "Q4_DROPPED" CONST_METRIC_CONNECTION_TYPE = "CONNECTION_TYPE" CONST_METRIC_Q2_BYTES = "Q2_BYTES" CONST_METRIC_FIP_PRE_RATE_LIMIT_PACKETS = "FIP_PRE_RATE_LIMIT_PACKETS" CONST_ACTION_ALERT = "Alert" CONST_METRIC_TCP_FLAG_SYN_IN = "TCP_FLAG_SYN_IN" CONST_METRIC_Q0_DROPPED = "Q0_DROPPED" CONST_METRIC_TCP_SYN_EVENT_COUNT = 
"TCP_SYN_EVENT_COUNT" CONST_METRIC_L7_PACKETS_IN = "L7_PACKETS_IN" def __init__(self, **kwargs): super(NUTCA, self).__init__() self._url_end_point = None self._name = None self._target_policy_group_id = None self._last_updated_by = None self._action = None self._period = None self._description = None self._metric = None self._threshold = None self._throttle_time = None self._disable = None self._display_status = None self._entity_scope = None self._count = None self._status = None self._external_id = None self._type = None self.expose_attribute(local_name="url_end_point", remote_name="URLEndPoint", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="target_policy_group_id", remote_name="targetPolicyGroupID", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="action", remote_name="action", attribute_type=str, is_required=True, is_unique=False, choices=[u'Alert', u'Alert_PolicyGroupChange']) self.expose_attribute(local_name="period", remote_name="period", attribute_type=int, is_required=True, is_unique=False) self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="metric", remote_name="metric", attribute_type=str, is_required=True, is_unique=False, choices=[u'ACL_DENY_EVENT_COUNT', u'ADDRESS_MAP_EGRESS_BYTE_CNT', u'ADDRESS_MAP_EGRESS_PKT_CNT', u'ADDRESS_MAP_INGRESS_BYTE_CNT', u'ADDRESS_MAP_INGRESS_PKT_CNT', u'ANTI_SPOOF_EVENT_COUNT', u'BYTES_IN', u'BYTES_OUT', u'CONNECTION_TYPE', u'EGRESS_BYTE_COUNT', u'EGRESS_PACKET_COUNT', u'FIP_PRE_RATE_LIMIT_BYTES', u'FIP_PRE_RATE_LIMIT_PACKETS', u'FIP_RATE_LIMIT_DROPPED_BYTES', u'FIP_RATE_LIMIT_DROPPED_PACKETS', u'INGRESS_BYTE_COUNT', u'INGRESS_PACKET_COUNT', u'L7_BYTES_IN', u'L7_BYTES_OUT', u'L7_PACKETS_IN', u'L7_PACKETS_OUT', u'PACKETS_DROPPED_BY_RATE_LIMIT', u'PACKETS_IN', u'PACKETS_IN_DROPPED', u'PACKETS_IN_ERROR', u'PACKETS_OUT', u'PACKETS_OUT_DROPPED', u'PACKETS_OUT_ERROR', u'Q0_BYTES', u'Q0_DROPPED', u'Q0_PKT_COUNT', u'Q10_BYTES', u'Q10_DROPPED', u'Q10_PKT_COUNT', u'Q1_BYTES', u'Q1_DROPPED', u'Q1_PKT_COUNT', u'Q2_BYTES', u'Q2_DROPPED', u'Q2_PKT_COUNT', u'Q3_BYTES', u'Q3_DROPPED', u'Q3_PKT_COUNT', u'Q4_BYTES', u'Q4_DROPPED', u'Q4_PKT_COUNT', u'RX_BYTES', u'RX_DROPPED', u'RX_ERRORS', u'RX_PKT_COUNT', u'TCP_FLAG_ACK_IN', u'TCP_FLAG_ACK_OUT', u'TCP_FLAG_NULL_IN', u'TCP_FLAG_NULL_OUT', u'TCP_FLAG_RST_IN', u'TCP_FLAG_RST_OUT', u'TCP_FLAG_SYN_IN', u'TCP_FLAG_SYN_OUT', u'TCP_SYN_EVENT_COUNT', u'TX_BYTES', u'TX_DROPPED', u'TX_ERRORS', u'TX_PKT_COUNT']) self.expose_attribute(local_name="threshold", remote_name="threshold", attribute_type=int, is_required=True, is_unique=False) self.expose_attribute(local_name="throttle_time", remote_name="throttleTime", attribute_type=int, is_required=False, is_unique=False) self.expose_attribute(local_name="disable", remote_name="disable", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="display_status", remote_name="displayStatus", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', 
u'GLOBAL']) self.expose_attribute(local_name="count", remote_name="count", attribute_type=int, is_required=False, is_unique=False) self.expose_attribute(local_name="status", remote_name="status", attribute_type=bool, is_required=False, is_unique=False) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=True, is_unique=False, choices=[u'BREACH', u'ROLLING_AVERAGE']) self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.alarms = NUAlarmsFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs) @property def url_end_point(self): return self._url_end_point @url_end_point.setter def url_end_point(self, value): self._url_end_point = value @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def target_policy_group_id(self): return self._target_policy_group_id @target_policy_group_id.setter def target_policy_group_id(self, value): self._target_policy_group_id = value @property def last_updated_by(self): return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): self._last_updated_by = value @property def action(self): return self._action @action.setter def action(self, value): self._action = value @property def period(self): return self._period @period.setter def period(self, value): self._period = value @property def description(self): return self._description @description.setter def description(self, value): self._description = value @property def metric(self): return self._metric @metric.setter def metric(self, value): self._metric = value @property def threshold(self): return self._threshold @threshold.setter def threshold(self, value): self._threshold = value @property def throttle_time(self): return self._throttle_time @throttle_time.setter def throttle_time(self, value): self._throttle_time = value @property def disable(self): return self._disable @disable.setter def disable(self, value): self._disable = value @property def display_status(self): return self._display_status @display_status.setter def display_status(self, value): self._display_status = value @property def entity_scope(self): return self._entity_scope @entity_scope.setter def entity_scope(self, value): self._entity_scope = value @property def count(self): return self._count @count.setter def count(self, value): self._count = value @property def status(self): return self._status @status.setter
BSD 3-Clause New or Revised License
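A sketch of toggling the flag on a TCA object. The constructor keyword arguments are illustrative (bambou maps them onto the exposed attributes); status is a boolean attribute per the expose_attribute call above:

tca = NUTCA(name='cpu-threshold', metric='PACKETS_IN',
            threshold=1000, period=60, action='Alert', type='BREACH')
tca.status = True   # runs the setter shown above
print(tca.status)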
alexa/alexa-apis-for-python
ask-sdk-model/ask_sdk_model/interfaces/viewport/viewport_state_video.py
ViewportStateVideo.__eq__
python
def __eq__(self, other): if not isinstance(other, ViewportStateVideo): return False return self.__dict__ == other.__dict__
Returns true if both objects are equal
https://github.com/alexa/alexa-apis-for-python/blob/bfe5e694daaca71bfb1a4199ca8d2514f1cac6c9/ask-sdk-model/ask_sdk_model/interfaces/viewport/viewport_state_video.py#L98-L104
import pprint import re import six import typing from enum import Enum if typing.TYPE_CHECKING: from typing import Dict, List, Optional, Union, Any from datetime import datetime from ask_sdk_model.interfaces.viewport.video.codecs import Codecs as Codecs_89738777 class ViewportStateVideo(object): deserialized_types = { 'codecs': 'list[ask_sdk_model.interfaces.viewport.video.codecs.Codecs]' } attribute_map = { 'codecs': 'codecs' } supports_multiple_types = False def __init__(self, codecs=None): self.__discriminator_value = None self.codecs = codecs def to_dict(self): result = {} for attr, _ in six.iteritems(self.deserialized_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) elif isinstance(value, Enum): result[attr] = value.value elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) else: result[attr] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str()
Apache License 2.0
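Equality is attribute-wise over __dict__; a sketch (plain strings stand in for Codecs enum members here):

a = ViewportStateVideo(codecs=['H_264_41'])
b = ViewportStateVideo(codecs=['H_264_41'])
c = ViewportStateVideo(codecs=[])
print(a == b)                  # True: identical attribute dicts
print(a == c)                  # False
print(a == 'not a viewport')   # False: the isinstance check short-circuits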
googkit/googkit
googkit/lib/path.py
default_config
python
def default_config(): path = os.path.join(googkit_root(), DEFAULT_CONFIG) if not os.path.exists(path): msg = 'Default config file is not found: {path}'.format(path=path) raise GoogkitError(msg) return path
Returns a path for the default project config file. Raises a GoogkitError if the default config file is not found. See also: googkit.lib.Config#load()
https://github.com/googkit/googkit/blob/cacb37bf65e5ac19379b329beb02af907240aa60/googkit/lib/path.py#L72-L82
import os from googkit.lib.error import GoogkitError PROJECT_CONFIG = 'googkit.cfg' DATA_DIR = 'googkit_data' USER_CONFIG = '.googkit' SCRIPT_DIR = 'googkit' PLUGIN_DIR = 'plugins' LOCALE_DIR = os.path.join(DATA_DIR, 'locale') DEFAULT_CONFIG = os.path.join(DATA_DIR, 'default.cfg') TEMPLATE_DIR = os.path.join(DATA_DIR, 'template') def googkit_root(): script_dir = os.path.dirname(os.path.abspath(__file__)) return os.path.normpath(os.path.join(script_dir, '..', '..')) def project_root(cwd): current = cwd try: while not os.path.exists(os.path.join(current, PROJECT_CONFIG)): before = current current = os.path.dirname(current) if before == current: break if os.path.exists(os.path.join(current, PROJECT_CONFIG)): return current else: return None except IOError: return None def project_config(cwd): proj_root = project_root(cwd) if proj_root is None: raise GoogkitError('Project directory is not found.') project_config = os.path.join(proj_root, PROJECT_CONFIG) if not os.path.exists(project_config): raise GoogkitError('Project config file is not found.') return project_config def user_config(): home_dir = os.path.expanduser('~') user_config = os.path.join(home_dir, USER_CONFIG) return user_config if os.path.exists(user_config) else None
MIT License
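A sketch of the expected call pattern:

try:
    cfg_path = default_config()
except GoogkitError as e:
    # default.cfg is missing from the googkit data directory
    print(e)
else:
    print('loading defaults from', cfg_path)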
pallets/werkzeug
src/werkzeug/security.py
gen_salt
python
def gen_salt(length: int) -> str: if length <= 0: raise ValueError("Salt length must be positive") return "".join(secrets.choice(SALT_CHARS) for _ in range(length))
Generate a random string of SALT_CHARS with specified ``length``.
https://github.com/pallets/werkzeug/blob/d4987ee711c3d69ca665bf3b7e5e84705b2f91b5/src/werkzeug/security.py#L131-L136
import hashlib import hmac import os import posixpath import secrets import typing as t import warnings if t.TYPE_CHECKING: pass SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" DEFAULT_PBKDF2_ITERATIONS = 260000 _os_alt_seps: t.List[str] = list( sep for sep in [os.path.sep, os.path.altsep] if sep is not None and sep != "/" ) def pbkdf2_hex( data: t.Union[str, bytes], salt: t.Union[str, bytes], iterations: int = DEFAULT_PBKDF2_ITERATIONS, keylen: t.Optional[int] = None, hashfunc: t.Optional[t.Union[str, t.Callable]] = None, ) -> str: warnings.warn( "'pbkdf2_hex' is deprecated and will be removed in Werkzeug" " 2.1. Use 'hashlib.pbkdf2_hmac().hex()' instead.", DeprecationWarning, stacklevel=2, ) return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).hex() def pbkdf2_bin( data: t.Union[str, bytes], salt: t.Union[str, bytes], iterations: int = DEFAULT_PBKDF2_ITERATIONS, keylen: t.Optional[int] = None, hashfunc: t.Optional[t.Union[str, t.Callable]] = None, ) -> bytes: warnings.warn( "'pbkdf2_bin' is deprecated and will be removed in Werkzeug" " 2.1. Use 'hashlib.pbkdf2_hmac()' instead.", DeprecationWarning, stacklevel=2, ) if isinstance(data, str): data = data.encode("utf8") if isinstance(salt, str): salt = salt.encode("utf8") if not hashfunc: hash_name = "sha256" elif callable(hashfunc): hash_name = hashfunc().name else: hash_name = hashfunc return hashlib.pbkdf2_hmac(hash_name, data, salt, iterations, keylen) def safe_str_cmp(a: str, b: str) -> bool: warnings.warn( "'safe_str_cmp' is deprecated and will be removed in Werkzeug" " 2.1. Use 'hmac.compare_digest' instead.", DeprecationWarning, stacklevel=2, ) if isinstance(a, str): a = a.encode("utf-8") if isinstance(b, str): b = b.encode("utf-8") return hmac.compare_digest(a, b)
BSD 3-Clause New or Revised License
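A sketch of gen_salt behavior:

salt = gen_salt(16)
print(len(salt))                            # 16
print(all(c in SALT_CHARS for c in salt))   # True: drawn only from SALT_CHARS

gen_salt(0)  # raises ValueError: Salt length must be positive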
deepmipt/deeppavlov
deeppavlov/core/common/registry.py
list_models
python
def list_models() -> list: return list(_REGISTRY)
Returns a list of names of registered classes.
https://github.com/deepmipt/deeppavlov/blob/d73f45733d6b23347871aa293309730303b64450/deeppavlov/core/common/registry.py#L77-L79
import importlib import json from logging import getLogger from pathlib import Path from deeppavlov.core.common.errors import ConfigError logger = getLogger(__name__) _registry_path = Path(__file__).parent / 'registry.json' if _registry_path.exists(): with _registry_path.open(encoding='utf-8') as f: _REGISTRY = json.load(f) else: _REGISTRY = {} inverted_registry = {val: key for key, val in _REGISTRY.items()} def cls_from_str(name: str) -> type: try: module_name, cls_name = name.split(':') except ValueError: raise ConfigError('Expected class description in a `module.submodules:ClassName` form, but got `{}`' .format(name)) return getattr(importlib.import_module(module_name), cls_name) def register(name: str = None) -> type: def decorate(model_cls: type, reg_name: str = None) -> type: model_name = reg_name or short_name(model_cls) global _REGISTRY cls_name = model_cls.__module__ + ':' + model_cls.__name__ if model_name in _REGISTRY and _REGISTRY[model_name] != cls_name: logger.warning('Registry name "{}" has been already registered and will be overwritten.'.format(model_name)) _REGISTRY[model_name] = cls_name return model_cls return lambda model_cls_name: decorate(model_cls_name, name) def short_name(cls: type) -> str: return cls.__name__.split('.')[-1] def get_model(name: str) -> type: if name not in _REGISTRY: if ':' not in name: raise ConfigError("Model {} is not registered.".format(name)) return cls_from_str(name) return cls_from_str(_REGISTRY[name])
Apache License 2.0
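A sketch combining register and list_models (the component name is hypothetical):

@register('toy_component')
class ToyComponent:
    pass

print('toy_component' in list_models())  # True once registered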
jmcgeheeiv/pyfakefs
pyfakefs/fake_filesystem.py
FakeDirectory.entries
python
def entries(self) -> Dict[str, FakeFile]: return self._entries
Return the dict of contained directory entries, mapping entry names to FakeFile objects.
https://github.com/jmcgeheeiv/pyfakefs/blob/589bae0c58298d92fea0e463ab5104166cd6e63c/pyfakefs/fake_filesystem.py#L675-L677
import errno import heapq import io import locale import os import random import sys import traceback import uuid from collections import namedtuple from doctest import TestResults from enum import Enum from stat import ( S_IFREG, S_IFDIR, S_ISLNK, S_IFMT, S_ISDIR, S_IFLNK, S_ISREG, S_IFSOCK ) from types import ModuleType, TracebackType from typing import ( List, Optional, Callable, Union, Any, Dict, Tuple, cast, AnyStr, overload, NoReturn, ClassVar, IO, Iterator, TextIO, Type ) from pyfakefs.deprecator import Deprecator from pyfakefs.extra_packages import use_scandir from pyfakefs.fake_scandir import scandir, walk, ScanDirIter from pyfakefs.helpers import ( FakeStatResult, BinaryBufferIO, TextBufferIO, is_int_type, is_byte_string, is_unicode_string, make_string_path, IS_PYPY, to_string, matching_string, real_encoding, now, AnyPath, to_bytes ) from pyfakefs import __version__ PERM_READ = 0o400 PERM_WRITE = 0o200 PERM_EXE = 0o100 PERM_DEF = 0o777 PERM_DEF_FILE = 0o666 PERM_ALL = 0o7777 _OpenModes = namedtuple( '_OpenModes', 'must_exist can_read can_write truncate append must_not_exist' ) _OPEN_MODE_MAP = { 'r': (True, True, False, False, False, False), 'w': (False, False, True, True, False, False), 'a': (False, False, True, False, True, False), 'r+': (True, True, True, False, False, False), 'w+': (False, True, True, True, False, False), 'a+': (False, True, True, False, True, False), 'x': (False, False, True, False, False, True), 'x+': (False, True, True, False, False, True) } AnyFileWrapper = Union[ "FakeFileWrapper", "FakeDirWrapper", "StandardStreamWrapper", "FakePipeWrapper" ] AnyString = Union[str, bytes] AnyFile = Union["FakeFile", "FakeDirectory"] if sys.platform.startswith('linux'): _MAX_LINK_DEPTH = 40 else: _MAX_LINK_DEPTH = 32 NR_STD_STREAMS = 3 if sys.platform == 'win32': USER_ID = 1 GROUP_ID = 1 else: USER_ID = os.getuid() GROUP_ID = os.getgid() class OSType(Enum): LINUX = "linux" MACOS = "macos" WINDOWS = "windows" class PatchMode(Enum): OFF = 1 AUTO = 2 ON = 3 def set_uid(uid: int) -> None: global USER_ID USER_ID = uid def set_gid(gid: int) -> None: global GROUP_ID GROUP_ID = gid def reset_ids() -> None: if sys.platform == 'win32': set_uid(1) set_gid(1) else: set_uid(os.getuid()) set_gid(os.getgid()) def is_root() -> bool: return USER_ID == 0 class FakeLargeFileIoException(Exception): def __init__(self, file_path: str) -> None: super(FakeLargeFileIoException, self).__init__( 'Read and write operations not supported for ' 'fake large file: %s' % file_path) def _copy_module(old: ModuleType) -> ModuleType: saved = sys.modules.pop(old.__name__, None) new = __import__(old.__name__) if saved is not None: sys.modules[old.__name__] = saved return new class FakeFile: stat_types = ( 'st_mode', 'st_ino', 'st_dev', 'st_nlink', 'st_uid', 'st_gid', 'st_size', 'st_atime', 'st_mtime', 'st_ctime', 'st_atime_ns', 'st_mtime_ns', 'st_ctime_ns' ) def __init__(self, name: AnyStr, st_mode: int = S_IFREG | PERM_DEF_FILE, contents: Optional[AnyStr] = None, filesystem: Optional["FakeFilesystem"] = None, encoding: Optional[str] = None, errors: Optional[str] = None, side_effect: Optional[Callable[["FakeFile"], None]] = None): if filesystem is None: raise ValueError('filesystem shall not be None') self.filesystem: FakeFilesystem = filesystem self._side_effect: Optional[Callable] = side_effect self.name: AnyStr = name self.stat_result = FakeStatResult( filesystem.is_windows_fs, USER_ID, GROUP_ID, now()) if st_mode >> 12 == 0: st_mode |= S_IFREG self.stat_result.st_mode = st_mode self.st_size: int = 0 
self.encoding: Optional[str] = real_encoding(encoding) self.errors: str = errors or 'strict' self._byte_contents: Optional[bytes] = self._encode_contents(contents) self.stat_result.st_size = ( len(self._byte_contents) if self._byte_contents is not None else 0) self.epoch: int = 0 self.parent_dir: Optional[FakeDirectory] = None self.xattr: Dict = {} self.opened_as: AnyString = '' @property def byte_contents(self) -> Optional[bytes]: return self._byte_contents @property def contents(self) -> Optional[str]: if isinstance(self.byte_contents, bytes): return self.byte_contents.decode( self.encoding or locale.getpreferredencoding(False), errors=self.errors) return None @property def st_ctime(self) -> float: return self.stat_result.st_ctime @st_ctime.setter def st_ctime(self, val: float) -> None: self.stat_result.st_ctime = val @property def st_atime(self) -> float: return self.stat_result.st_atime @st_atime.setter def st_atime(self, val: float) -> None: self.stat_result.st_atime = val @property def st_mtime(self) -> float: return self.stat_result.st_mtime @st_mtime.setter def st_mtime(self, val: float) -> None: self.stat_result.st_mtime = val def set_large_file_size(self, st_size: int) -> None: self._check_positive_int(st_size) if self.st_size: self.size = 0 if self.filesystem: self.filesystem.change_disk_usage(st_size, self.name, self.st_dev) self.st_size = st_size self._byte_contents = None def _check_positive_int(self, size: int) -> None: if not is_int_type(size) or size < 0: self.filesystem.raise_os_error(errno.ENOSPC, self.name) def is_large_file(self) -> bool: return self._byte_contents is None def _encode_contents( self, contents: Union[str, bytes, None]) -> Optional[bytes]: if is_unicode_string(contents): contents = bytes( cast(str, contents), self.encoding or locale.getpreferredencoding(False), self.errors) return cast(bytes, contents) def set_initial_contents(self, contents: AnyStr) -> bool: byte_contents = self._encode_contents(contents) changed = self._byte_contents != byte_contents st_size = len(byte_contents) if byte_contents else 0 current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) if self._byte_contents: self.size = 0 self._byte_contents = byte_contents self.st_size = st_size self.epoch += 1 return changed def set_contents(self, contents: AnyStr, encoding: Optional[str] = None) -> bool: self.encoding = real_encoding(encoding) changed = self.set_initial_contents(contents) if self._side_effect is not None: self._side_effect(self) return changed @property def size(self) -> int: return self.st_size @size.setter def size(self, st_size: int) -> None: self._check_positive_int(st_size) current_size = self.st_size or 0 self.filesystem.change_disk_usage( st_size - current_size, self.name, self.st_dev) if self._byte_contents: if st_size < current_size: self._byte_contents = self._byte_contents[:st_size] else: self._byte_contents += b'\0' * (st_size - current_size) self.st_size = st_size self.epoch += 1 @property def path(self) -> AnyStr: names: List[AnyStr] = [] obj: Optional[FakeFile] = self while obj: names.insert( 0, matching_string(self.name, obj.name)) obj = obj.parent_dir sep = self.filesystem.get_path_separator(names[0]) if names[0] == sep: names.pop(0) dir_path = sep.join(names) drive = self.filesystem.splitdrive(dir_path)[0] if not drive: dir_path = sep + dir_path else: dir_path = sep.join(names) dir_path = self.filesystem.absnormpath(dir_path) return dir_path @Deprecator('property path') def GetPath(self): return 
self.path @Deprecator('property size') def GetSize(self): return self.size @Deprecator('property size') def SetSize(self, value): self.size = value @Deprecator('property st_atime') def SetATime(self, st_atime): self.st_atime = st_atime @Deprecator('property st_mtime') def SetMTime(self, st_mtime): self.st_mtime = st_mtime @Deprecator('property st_ctime') def SetCTime(self, st_ctime): self.st_ctime = st_ctime def __getattr__(self, item: str) -> Any: if item in self.stat_types: return getattr(self.stat_result, item) return super().__getattribute__(item) def __setattr__(self, key: str, value: Any) -> None: if key in self.stat_types: return setattr(self.stat_result, key, value) return super().__setattr__(key, value) def __str__(self) -> str: return '%r(%o)' % (self.name, self.st_mode) @Deprecator('st_ino') def SetIno(self, st_ino): self.st_ino = st_ino class FakeNullFile(FakeFile): def __init__(self, filesystem: "FakeFilesystem") -> None: devnull = 'nul' if filesystem.is_windows_fs else '/dev/null' super(FakeNullFile, self).__init__( devnull, filesystem=filesystem, contents='') @property def byte_contents(self) -> bytes: return b'' def set_initial_contents(self, contents: AnyStr) -> bool: return False Deprecator.add(FakeFile, FakeFile.set_large_file_size, 'SetLargeFileSize') Deprecator.add(FakeFile, FakeFile.set_contents, 'SetContents') Deprecator.add(FakeFile, FakeFile.is_large_file, 'IsLargeFile') class FakeFileFromRealFile(FakeFile): def __init__(self, file_path: str, filesystem: "FakeFilesystem", side_effect: Optional[Callable] = None) -> None: super().__init__( name=os.path.basename(file_path), filesystem=filesystem, side_effect=side_effect) self.contents_read = False @property def byte_contents(self) -> Optional[bytes]: if not self.contents_read: self.contents_read = True with io.open(self.file_path, 'rb') as f: self._byte_contents = f.read() self.st_atime = os.stat(self.file_path).st_atime return self._byte_contents def set_contents(self, contents, encoding=None): self.contents_read = True super(FakeFileFromRealFile, self).set_contents(contents, encoding) def is_large_file(self): return False class FakeDirectory(FakeFile): def __init__(self, name: str, perm_bits: int = PERM_DEF, filesystem: Optional["FakeFilesystem"] = None): FakeFile.__init__( self, name, S_IFDIR | perm_bits, '', filesystem=filesystem) self.st_nlink += 1 self._entries: Dict[str, AnyFile] = {} def set_contents(self, contents: AnyStr, encoding: Optional[str] = None) -> bool: raise self.filesystem.raise_os_error(errno.EISDIR, self.path) @property
Apache License 2.0
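The pyfakefs context above keys fake open() behavior off the _OPEN_MODE_MAP table, which expands each mode string into the _OpenModes capability flags. A minimal standalone sketch of how such a table is consumed (the lookup_mode helper is hypothetical, not part of pyfakefs; only the 'r'/'w'/'a' rows are reproduced):

from collections import namedtuple

_OpenModes = namedtuple(
    '_OpenModes',
    'must_exist can_read can_write truncate append must_not_exist'
)

# Subset of the map shown in the record's context.
_OPEN_MODE_MAP = {
    'r': (True, True, False, False, False, False),
    'w': (False, False, True, True, False, False),
    'a': (False, False, True, False, True, False),
}

def lookup_mode(mode: str) -> _OpenModes:
    # Binary/text qualifiers do not change the access flags,
    # so strip them before keying the table.
    access = mode.replace('b', '').replace('t', '')
    return _OpenModes(*_OPEN_MODE_MAP[access])

assert lookup_mode('rb').must_exist
assert lookup_mode('w').truncate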
onelogin/python3-saml
src/onelogin/saml2/logout_response.py
OneLogin_Saml2_Logout_Response._generate_request_id
python
def _generate_request_id(self): return OneLogin_Saml2_Utils.generate_unique_id()
Generate a unique logout response ID.
https://github.com/onelogin/python3-saml/blob/ab62b0d6f3e5ac2ae8e95ce3ed2f85389252a32d/src/onelogin/saml2/logout_response.py#L216-L220
from onelogin.saml2 import compat from onelogin.saml2.utils import OneLogin_Saml2_Utils, OneLogin_Saml2_ValidationError from onelogin.saml2.xml_templates import OneLogin_Saml2_Templates from onelogin.saml2.xml_utils import OneLogin_Saml2_XML class OneLogin_Saml2_Logout_Response(object): def __init__(self, settings, response=None): self._settings = settings self._error = None self.id = None if response is not None: self._logout_response = compat.to_string(OneLogin_Saml2_Utils.decode_base64_and_inflate(response, ignore_zip=True)) self.document = OneLogin_Saml2_XML.to_etree(self._logout_response) self.id = self.document.get('ID', None) def get_issuer(self): issuer = None issuer_nodes = self._query('/samlp:LogoutResponse/saml:Issuer') if len(issuer_nodes) == 1: issuer = OneLogin_Saml2_XML.element_text(issuer_nodes[0]) return issuer def get_status(self): entries = self._query('/samlp:LogoutResponse/samlp:Status/samlp:StatusCode') if len(entries) == 0: return None status = entries[0].attrib['Value'] return status def is_valid(self, request_data, request_id=None, raise_exceptions=False): self._error = None try: idp_data = self._settings.get_idp_data() idp_entity_id = idp_data['entityId'] get_data = request_data['get_data'] if self._settings.is_strict(): res = OneLogin_Saml2_XML.validate_xml(self.document, 'saml-schema-protocol-2.0.xsd', self._settings.is_debug_active()) if isinstance(res, str): raise OneLogin_Saml2_ValidationError( 'Invalid SAML Logout Request. Not match the saml-schema-protocol-2.0.xsd', OneLogin_Saml2_ValidationError.INVALID_XML_FORMAT ) security = self._settings.get_security_data() in_response_to = self.get_in_response_to() if request_id is not None and in_response_to and in_response_to != request_id: raise OneLogin_Saml2_ValidationError( 'The InResponseTo of the Logout Response: %s, does not match the ID of the Logout request sent by the SP: %s' % (in_response_to, request_id), OneLogin_Saml2_ValidationError.WRONG_INRESPONSETO ) issuer = self.get_issuer() if issuer is not None and issuer != idp_entity_id: raise OneLogin_Saml2_ValidationError( 'Invalid issuer in the Logout Response (expected %(idpEntityId)s, got %(issuer)s)' % { 'idpEntityId': idp_entity_id, 'issuer': issuer }, OneLogin_Saml2_ValidationError.WRONG_ISSUER ) current_url = OneLogin_Saml2_Utils.get_self_url_no_query(request_data) destination = self.document.get('Destination', None) if destination: if not OneLogin_Saml2_Utils.normalize_url(url=destination).startswith(OneLogin_Saml2_Utils.normalize_url(url=current_url)): raise OneLogin_Saml2_ValidationError( 'The LogoutResponse was received at %s instead of %s' % (current_url, destination), OneLogin_Saml2_ValidationError.WRONG_DESTINATION ) if security['wantMessagesSigned']: if 'Signature' not in get_data: raise OneLogin_Saml2_ValidationError( 'The Message of the Logout Response is not signed and the SP require it', OneLogin_Saml2_ValidationError.NO_SIGNED_MESSAGE ) return True except Exception as err: self._error = str(err) debug = self._settings.is_debug_active() if debug: print(err) if raise_exceptions: raise return False def _query(self, query): return OneLogin_Saml2_XML.query(self.document, query) def build(self, in_response_to): sp_data = self._settings.get_sp_data() self.id = self._generate_request_id() issue_instant = OneLogin_Saml2_Utils.parse_time_to_SAML(OneLogin_Saml2_Utils.now()) logout_response = OneLogin_Saml2_Templates.LOGOUT_RESPONSE % { 'id': self.id, 'issue_instant': issue_instant, 'destination': self._settings.get_idp_slo_response_url(), 
'in_response_to': in_response_to, 'entity_id': sp_data['entityId'], 'status': "urn:oasis:names:tc:SAML:2.0:status:Success" } self._logout_response = logout_response def get_in_response_to(self): return self.document.get('InResponseTo') def get_response(self, deflate=True): if deflate: response = OneLogin_Saml2_Utils.deflate_and_base64_encode(self._logout_response) else: response = OneLogin_Saml2_Utils.b64encode(self._logout_response) return response def get_error(self): return self._error def get_xml(self): return self._logout_response
MIT License
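A hedged usage sketch for the logout-response record above: building and serializing a response for the HTTP-Redirect binding. The saml_settings_dict and the request ID are placeholders assumed to be defined elsewhere; real code loads a full SP/IdP configuration:

from onelogin.saml2.settings import OneLogin_Saml2_Settings
from onelogin.saml2.logout_response import OneLogin_Saml2_Logout_Response

# Assumed: saml_settings_dict holds a valid SP/IdP configuration.
settings = OneLogin_Saml2_Settings(saml_settings_dict)

response = OneLogin_Saml2_Logout_Response(settings)
# build() calls _generate_request_id() to assign the unique ID.
response.build(in_response_to='ONELOGIN_abc123')  # hypothetical request ID

# deflate=True returns the base64 of the DEFLATE-compressed XML.
encoded = response.get_response(deflate=True)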
citcheese/odbparser
ODBlib/ODBhelperfuncs.py
flatten_json
python
def flatten_json(dictionary): from itertools import chain, starmap def unpack(parent_key, parent_value): if isinstance(parent_value, dict): for key, value in parent_value.items(): temp1 = parent_key + '_' + key yield temp1, value elif isinstance(parent_value, list): i = 0 for value in parent_value: temp2 = parent_key + '_' + str(i) i += 1 yield temp2, value else: yield parent_key, parent_value while True: dictionary = dict(chain.from_iterable(starmap(unpack, dictionary.items()))) if not any(isinstance(value, dict) for value in dictionary.values()) and not any(isinstance(value, list) for value in dictionary.values()): break return dictionary
Flatten a nested JSON document into a single-level dictionary.
https://github.com/citcheese/odbparser/blob/7692c45084d41a725f82c96455f797cf7457a0e2/ODBlib/ODBhelperfuncs.py#L327-L358
import os import json import pandas as pd from tqdm import tqdm from colorama import Fore import sys def jsonappendfile(filepath,items): prevExists = True if not os.path.exists(filepath) or os.path.getsize(filepath)==0: prevExists = False with open(filepath, "ab+") as f: f.write("[]".encode()) with open(filepath, "ab+") as newZ: newZ.seek(-1, 2) newZ.truncate() if prevExists: newZ.write(",".encode()) for y in items[:-1]: newZ.write(json.dumps(y).encode()) newZ.write(",".encode()) newZ.write(json.dumps(items[-1]).encode()) newZ.write(']'.encode()) def checkifIPalreadyparsed(ipaddress,dbtype="Elastic",multi=False,skiptimeouts=False): import ODBconfig import ijson dbtype = dbtype.title().strip("db") basepath = ODBconfig.basepath oldips = ODBconfig.oldips if ":" in ipaddress: ipaddress = ipaddress.split(":")[0] if not basepath: basepath = os.path.join(os.getcwd(), "open directory dumps") if not os.path.exists(basepath): os.makedirs(basepath) if os.path.exists(os.path.join(basepath,f"{dbtype}Found.json")): if os.path.getsize(os.path.join(basepath, f"{dbtype}Found.json")) != 0: with open(os.path.join(basepath,f"{dbtype}Found.json")) as outfile: doneips = list(ijson.items(outfile, "item.ipaddress")) else: doneips =[] else: doneips = [] pd.set_option('display.max_colwidth', -1) doneips = doneips+oldips if multi: parsedones = [x for x in ipaddress if x in doneips] return parsedones else: if ipaddress in doneips: return True else: return False def convertjsondumptocsv(jsonfile,flattennestedjson=True,olddumps=False,getridofuselessdata=False): import pathlib from pandas.io.json import json_normalize import numpy as np p = pathlib.Path(jsonfile) foldername = p.parent.name issuesflattening = False if not os.path.exists(os.path.join(p.parent,"JSON_backups")): os.makedirs(os.path.join(p.parent,"JSON_backups")) try: if olddumps: with open(jsonfile,encoding="utf8") as f: content = f.readlines() con2 = [json.loads(x) for x in content] try: con2 =[x["_source"] for x in con2 if x['_source']] except: pass else: try: with open(jsonfile,encoding="utf8",errors="replace") as f: con2 = json.load(f) except ValueError: with open(jsonfile, encoding="utf8", errors="replace") as f: content = f.read() con2 = [] decoder = json.JSONDecoder() while content: value, new_start = decoder.raw_decode(content) content = content[new_start:].strip() con2.append(value) outfile = jsonfile.replace('.json','.csv') if flattennestedjson: try: dic_flattened = [flatten_json(d) for d in con2] df = pd.DataFrame(dic_flattened) except Exception: try: df = json_normalize(con2, errors="ignore") except Exception as e: df = pd.DataFrame(con2) issuesflattening = True else: df = pd.DataFrame(con2) if getridofuselessdata: df.replace("blank", np.nan, inplace=True) df.replace("Null", np.nan, inplace=True) df.replace("", np.nan, inplace=True) df = df.astype("object") df.dropna(axis=1, how='all', inplace=True) droplist = [x for x in df.columns if all(len(str(y).split(".",1)[0]) < 3 for y in df[x].tolist())] df.drop(droplist, axis=1, inplace=True) df = df.dropna(axis=1, thresh=int(.001 * len(df))) df.replace(np.nan, '', regex=True, inplace=True) df.replace(np.nan, '', regex=True,inplace=True) cols = df.columns dropcols = ["_id", '__v'] cols1 = [x for x in cols if x in dropcols] df.drop(cols1, axis=1, inplace=True) df = df.applymap(str) df.drop_duplicates(inplace=True) df = df.replace({'\n': '<br>',"\r":"<br>"}, regex=True) df.to_csv(outfile,index=False,escapechar='\n') os.rename(jsonfile,os.path.join(p.parent,"JSON_backups",p.name)) try: 
os.rename(jsonfile.replace(".json","_mapping.json"), os.path.join(p.parent, "JSON_backups", p.name.replace(".json","_mapping.json"))) except: pass except Exception as e: issuesflattening = True print(f"{jsonfile}: {str(e)}") return issuesflattening def jsonfoldert0mergedcsv(folder,flattennestedjson=False,olddumps=False,getridofuselessdata=False): files = [x for x in os.listdir(folder) if x.endswith(".json") and "_mapping.json" not in x] t = tqdm(files,desc="Now Converting",leave=True) issues = [] for x in t: t.set_description(F"Converting: {Fore.LIGHTRED_EX}{x}{Fore.RESET}") t.refresh() res = convertjsondumptocsv(os.path.join(folder,x),flattennestedjson=flattennestedjson,olddumps=olddumps,getridofuselessdata=getridofuselessdata) if res: issues.append(x) if issues: print(F" {Fore.LIGHTGREEN_EX}Error{Fore.RESET} with following files:") for y in issues: print(f" {y}") def megajsonconvert(directory,flattennestedjson=False,olddumps=False): for folder in os.listdir(directory): if os.path.isdir(os.path.join(directory,folder)): jsonfoldert0mergedcsv(os.path.join(directory,folder),flattennestedjson=flattennestedjson,olddumps=olddumps) def convert_timestamp(item_date_object): import datetime if isinstance(item_date_object, (datetime.date, datetime.datetime)): return item_date_object.timestamp() def iterate_all(iterable, returned="key"): if isinstance(iterable, dict): for key, value in iterable.items(): if returned == "key": yield key elif returned == "value": if not (isinstance(value, dict) or isinstance(value, list)): yield value else: raise ValueError("'returned' keyword only accepts 'key' or 'value'.") for ret in iterate_all(value, returned=returned): yield ret elif isinstance(iterable, list): for el in iterable: for ret in iterate_all(el, returned=returned): yield ret def shodan_query(query,limit=1000): import ODBconfig import shodan counter =0 limit = int(limit) try: api = shodan.Shodan(ODBconfig.SHODAN_API_KEY) result = api.search_cursor(query) shodanres = [] for x in result: shodanres.append((x["ip_str"],x["product"],x["port"])) counter+=1 if counter>=limit: break return shodanres except shodan.APIError as e: print(Fore.RED + e.value + Fore.RESET) return False def valid_ip(address): if ":" in address: address = address.split(":",1)[0] try: host_bytes = address.split('.') valid = [int(b) for b in host_bytes] valid = [b for b in valid if b >= 0 and b<=255] return len(host_bytes) == 4 and len(valid) == 4 except: return False def ipsfromfile(filepath): with open(filepath,encoding="utf8",errors="ignore") as f: ips = f.readlines() ips = [x.replace("\n", "").strip("/https//:") if not x[0].isdigit() else x.replace("\n", "") for x in ips] ips = list(set((filter(None,ips)))) goodips = [x for x in ips if valid_ip(x)] badips = (set(ips).difference(goodips)) return goodips,badips def ipsfromclipboard(): import pyperclip ips = pyperclip.paste().splitlines() ips = list(set((filter(None, ips)))) ips = [x.strip("/https//:") if not x[0].isdigit() else x for x in ips] goodips = [x for x in ips if valid_ip(x)] badips = (set(ips).difference(goodips)) return goodips,badips def printsummary(donedbs,totalrecords): from colorama import Fore summ = [f"{Fore.LIGHTRED_EX}RUN SUMMARY{Fore.CYAN}", F"{Fore.RESET}Dumped {Fore.LIGHTBLUE_EX}{str(donedbs)}{Fore.RESET} databases with a total of {Fore.LIGHTBLUE_EX}{totalrecords:,d}{Fore.RESET} records.{Fore.CYAN}", f"{Fore.RESET}{Fore.LIGHTRED_EX}Have a nice day!{Fore.CYAN}"] maxlen = max(len(s) for s in summ) colwidth = maxlen - 12 border = colwidth + 2 print(f"{Fore.CYAN}{'#' 
* border}") print(f'#{summ[0]:^{colwidth + 10}}#') print(f'#{summ[1]:^{colwidth + 30}}#') print(f'#{summ[2]:^{colwidth + 15}}#') print(f"{'#' * border}{Fore.RESET}") def updatestatsfile(donedbs=0,totalrecords=0,parsedservers=0,type="ElasticSearch"): absolute_path = os.path.dirname(os.path.abspath(__file__)) fpath = os.path.join(absolute_path,"ODBstats.json") if not os.path.exists(fpath): prevExists = False item = {} elastic ={} mongo={} elastic["databases dumped"]=0 elastic["total records dumped"] =0 elastic["parsed servers"] = 0 mongo["databases dumped"]=0 mongo["total records dumped"] =0 mongo["parsed servers"] = 0 item["ElasticSearch"] = elastic item["MongoDB"] = mongo with open(fpath, "w") as f: json.dump(item,f) with open(fpath) as f: con = json.load(f) con[type]["total records dumped"]=con[type]["total records dumped"]+totalrecords con[type]["databases dumped"]=con[type]["databases dumped"]+donedbs con[type]["parsed servers"]=con[type]["parsed servers"]+parsedservers with open(fpath, "w") as f: json.dump(con, f) def getstats(): absolute_path = os.path.dirname(os.path.abspath(__file__)) fpath = os.path.join(absolute_path, "ODBstats.json") if not os.path.exists(fpath): item = {} elastic ={} mongo={} elastic["databases dumped"]=0 elastic["total records dumped"] =0 elastic["parsed servers"] = 0 mongo["databases dumped"]=0 mongo["total records dumped"] =0 mongo["parsed servers"] = 0 item["ElasticSearch"] = elastic item["MongoDB"] = mongo with open(fpath, "w") as f: json.dump(item,f) with open(fpath) as f: con = json.load(f) donedbs = 0 parsed =0 totalrecs = 0 for x in con.keys(): parsed += con[x]["parsed servers"] totalrecs += con[x]["total records dumped"] donedbs += con[x]["databases dumped"] return parsed,totalrecs,donedbs
MIT License
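A quick worked example of flatten_json from the record above, applied to a small nested document:

nested = {
    "ip": "1.2.3.4",
    "meta": {"port": 9200, "tags": ["open", "elastic"]},
}

flat = flatten_json(nested)
# Nested keys join with '_', list positions become numeric suffixes:
# {'ip': '1.2.3.4', 'meta_port': 9200,
#  'meta_tags_0': 'open', 'meta_tags_1': 'elastic'}
assert flat['meta_tags_1'] == 'elastic'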
googleapis/python-aiplatform
google/cloud/aiplatform_v1/services/dataset_service/client.py
DatasetServiceClient.create_dataset
python
def create_dataset( self, request: Union[dataset_service.CreateDatasetRequest, dict] = None, *, parent: str = None, dataset: gca_dataset.Dataset = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gac_operation.Operation: has_flattened_params = any([parent, dataset]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) if not isinstance(request, dataset_service.CreateDatasetRequest): request = dataset_service.CreateDatasetRequest(request) if parent is not None: request.parent = parent if dataset is not None: request.dataset = dataset rpc = self._transport._wrapped_methods[self._transport.create_dataset] metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) response = gac_operation.from_gapic( response, self._transport.operations_client, gca_dataset.Dataset, metadata_type=dataset_service.CreateDatasetOperationMetadata, ) return response
Creates a Dataset. Args: request (Union[google.cloud.aiplatform_v1.types.CreateDatasetRequest, dict]): The request object. Request message for [DatasetService.CreateDataset][google.cloud.aiplatform.v1.DatasetService.CreateDataset]. parent (str): Required. The resource name of the Location to create the Dataset in. Format: ``projects/{project}/locations/{location}`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. dataset (google.cloud.aiplatform_v1.types.Dataset): Required. The Dataset to create. This corresponds to the ``dataset`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.api_core.operation.Operation: An object representing a long-running operation. The result type for the operation will be :class:`google.cloud.aiplatform_v1.types.Dataset` A collection of DataItems and Annotations on them.
https://github.com/googleapis/python-aiplatform/blob/c1c2326b2342ab1b6f4c4ce3852e63376eae740d/google/cloud/aiplatform_v1/services/dataset_service/client.py#L426-L515
from collections import OrderedDict from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.transport import mtls from google.auth.transport.grpc import SslCredentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from google.api_core import operation as gac_operation from google.api_core import operation_async from google.cloud.aiplatform_v1.services.dataset_service import pagers from google.cloud.aiplatform_v1.types import annotation from google.cloud.aiplatform_v1.types import annotation_spec from google.cloud.aiplatform_v1.types import data_item from google.cloud.aiplatform_v1.types import dataset from google.cloud.aiplatform_v1.types import dataset as gca_dataset from google.cloud.aiplatform_v1.types import dataset_service from google.cloud.aiplatform_v1.types import encryption_spec from google.cloud.aiplatform_v1.types import operation as gca_operation from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 from .transports.base import DatasetServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import DatasetServiceGrpcTransport from .transports.grpc_asyncio import DatasetServiceGrpcAsyncIOTransport class DatasetServiceClientMeta(type): _transport_registry = ( OrderedDict() ) _transport_registry["grpc"] = DatasetServiceGrpcTransport _transport_registry["grpc_asyncio"] = DatasetServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[DatasetServiceTransport]: if label: return cls._transport_registry[label] return next(iter(cls._transport_registry.values())) class DatasetServiceClient(metaclass=DatasetServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint mtls_endpoint_re = re.compile( r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" 
) m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint if sandbox: return api_endpoint.replace( "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = "aiplatform.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( DEFAULT_ENDPOINT ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials return cls(*args, **kwargs) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> DatasetServiceTransport: return self._transport @staticmethod def annotation_path( project: str, location: str, dataset: str, data_item: str, annotation: str, ) -> str: return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}/annotations/{annotation}".format( project=project, location=location, dataset=dataset, data_item=data_item, annotation=annotation, ) @staticmethod def parse_annotation_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/dataItems/(?P<data_item>.+?)/annotations/(?P<annotation>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def annotation_spec_path( project: str, location: str, dataset: str, annotation_spec: str, ) -> str: return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format( project=project, location=location, dataset=dataset, annotation_spec=annotation_spec, ) @staticmethod def parse_annotation_spec_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/annotationSpecs/(?P<annotation_spec>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def data_item_path( project: str, location: str, dataset: str, data_item: str, ) -> str: return "projects/{project}/locations/{location}/datasets/{dataset}/dataItems/{data_item}".format( project=project, location=location, dataset=dataset, data_item=data_item, ) @staticmethod def parse_data_item_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/dataItems/(?P<data_item>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def dataset_path(project: str, location: str, dataset: str,) -> str: return "projects/{project}/locations/{location}/datasets/{dataset}".format( project=project, location=location, dataset=dataset, ) @staticmethod def parse_dataset_path(path: str) -> Dict[str, str]: m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", path, ) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @staticmethod def parse_common_billing_account_path(path: str) -> Dict[str, str]: m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_folder_path(folder: str,) -> str: return 
"folders/{folder}".format(folder=folder,) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_organization_path(organization: str,) -> str: return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_project_path(project: str,) -> str: return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_location_path(project: str, location: str,) -> str: return "projects/{project}/locations/{location}".format( project=project, location=location, ) @staticmethod def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, DatasetServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: if isinstance(client_options, dict): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = bool( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: is_mtls = True client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() if is_mtls: client_cert_source_func = mtls.default_client_cert_source() else: client_cert_source_func = None if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint else: use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": if is_mtls: api_endpoint = self.DEFAULT_MTLS_ENDPOINT else: api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " "values: never, auto, always" ) if isinstance(transport, DatasetServiceTransport): if credentials or client_options.credentials_file: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) if client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, )
Apache License 2.0
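A hedged call sketch for create_dataset from the record above. The project, region, and display name are placeholders; the metadata_schema_uri shown is the one Vertex AI documents for image datasets, but treat it as an assumption here:

from google.cloud import aiplatform_v1

client = aiplatform_v1.DatasetServiceClient()

dataset = aiplatform_v1.Dataset(
    display_name="my-dataset",  # placeholder
    metadata_schema_uri=(
        "gs://google-cloud-aiplatform/schema/dataset/metadata/image_1.0.0.yaml"
    ),
)

# create_dataset returns a long-running operation wrapper;
# result() blocks until the Dataset resource exists.
operation = client.create_dataset(
    parent="projects/my-project/locations/us-central1",  # placeholder
    dataset=dataset,
)
created = operation.result()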
qkaren/counterfactual-storyrw
third_party/texar/texar/data/vocabulary.py
Vocab.bos_token
python
def bos_token(self): return self._bos_token
The special token string indicating the beginning of a sequence.
https://github.com/qkaren/counterfactual-storyrw/blob/5e138d4ad11dd5d1d21dc20d869ffae594201734/third_party/texar/texar/data/vocabulary.py#L263-L266
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import warnings from collections import defaultdict import tensorflow as tf from tensorflow import gfile import numpy as np from texar.utils.utils import dict_lookup __all__ = [ "SpecialTokens", "Vocab" ] class SpecialTokens(object): PAD = "<PAD>" BOS = "<BOS>" EOS = "<EOS>" UNK = "<UNK>" def _make_defaultdict(keys, values, default_value): dict_ = defaultdict(lambda: default_value) for k, v in zip(keys, values): dict_[k] = v return dict_ class Vocab(object): def __init__(self, filename, pad_token=SpecialTokens.PAD, bos_token=SpecialTokens.BOS, eos_token=SpecialTokens.EOS, unk_token=SpecialTokens.UNK): self._filename = filename self._pad_token = pad_token self._bos_token = bos_token self._eos_token = eos_token self._unk_token = unk_token self._id_to_token_map, self._token_to_id_map, self._id_to_token_map_py, self._token_to_id_map_py = self.load(self._filename) def load(self, filename): with gfile.GFile(filename) as vocab_file: vocab = list(tf.compat.as_text(line.strip()) for line in vocab_file) warnings.simplefilter("ignore", UnicodeWarning) if self._bos_token in vocab: raise ValueError("Special begin-of-seq token already exists in the " "vocabulary: '%s'" % self._bos_token) if self._eos_token in vocab: raise ValueError("Special end-of-seq token already exists in the " "vocabulary: '%s'" % self._eos_token) if self._unk_token in vocab: raise ValueError("Special UNK token already exists in the " "vocabulary: '%s'" % self._unk_token) if self._pad_token in vocab: raise ValueError("Special padding token already exists in the " "vocabulary: '%s'" % self._pad_token) warnings.simplefilter("default", UnicodeWarning) vocab = [self._pad_token, self._bos_token, self._eos_token, self._unk_token] + vocab unk_token_idx = 3 vocab_size = len(vocab) vocab_idx = np.arange(vocab_size) id_to_token_map = tf.contrib.lookup.HashTable( tf.contrib.lookup.KeyValueTensorInitializer( vocab_idx, vocab, key_dtype=tf.int64, value_dtype=tf.string), self._unk_token) token_to_id_map = tf.contrib.lookup.HashTable( tf.contrib.lookup.KeyValueTensorInitializer( vocab, vocab_idx, key_dtype=tf.string, value_dtype=tf.int64), unk_token_idx) id_to_token_map_py = _make_defaultdict( vocab_idx, vocab, self._unk_token) token_to_id_map_py = _make_defaultdict( vocab, vocab_idx, unk_token_idx) return id_to_token_map, token_to_id_map, id_to_token_map_py, token_to_id_map_py def map_ids_to_tokens(self, ids): return self.id_to_token_map.lookup(tf.to_int64(ids)) def map_tokens_to_ids(self, tokens): return self.token_to_id_map.lookup(tokens) def map_ids_to_tokens_py(self, ids): return dict_lookup(self.id_to_token_map_py, ids, self.unk_token) def map_tokens_to_ids_py(self, tokens): return dict_lookup(self.token_to_id_map_py, tokens, self.unk_token_id) @property def id_to_token_map(self): return self._id_to_token_map @property def token_to_id_map(self): return self._token_to_id_map @property def id_to_token_map_py(self): return self._id_to_token_map_py @property def token_to_id_map_py(self): return self._token_to_id_map_py @property def size(self): return len(self.token_to_id_map_py) @property
MIT License
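A small usage sketch for the Vocab class this record is drawn from (the vocab.txt path is hypothetical and is assumed to list one token per line, excluding the four special tokens, which Vocab prepends at ids 0-3):

from texar.data import Vocab  # vocabulary.py exports Vocab via __all__

vocab = Vocab('vocab.txt')  # hypothetical path

print(vocab.bos_token)  # '<BOS>' by default
ids = vocab.map_tokens_to_ids_py(['hello', 'world'])
tokens = vocab.map_ids_to_tokens_py(ids)  # unknown ids map back to '<UNK>'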
rajammanabrolu/worldgeneration
evennia-engine/evennia/evennia/utils/ansi.py
strip_raw_ansi
python
def strip_raw_ansi(string, parser=ANSI_PARSER): return parser.strip_raw_codes(string)
Remove raw ANSI codes from a string. This assumes pure ANSI byte codes in the string. Args: string (str): The string to parse. parser (ANSIParser, optional): The parser to use. Returns: string (str): The stripped string.
https://github.com/rajammanabrolu/worldgeneration/blob/5e97df013399e1a401d0a7ec184c4b9eb3100edd/evennia-engine/evennia/evennia/utils/ansi.py#L498-L511
import functools import re from collections import OrderedDict from django.conf import settings from evennia.utils import utils from evennia.utils import logger from evennia.utils.utils import to_str ANSI_BEEP = "\07" ANSI_ESCAPE = "\033" ANSI_NORMAL = "\033[0m" ANSI_UNDERLINE = "\033[4m" ANSI_HILITE = "\033[1m" ANSI_UNHILITE = "\033[22m" ANSI_BLINK = "\033[5m" ANSI_INVERSE = "\033[7m" ANSI_INV_HILITE = "\033[1;7m" ANSI_INV_BLINK = "\033[7;5m" ANSI_BLINK_HILITE = "\033[1;5m" ANSI_INV_BLINK_HILITE = "\033[1;5;7m" ANSI_BLACK = "\033[30m" ANSI_RED = "\033[31m" ANSI_GREEN = "\033[32m" ANSI_YELLOW = "\033[33m" ANSI_BLUE = "\033[34m" ANSI_MAGENTA = "\033[35m" ANSI_CYAN = "\033[36m" ANSI_WHITE = "\033[37m" ANSI_BACK_BLACK = "\033[40m" ANSI_BACK_RED = "\033[41m" ANSI_BACK_GREEN = "\033[42m" ANSI_BACK_YELLOW = "\033[43m" ANSI_BACK_BLUE = "\033[44m" ANSI_BACK_MAGENTA = "\033[45m" ANSI_BACK_CYAN = "\033[46m" ANSI_BACK_WHITE = "\033[47m" ANSI_RETURN = "\r\n" ANSI_TAB = "\t" ANSI_SPACE = " " ANSI_ESCAPES = ("{{", "\\\\", "\|\|") _PARSE_CACHE = OrderedDict() _PARSE_CACHE_SIZE = 10000 _COLOR_NO_DEFAULT = settings.COLOR_NO_DEFAULT class ANSIParser(object): ansi_map = [ (r"|n", ANSI_NORMAL), (r"|/", ANSI_RETURN), (r"|-", ANSI_TAB), (r"|_", ANSI_SPACE), (r"|*", ANSI_INVERSE), (r"|^", ANSI_BLINK), (r"|u", ANSI_UNDERLINE), (r"|r", ANSI_HILITE + ANSI_RED), (r"|g", ANSI_HILITE + ANSI_GREEN), (r"|y", ANSI_HILITE + ANSI_YELLOW), (r"|b", ANSI_HILITE + ANSI_BLUE), (r"|m", ANSI_HILITE + ANSI_MAGENTA), (r"|c", ANSI_HILITE + ANSI_CYAN), (r"|w", ANSI_HILITE + ANSI_WHITE), (r"|x", ANSI_HILITE + ANSI_BLACK), (r"|R", ANSI_UNHILITE + ANSI_RED), (r"|G", ANSI_UNHILITE + ANSI_GREEN), (r"|Y", ANSI_UNHILITE + ANSI_YELLOW), (r"|B", ANSI_UNHILITE + ANSI_BLUE), (r"|M", ANSI_UNHILITE + ANSI_MAGENTA), (r"|C", ANSI_UNHILITE + ANSI_CYAN), (r"|W", ANSI_UNHILITE + ANSI_WHITE), (r"|X", ANSI_UNHILITE + ANSI_BLACK), (r"|h", ANSI_HILITE), (r"|H", ANSI_UNHILITE), (r"|!R", ANSI_RED), (r"|!G", ANSI_GREEN), (r"|!Y", ANSI_YELLOW), (r"|!B", ANSI_BLUE), (r"|!M", ANSI_MAGENTA), (r"|!C", ANSI_CYAN), (r"|!W", ANSI_WHITE), (r"|!X", ANSI_BLACK), (r"|[R", ANSI_BACK_RED), (r"|[G", ANSI_BACK_GREEN), (r"|[Y", ANSI_BACK_YELLOW), (r"|[B", ANSI_BACK_BLUE), (r"|[M", ANSI_BACK_MAGENTA), (r"|[C", ANSI_BACK_CYAN), (r"|[W", ANSI_BACK_WHITE), (r"|[X", ANSI_BACK_BLACK), ] ansi_xterm256_bright_bg_map = [ (r"|[r", r"|[500"), (r"|[g", r"|[050"), (r"|[y", r"|[550"), (r"|[b", r"|[005"), (r"|[m", r"|[505"), (r"|[c", r"|[055"), (r"|[w", r"|[555"), (r"|[x", r"|[222"), ] if settings.COLOR_NO_DEFAULT: ansi_map = settings.COLOR_ANSI_EXTRA_MAP xterm256_fg = settings.COLOR_XTERM256_EXTRA_FG xterm256_bg = settings.COLOR_XTERM256_EXTRA_BG xterm256_gfg = settings.COLOR_XTERM256_EXTRA_GFG xterm256_gbg = settings.COLOR_XTERM256_EXTRA_GBG ansi_xterm256_bright_bg_map = settings.COLOR_ANSI_XTERM256_BRIGHT_BG_EXTRA_MAP else: xterm256_fg = [r"\|([0-5])([0-5])([0-5])"] xterm256_bg = [r"\|\[([0-5])([0-5])([0-5])"] xterm256_gfg = [r"\|=([a-z])"] xterm256_gbg = [r"\|\[=([a-z])"] ansi_map += settings.COLOR_ANSI_EXTRA_MAP xterm256_fg += settings.COLOR_XTERM256_EXTRA_FG xterm256_bg += settings.COLOR_XTERM256_EXTRA_BG xterm256_gfg += settings.COLOR_XTERM256_EXTRA_GFG xterm256_gbg += settings.COLOR_XTERM256_EXTRA_GBG ansi_xterm256_bright_bg_map += settings.COLOR_ANSI_XTERM256_BRIGHT_BG_EXTRA_MAP mxp_re = r"\|lc(.*?)\|lt(.*?)\|le" brightbg_sub = re.compile( r"|".join([r"(?<!\|)%s" % re.escape(tup[0]) for tup in ansi_xterm256_bright_bg_map]), re.DOTALL, ) xterm256_fg_sub = 
re.compile(r"|".join(xterm256_fg), re.DOTALL) xterm256_bg_sub = re.compile(r"|".join(xterm256_bg), re.DOTALL) xterm256_gfg_sub = re.compile(r"|".join(xterm256_gfg), re.DOTALL) xterm256_gbg_sub = re.compile(r"|".join(xterm256_gbg), re.DOTALL) ansi_sub = re.compile(r"|".join([re.escape(tup[0]) for tup in ansi_map]), re.DOTALL) mxp_sub = re.compile(mxp_re, re.DOTALL) ansi_map_dict = dict(ansi_map) ansi_xterm256_bright_bg_map_dict = dict(ansi_xterm256_bright_bg_map) ansi_re = r"\033\[[0-9;]+m" ansi_regex = re.compile(ansi_re) ansi_escapes = re.compile(r"(%s)" % "|".join(ANSI_ESCAPES), re.DOTALL) def sub_ansi(self, ansimatch): return self.ansi_map_dict.get(ansimatch.group(), "") def sub_brightbg(self, ansimatch): return self.ansi_xterm256_bright_bg_map_dict.get(ansimatch.group(), "") def sub_xterm256(self, rgbmatch, use_xterm256=False, color_type="fg"): if not rgbmatch: return "" background = color_type in ("bg", "gbg") grayscale = color_type in ("gfg", "gbg") if not grayscale: try: red, green, blue = [int(val) for val in rgbmatch.groups() if val is not None] except (IndexError, ValueError): logger.log_trace() return rgbmatch.group(0) else: try: letter = [val for val in rgbmatch.groups() if val is not None][0] except IndexError: logger.log_trace() return rgbmatch.group(0) if letter == "a": colval = 16 elif letter == "z": colval = 231 else: colval = 134 + ord(letter) gray = (ord(letter) - 97) / 5.0 red, green, blue = gray, gray, gray if use_xterm256: if not grayscale: colval = 16 + (red * 36) + (green * 6) + blue return "\033[%s8;5;%sm" % (3 + int(background), colval) else: if red == green == blue and red < 3: if background: return ANSI_BACK_BLACK elif red >= 1: return ANSI_HILITE + ANSI_BLACK else: return ANSI_NORMAL + ANSI_BLACK elif red == green == blue: if background: return ANSI_BACK_WHITE elif red >= 4: return ANSI_HILITE + ANSI_WHITE else: return ANSI_NORMAL + ANSI_WHITE elif red > green and red > blue: if background: return ANSI_BACK_RED elif red >= 3: return ANSI_HILITE + ANSI_RED else: return ANSI_NORMAL + ANSI_RED elif red == green and red > blue: if background: return ANSI_BACK_YELLOW elif red >= 3: return ANSI_HILITE + ANSI_YELLOW else: return ANSI_NORMAL + ANSI_YELLOW elif red == blue and red > green: if background: return ANSI_BACK_MAGENTA elif red >= 3: return ANSI_HILITE + ANSI_MAGENTA else: return ANSI_NORMAL + ANSI_MAGENTA elif green > blue: if background: return ANSI_BACK_GREEN elif green >= 3: return ANSI_HILITE + ANSI_GREEN else: return ANSI_NORMAL + ANSI_GREEN elif green == blue: if background: return ANSI_BACK_CYAN elif green >= 3: return ANSI_HILITE + ANSI_CYAN else: return ANSI_NORMAL + ANSI_CYAN else: if background: return ANSI_BACK_BLUE elif blue >= 3: return ANSI_HILITE + ANSI_BLUE else: return ANSI_NORMAL + ANSI_BLUE def strip_raw_codes(self, string): return self.ansi_regex.sub("", string) def strip_mxp(self, string): return self.mxp_sub.sub(r"\2", string) def parse_ansi(self, string, strip_ansi=False, xterm256=False, mxp=False): if hasattr(string, "_raw_string"): if strip_ansi: return string.clean() else: return string.raw() if not string: return "" global _PARSE_CACHE cachekey = "%s-%s-%s-%s" % (string, strip_ansi, xterm256, mxp) if cachekey in _PARSE_CACHE: return _PARSE_CACHE[cachekey] string = self.brightbg_sub.sub(self.sub_brightbg, string) def do_xterm256_fg(part): return self.sub_xterm256(part, xterm256, "fg") def do_xterm256_bg(part): return self.sub_xterm256(part, xterm256, "bg") def do_xterm256_gfg(part): return self.sub_xterm256(part, xterm256, "gfg") 
def do_xterm256_gbg(part): return self.sub_xterm256(part, xterm256, "gbg") in_string = utils.to_str(string) parsed_string = [] parts = self.ansi_escapes.split(in_string) + [" "] for part, sep in zip(parts[::2], parts[1::2]): pstring = self.xterm256_fg_sub.sub(do_xterm256_fg, part) pstring = self.xterm256_bg_sub.sub(do_xterm256_bg, pstring) pstring = self.xterm256_gfg_sub.sub(do_xterm256_gfg, pstring) pstring = self.xterm256_gbg_sub.sub(do_xterm256_gbg, pstring) pstring = self.ansi_sub.sub(self.sub_ansi, pstring) parsed_string.append("%s%s" % (pstring, sep[0].strip())) parsed_string = "".join(parsed_string) if not mxp: parsed_string = self.strip_mxp(parsed_string) if strip_ansi: return self.strip_raw_codes(parsed_string) _PARSE_CACHE[cachekey] = parsed_string if len(_PARSE_CACHE) > _PARSE_CACHE_SIZE: _PARSE_CACHE.popitem(last=False) return parsed_string ANSI_PARSER = ANSIParser() def parse_ansi(string, strip_ansi=False, parser=ANSI_PARSER, xterm256=False, mxp=False): return parser.parse_ansi(string, strip_ansi=strip_ansi, xterm256=xterm256, mxp=mxp) def strip_ansi(string, parser=ANSI_PARSER): return parser.parse_ansi(string, strip_ansi=True)
MIT License
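strip_raw_ansi in this record delegates to ANSIParser.strip_raw_codes, which is a single regex substitution. Importing the evennia module requires configured Django settings, so here is a standalone re-creation using the same pattern the parser compiles; Evennia's |-style markup survives because it is not a raw escape sequence:

import re

ANSI_RE = re.compile(r"\033\[[0-9;]+m")  # same pattern as ANSIParser.ansi_re

def strip_raw_ansi(string):
    # Drop raw ESC[...m sequences only; leave everything else alone.
    return ANSI_RE.sub("", string)

assert strip_raw_ansi("\033[1m\033[31mhi\033[0m |gworld|n") == "hi |gworld|n"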
chineseglue/pyclue
PyCLUE/utils/classifier_utils/core.py
file_based_input_fn_builder
python
def file_based_input_fn_builder( input_file, seq_length, is_training, drop_remainder ): name_to_features = { "input_ids": tf.FixedLenFeature([seq_length], tf.int64), "input_mask": tf.FixedLenFeature([seq_length], tf.int64), "segment_ids": tf.FixedLenFeature([seq_length], tf.int64), "label_ids": tf.FixedLenFeature([], tf.int64), "is_real_example": tf.FixedLenFeature([], tf.int64) } def _decode_record(record, name_to_features): examples = tf.parse_single_example(record, name_to_features) for name in list(examples.keys()): t = examples[name] if t.dtype == tf.int64: t = tf.to_int32(t) examples[name] = t return examples def input_fn(params): batch_size = params["batch_size"] d = tf.data.TFRecordDataset(input_file) if is_training: d = d.repeat() d = d.shuffle(buffer_size=100) d = d.apply( tf.contrib.data.map_and_batch( lambda record: _decode_record(record, name_to_features), batch_size=batch_size, drop_remainder=drop_remainder ) ) return d return input_fn
Creates an `input_fn` closure to be passed to TPUEstimator.
https://github.com/chineseglue/pyclue/blob/0088f97f5da5903e720cbd48c7a558a7f1d7e836/PyCLUE/utils/classifier_utils/core.py#L398-L445
from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import json import collections import numpy as np import tensorflow as tf from . import tokenization, modeling from . import optimization_finetuning as optimization from ..configs.model_configs import PRETRAINED_LM_DICT, PRETRAINED_LM_CONFIG, PRETRAINED_LM_CKPT _CWD = os.path.dirname(__file__) DATA_DIR = os.path.abspath(os.path.join(_CWD, "../../datasets")) OUTPUT_DIR = os.path.abspath(os.path.join(_CWD, "../../task_outputs/classifications")) PRETRAINED_LM_DIR = os.path.abspath(os.path.join(_CWD, "../../pretrained_lm")) __all__ = [ "TaskConfigs", "UserConfigs", "InputExample", "PaddingInputExample", "InputFeatures", "DataProcessor", "ClassificationProcessor","convert_single_example", "file_based_input_fn_builder", "create_model", "model_fn_builder", "run_classifier" ] default_configs = { "task_name": None, "pretrained_lm_name": None, "do_train": False, "do_eval": False, "do_predict": False, "data_dir": None, "output_dir": None, "vocab_file": None, "bert_config_file": None, "init_checkpoint": None, "do_lower_case": True, "max_seq_length": 128, "train_batch_size": 32, "eval_batch_size": 8, "predict_batch_size": 8, "learning_rate": 5e-5, "num_train_epochs": 3.0, "warmup_proportion": 0.1, "save_checkpoints_steps": 1000, "iterations_per_loop": 1000, "use_tpu": False, "tpu_name": None, "tpu_zone": None, "gcp_project": None, "master": None, "num_tpu_cores": 8, "verbose": 0 } class TaskConfigs(object): def __init__(self, configs): self.task_name = configs.get("task_name").lower() or "user_defined_task" self.pretrained_lm_name = configs.get("pretrained_lm_name").lower() or "user_defined_pretrained_lm" self.do_train = configs.get("do_train") self.do_eval = configs.get("do_eval") self.do_predict = configs.get("do_predict") self.data_dir = configs.get("data_dir") or os.path.join(DATA_DIR, self.task_name) self.output_dir = configs.get("output_dir") or os.path.join(OUTPUT_DIR, self.task_name, self.pretrained_lm_name) self.vocab_file = configs.get("vocab_file") or os.path.join(PRETRAINED_LM_DIR, self.pretrained_lm_name, PRETRAINED_LM_DICT.get(self.pretrained_lm_name), "vocab.txt") self.bert_config_file = configs.get("bert_config_file") or os.path.join(PRETRAINED_LM_DIR, self.pretrained_lm_name, PRETRAINED_LM_DICT.get(self.pretrained_lm_name), PRETRAINED_LM_CONFIG.get(self.pretrained_lm_name)) self.init_checkpoint = configs.get("init_checkpoint") or os.path.join(PRETRAINED_LM_DIR, self.pretrained_lm_name, PRETRAINED_LM_DICT.get(self.pretrained_lm_name), PRETRAINED_LM_CKPT.get(self.pretrained_lm_name)) self.do_lower_case = configs.get("do_lower_case") self.max_seq_length = configs.get("max_seq_length") self.train_batch_size = configs.get("train_batch_size") self.eval_batch_size = configs.get("eval_batch_size") self.predict_batch_size = configs.get("predict_batch_size") self.learning_rate = configs.get("learning_rate") self.num_train_epochs = configs.get("num_train_epochs") self.warmup_proportion = configs.get("warmup_proportion") self.save_checkpoints_steps = configs.get("save_checkpoints_steps") self.iterations_per_loop = configs.get("iterations_per_loop") self.use_tpu = configs.get("use_tpu") self.tpu_name = configs.get("tpu_name") self.tpu_zone = configs.get("tpu_zone") self.gcp_project = configs.get("gcp_project") self.master = configs.get("master") self.num_tpu_cores = configs.get("num_tpu_cores") self.verbose = configs.get("verbose") class UserConfigs(TaskConfigs): def __init__(self, 
configs): self.label_column = configs.get("label_column") self.text_a_column = configs.get("text_a_column") self.text_b_column = configs.get("text_b_column") self.delimiter = configs.get("delimiter") self.ignore_header = configs.get("ignore_header") self.min_seq_length = configs.get("min_seq_length") self.file_type = configs.get("file_type") super().__init__(configs) class InputExample(object): def __init__(self, guid, text_a, text_b=None, label=None): self.guid = guid self.text_a = text_a self.text_b = text_b self.label = label class PaddingInputExample(object): class InputFeatures(object): def __init__(self, input_ids, input_mask, segment_ids, label_id, is_real_example=True): self.input_ids = input_ids self.input_mask = input_mask self.segment_ids = segment_ids self.label_id = label_id self.is_real_example = is_real_example class DataProcessor(object): def get_train_examples(self, data_dir): raise NotImplementedError() def get_dev_examples(self, data_dir): raise NotImplementedError() def get_test_examples(self, data_dir): raise NotImplementedError() def get_labels(self): raise NotImplementedError() @classmethod def _read_file(cls, input_file, file_type, delimiter): with tf.gfile.Open(input_file, "r") as f: reader = f.readlines() lines = [] for line in reader: lines.append(line.strip()) if file_type == "json": lines = [json.loads(item) for item in lines] else: lines = [item.split(delimiter) for item in lines] return lines class ClassificationProcessor(DataProcessor): def __init__(self, labels, label_column, text_a_column, text_b_column=None, ignore_header=False, min_seq_length=None, file_type="json", delimiter=None): self.language = "zh" self.labels = labels self.label_column = label_column self.text_a_column = text_a_column self.text_b_column = text_b_column self.ignore_header = ignore_header self.min_seq_length = min_seq_length self.file_type = file_type self.delimiter = delimiter def get_train_examples(self, data_dir): return self._create_examples( self._read_file(os.path.join(data_dir, "train."+self.file_type), self.file_type, delimiter=self.delimiter), "train" ) def get_dev_examples(self, data_dir): return self._create_examples( self._read_file(os.path.join(data_dir, "dev."+self.file_type), self.file_type, delimiter=self.delimiter), "dev" ) def get_test_examples(self, data_dir): return self._create_examples( self._read_file(os.path.join(data_dir, "test."+self.file_type), self.file_type, delimiter=self.delimiter), "test" ) def get_labels(self): return self.labels def _create_examples(self, lines, set_type): examples = [] if self.ignore_header: lines = lines[1:] if self.min_seq_length: lines = [line for line in lines if len(line) >= self.min_seq_length] for i, line in enumerate(lines): guid = "%s-%s" %(set_type, i) try: label = tokenization.convert_to_unicode(line[self.label_column]) if set_type != "test" else self.labels[0] text_a = tokenization.convert_to_unicode(line[self.text_a_column]) text_b = None if not self.text_b_column else tokenization.convert_to_unicode(line[self.text_b_column]) examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label) ) except Exception: print("### Error {}: {}".format(i, line)) return examples def convert_single_example( ex_index, example, label_list, max_seq_length, tokenizer): if isinstance(example, PaddingInputExample): return [InputFeatures( input_ids=[0]*max_seq_length, input_mask=[0]*max_seq_length, segment_ids=[0]*max_seq_length, label_id=0, is_real_example=False )] label_map = {} for i, label in enumerate(label_list): 
label_map[label] = i tokens_a = tokenizer.tokenize(example.text_a) tokens_b = None if example.text_b: tokens_b = tokenizer.tokenize(example.text_b) if tokens_b: _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3) else: if len(tokens_a) > max_seq_length - 2: tokens_a = tokens_a[:max_seq_length - 2] tokens = [] segment_ids = [] tokens.append("[CLS]") segment_ids.append(0) for token in tokens_a: tokens.append(token) segment_ids.append(0) tokens.append("[SEP]") segment_ids.append(0) if tokens_b: for token in tokens_b: tokens.append(token) segment_ids.append(1) tokens.append("[SEP]") segment_ids.append(1) input_ids = tokenizer.convert_tokens_to_ids(tokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_length: input_ids.append(0) input_mask.append(0) segment_ids.append(0) assert len(input_ids) == max_seq_length assert len(input_mask) == max_seq_length assert len(segment_ids) == max_seq_length label_id = label_map[example.label] if ex_index < 5: tf.logging.info("*** Example ***") tf.logging.info("guid: %s" %(example.guid)) tf.logging.info("tokens: %s" % " ".join( [tokenization.printable_text(x) for x in tokens])) tf.logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids])) tf.logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask])) tf.logging.info("segment_ids: %s" % " ".join([str(x) for x in segment_ids])) tf.logging.info("label: %s (id = %d)" % (example.label, label_id)) feature = InputFeatures( input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, label_id=label_id, is_real_example=True ) return feature def file_based_convert_examples_to_features( examples, label_list, max_seq_length, tokenizer, output_file): writer = tf.python_io.TFRecordWriter(output_file) for (ex_index, example) in enumerate(examples): if ex_index % 10000 == 0: tf.logging.info("Writing example %d of %d" % (ex_index, len(examples))) feature = convert_single_example(ex_index, example, label_list, max_seq_length, tokenizer) def create_int_feature(values): f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) return f features = collections.OrderedDict() features["input_ids"] = create_int_feature(feature.input_ids) features["input_mask"] = create_int_feature(feature.input_mask) features["segment_ids"] = create_int_feature(feature.segment_ids) features["label_ids"] = create_int_feature([feature.label_id]) features["is_real_example"] = create_int_feature( [int(feature.is_real_example)]) tf_features = tf.train.Features(feature=features) tf_example = tf.train.Example(features=tf_features) writer.write(tf_example.SerializeToString()) writer.close()
MIT License
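A hedged usage sketch for file_based_input_fn_builder from the record above (TensorFlow 1.x only, since it relies on tf.contrib; the record path and batch size are placeholders):

input_fn = file_based_input_fn_builder(
    input_file="train.tf_record",  # placeholder; produced by
                                   # file_based_convert_examples_to_features
    seq_length=128,
    is_training=True,
    drop_remainder=True,
)

# TPUEstimator supplies params={'batch_size': ...} when invoking input_fn.
dataset = input_fn({"batch_size": 32})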
beer-garden/beer-garden
src/app/beer_garden/events/processors.py
QueueListener.run
python
def run(self): while not self.stopped(): try: self.process(self._queue.get(timeout=0.1)) except Empty: pass
Process events as they are received
https://github.com/beer-garden/beer-garden/blob/2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4/src/app/beer_garden/events/processors.py#L50-L56
import logging from multiprocessing import Queue from queue import Empty from brewtils.stoppable_thread import StoppableThread logger = logging.getLogger(__name__) class BaseProcessor(StoppableThread): def __init__(self, action=None, **kwargs): super().__init__(**kwargs) self._action = action def process(self, item): try: self._action(item) except Exception as ex: logger.exception(f"Error processing: {ex}") def put(self, item): self.process(item) class QueueListener(BaseProcessor): def __init__(self, queue=None, **kwargs): super().__init__(**kwargs) self._queue = queue or Queue() def put(self, item): self._queue.put(item) def clear(self): while not self._queue.empty(): self._queue.get()
MIT License
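A small usage sketch for QueueListener from the record above; the handle function is a hypothetical handler:

import time

def handle(event):
    print("got:", event)  # hypothetical handler

listener = QueueListener(action=handle)
listener.start()       # run() polls the internal queue every 0.1s
listener.put("hello")
time.sleep(0.5)        # give the worker a moment to drain the queue
listener.stop()        # StoppableThread sets the flag; run() exits its loop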