Dataset columns (type and observed value range per column):
  repository_name      string, lengths 7 to 107
  function_path        string, lengths 4 to 190
  function_identifier  string, lengths 1 to 236
  language             string, 1 distinct value
  function             string, lengths 9 to 647k
  docstring            string, lengths 5 to 488k
  function_url         string, lengths 71 to 285
  context              string, lengths 0 to 2.51M
  license              string, 5 distinct values
ericqaq/puck
puck/response.py
BaseResponse.wsgi_response
python
def wsgi_response(self, environ):
    obj_iter = self.iterable_item(environ)
    return obj_iter, self.status, self.get_header_list()
Return the WSGI response as a tuple. :param environ: the WSGI environment :return: (obj_iter, status, header_list) the first is an iterable object, the second is a string giving the response status, such as '200 OK', and the last is the list of response headers.
https://github.com/ericqaq/puck/blob/ca1fc47e068a9ab70cfa1a5307fe6b9ed8f6d90d/puck/response.py#L158-L168
import warnings from Cookie import SimpleCookie from .data_structures import Header from .utils import generate_content_type from .cookies import generate_cookie from .constants import HTTP_CODES class BaseResponse(object): charset = 'utf-8' default_mimetype = 'text/html' default_status = 200 def __init__(self, status=default_status, response_body=None, header=None, mimetype=None, content_type=None): if isinstance(header, Header): self.header = header elif not header: self.header = Header() else: self.header = Header(header) self._cookies = None if content_type is None: if mimetype is None and 'Content-Type' not in self.header: mimetype = self.default_mimetype if mimetype is not None: mimetype = generate_content_type(mimetype, self.charset) content_type = mimetype if content_type is not None: self.header['Content-Type'] = content_type if isinstance(status, basestring): self.status = status else: try: self.status = '%d %s' % (int(status), HTTP_CODES[int(status)]) except TypeError: self.status = '%d %s' % ( self.default_status, HTTP_CODES[self.default_status]) warnings.warn( 'Status code is initialized to 200, Because the ' 'status code should be int.', UserWarning ) if response_body is None: self.response = [] else: self.response = response_body @property def is_sequence(self): return isinstance(self.response, (list, tuple)) @property def cookies(self): if not self._cookies: self._create_cookie() return self._cookies def _create_cookie(self): _dict = self.__dict__ _dict['_cookies'] = SimpleCookie() def set_cookies(self, key, value='', expires=None, path=None, domain=None, max_age=None, secure=None, httponly=False): morsel = generate_cookie( key, value, expires=expires, path=path, domain=domain, max_age=max_age, secure=secure, httponly=httponly ) self.cookies[key] = morsel def delete_cookies(self, key, path, domain): self.set_cookies(key, path=path, domain=domain, max_age=0) def get_header_list(self): for item in self.cookies: self.header.add('Set-Cookie', self.cookies[item].OutputString()) if self.is_sequence and 'Content-Length' not in self.header: try: content_length = sum((len(str(item))) for item in self.response) except UnicodeError: pass else: self.header.add('Content-Length', str(content_length)) return self.header.head_to_list(charset=self.charset) def iterable_item(self, environ): status_code = int(self.status[:3]) if environ['REQUEST_METHOD'] == 'HEAD' or 100 <= status_code < 200 or status_code in (204, 304): yield () for item in self.response: if isinstance(item, unicode): yield item.encode(self.charset) else: yield str(item)
MIT License
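A minimal usage sketch for the function above, not part of the dataset record: it assumes only what the function and context fields show (the BaseResponse constructor and wsgi_response returning a body iterable, a status string, and a header list), and it targets Python 2 to match the Cookie/basestring usage in the context.

# Hypothetical WSGI application built around the BaseResponse shown above (Python 2).
from puck.response import BaseResponse

def application(environ, start_response):
    response = BaseResponse(status=200, response_body=['hello world'])
    body_iter, status, header_list = response.wsgi_response(environ)
    start_response(status, header_list)  # status is a string such as '200 OK'
    return body_iter                     # iterable yielding encoded byte strings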
qingyaoai/deep-listwise-context-model-for-ranking-refinement
DLCM/RankLSTM_model.py
RankLSTM.get_batch
python
def get_batch(self, input_seq, output_seq, output_weights, output_initial_score, features):
    if len(input_seq[0]) != self.rank_list_size:
        raise ValueError("Input ranklist length must be equal to the one in bucket,"
                         " %d != %d." % (len(input_seq[0]), self.rank_list_size))
    length = len(input_seq)
    encoder_inputs, decoder_targets, embeddings, decoder_weights, decoder_initial_scores = [], [], [], [], []
    cache = None
    for _ in xrange(self.batch_size):
        i = int(random.random() * length)
        self.prepare_data_with_index(input_seq, output_seq, output_weights, output_initial_score,
                                     features, i, encoder_inputs, decoder_targets, embeddings,
                                     decoder_weights, decoder_initial_scores)
        if cache == None:
            cache = [input_seq[i], decoder_weights[-1]]
    embedings_length = len(embeddings)
    for i in xrange(self.batch_size):
        for j in xrange(self.rank_list_size):
            if encoder_inputs[i][j] < 0:
                encoder_inputs[i][j] = embedings_length
    batch_encoder_inputs = []
    batch_weights = []
    batch_targets = []
    batch_initial_scores = []
    for length_idx in xrange(self.rank_list_size):
        batch_encoder_inputs.append(
            np.array([encoder_inputs[batch_idx][length_idx]
                      for batch_idx in xrange(self.batch_size)], dtype=np.float32))
    for length_idx in xrange(self.rank_list_size):
        batch_targets.append(
            np.array([decoder_targets[batch_idx][length_idx]
                      for batch_idx in xrange(self.batch_size)], dtype=np.int32))
        batch_weights.append(
            np.array([decoder_weights[batch_idx][length_idx]
                      for batch_idx in xrange(self.batch_size)], dtype=np.float32))
        batch_initial_scores.append(
            np.array([decoder_initial_scores[batch_idx][length_idx]
                      for batch_idx in xrange(self.batch_size)], dtype=np.float32))
    return batch_encoder_inputs, embeddings, batch_targets, batch_weights, batch_initial_scores, cache
Get a random batch of data from the specified bucket, prepare for step.
https://github.com/qingyaoai/deep-listwise-context-model-for-ranking-refinement/blob/cdfcb48294ccf6b90a6e576f3fa40f6749c6ccdd/DLCM/RankLSTM_model.py#L266-L319
from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import os import random import sys import time import numpy as np from six.moves import xrange import tensorflow as tf from six.moves import xrange from six.moves import zip import copy from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import embedding_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops from tensorflow.python.ops import rnn_cell_impl from tensorflow.python.ops import variable_scope from tensorflow.python.ops import init_ops from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import logging_ops from tensorflow.python.ops import tensor_array_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.util import nest linear = rnn_cell_impl._linear class RankLSTM(object): def __init__(self, rank_list_size, embed_size, expand_embed_size, batch_size, hparam_str, forward_only=False, feed_previous = False): self.hparams = tf.contrib.training.HParams( learning_rate=0.5, learning_rate_decay_factor=0.8, max_gradient_norm=5.0, reverse_input=True, num_layers=1, num_heads=3, loss_func='attrank', l2_loss=0.0, att_strategy='add', use_residua=False, use_lstm=False, softRank_theta=0.1, ) self.hparams.parse(hparam_str) self.start_index = 0 self.count = 1 self.rank_list_size = rank_list_size self.embed_size = embed_size self.expand_embed_size = expand_embed_size if expand_embed_size > 0 else 0 self.batch_size = batch_size self.learning_rate = tf.Variable(float(self.hparams.learning_rate), trainable=False) self.learning_rate_decay_op = self.learning_rate.assign( self.learning_rate * self.hparams.learning_rate_decay_factor) self.global_step = tf.Variable(0, trainable=False) output_projection = None self.encoder_inputs = [] self.decoder_inputs = [] self.embeddings = tf.placeholder(tf.float32, shape=[None, embed_size], name="embeddings") self.target_labels = [] self.target_weights = [] self.target_initial_score = [] for i in xrange(self.rank_list_size): self.encoder_inputs.append(tf.placeholder(tf.int64, shape=[None], name="encoder{0}".format(i))) for i in xrange(self.rank_list_size): self.target_labels.append(tf.placeholder(tf.int64, shape=[None], name="targets{0}".format(i))) self.target_weights.append(tf.placeholder(tf.float32, shape=[None], name="weight{0}".format(i))) self.target_initial_score.append(tf.placeholder(tf.float32, shape=[None], name="initial_score{0}".format(i))) single_cell = tf.contrib.rnn.GRUCell(embed_size + expand_embed_size) double_cell = tf.contrib.rnn.GRUCell((embed_size + expand_embed_size) * 2) if self.hparams.use_lstm: single_cell = tf.contrib.rnn.BasicLSTMCell((embed_size + expand_embed_size)) double_cell = tf.contrib.rnn.BasicLSTMCell((embed_size + expand_embed_size) * 2) cell = single_cell self.double_cell = double_cell if self.hparams.num_layers > 1: cell = tf.contrib.rnn.MultiRNNCell([single_cell] * self.hparams.num_layers) self.double_cell = tf.contrib.rnn.MultiRNNCell([double_cell] * self.hparams.num_layers) self.batch_index_bias = tf.placeholder(tf.int64, shape=[None]) self.batch_expansion_mat = tf.placeholder(tf.float32, shape=[None,1]) self.batch_diag = tf.placeholder(tf.float32, shape=[None,self.rank_list_size,self.rank_list_size]) self.GO_embed = tf.get_variable("GO_embed", [1,self.embed_size 
+ expand_embed_size],dtype=tf.float32) self.PAD_embed = tf.get_variable("PAD_embed", [1,self.embed_size],dtype=tf.float32) self.outputs, self.state= self.embedding_rnn_seq2seq(self.encoder_inputs, self.embeddings, cell, output_projection, forward_only or feed_previous) if self.hparams.use_residua: self.outputs[0] = self.outputs[0] + tf.stack(self.target_initial_score, axis=1) print('Loss Function is ' + self.hparams.loss_func) self.loss = None if self.hparams.loss_func == 'attrank': self.loss = self.attrank_loss(self.outputs[0], self.target_labels, self.target_weights) elif self.hparams.loss_func == 'listMLE': self.loss = self.listMLE(self.outputs[0], self.target_labels, self.target_weights) elif self.hparams.loss_func == 'softRank': self.loss = self.softRank(self.outputs[0], self.target_labels, self.target_weights) params = tf.trainable_variables() if self.hparams.l2_loss > 0: for p in params: self.loss += self.hparams.l2_loss * tf.nn.l2_loss(p) if not forward_only: opt = tf.train.GradientDescentOptimizer(self.learning_rate) self.gradients = tf.gradients(self.loss, params) self.clipped_gradients, self.norm = tf.clip_by_global_norm(self.gradients, self.hparams.max_gradient_norm) self.updates = opt.apply_gradients(zip(self.clipped_gradients, params), global_step=self.global_step) tf.summary.scalar('Training Loss', tf.reduce_mean(self.loss)) tf.summary.scalar('Learning Rate', self.learning_rate) else: tf.summary.scalar('Validation/Test Loss', tf.reduce_mean(self.loss)) self.summary = tf.summary.merge_all() self.saver = tf.train.Saver(tf.global_variables()) def step(self, session, encoder_inputs, embeddings, target_labels, target_weights, target_initial_score, forward_only): if len(encoder_inputs) != self.rank_list_size: raise ValueError("Encoder length must be equal to the one in bucket," " %d != %d." % (len(encoder_inputs), self.rank_list_size)) if len(target_labels) != self.rank_list_size: raise ValueError("Labels length must be equal to the one in bucket," " %d != %d." % (len(target_labels), self.rank_list_size)) if len(target_weights) != self.rank_list_size: raise ValueError("Weights length must be equal to the one in bucket," " %d != %d." 
% (len(target_weights), self.rank_list_size)) input_feed = {} input_feed[self.batch_index_bias.name] = np.array([i * self.rank_list_size for i in xrange(self.batch_size)]) input_feed[self.batch_expansion_mat.name] = np.ones((self.batch_size,1)) input_feed[self.batch_diag.name] = np.array([np.diag([0.5 for x in xrange(self.rank_list_size)]) for _ in xrange(self.batch_size)]) input_feed[self.embeddings.name] = np.array(embeddings) for l in xrange(self.rank_list_size): input_feed[self.encoder_inputs[l].name] = encoder_inputs[l] for l in xrange(self.rank_list_size): input_feed[self.target_weights[l].name] = target_weights[l] input_feed[self.target_labels[l].name] = target_labels[l] input_feed[self.target_initial_score[l].name] = target_initial_score[l] if not forward_only: output_feed = [self.updates, self.norm, self.loss, self.summary ] else: output_feed = [self.loss, self.summary ] output_feed += self.outputs outputs = session.run(output_feed, input_feed) if not forward_only: return outputs[1], outputs[2], None, outputs[-1] else: return None, outputs[0], outputs[2:], outputs[1] def prepare_data_with_index(self, input_seq, output_seq, output_weights, output_initial_score, features, index, encoder_inputs, decoder_targets, embeddings, decoder_weights, decoder_initial_scores): alpha = 1.0 i = index base = len(embeddings) for x in input_seq[i]: if x >= 0: embeddings.append(features[x]) decoder_targets.append([x if output_seq[i][x] < 0 else output_seq[i][x] for x in xrange(len(output_seq[i]))]) if self.hparams.reverse_input: encoder_inputs.append(list(reversed([-1 if input_seq[i][x] < 0 else base+x for x in xrange(len(input_seq[i]))]))) decoder_weights.append(list(reversed([0 if input_seq[i][x] < 0 else output_weights[i][x] for x in xrange(len(input_seq[i]))]))) decoder_initial_scores.append(list(reversed([0 if input_seq[i][x] < 0 else output_initial_score[i][x] for x in xrange(len(input_seq[i]))]))) if self.hparams.loss_func == 'attrank': weight_sum = 0 for w in xrange(len(decoder_weights[-1])): decoder_weights[-1][w] = math.exp(decoder_weights[-1][w]) if decoder_weights[-1][w] > 0 else 0 weight_sum += decoder_weights[-1][w] if weight_sum > 0: for w in xrange(len(decoder_weights[-1])): decoder_weights[-1][w] /= weight_sum for j in xrange(len(decoder_targets[-1])): decoder_targets[-1][j] = self.rank_list_size - 1 - decoder_targets[-1][j] else: encoder_input = [] decoder_weight = [] decoder_initial_score = [] tmp = 0 for x in xrange(len(input_seq[i])): if input_seq[i][x] < 0: encoder_input.append(-1) decoder_weight.append(-1) tmp += 1 else: encoder_input.append(base+x-tmp) decoder_weight.append(output_weights[i][x-tmp]) decoder_initial_score.append(output_initial_score[i][x-tmp]) encoder_inputs.append(encoder_input) decoder_weights.append(decoder_weight) decoder_initial_scores.append(decoder_initial_score) count = 0 for x in encoder_inputs[-1]: if x < 0: count += 1 for j in xrange(len(decoder_targets[-1])): index = count + decoder_targets[-1][j] if index < self.rank_list_size: decoder_targets[-1][j] = index else: decoder_targets[-1][j] = index - self.rank_list_size for x in xrange(len(decoder_weights[-1])): decoder_weights[-1][x] *= alpha
Apache License 2.0
pawamoy/aria2p
src/aria2p/downloads.py
Download.total_length_string
python
def total_length_string(self, human_readable: bool = True) -> str:
    if human_readable:
        return human_readable_bytes(self.total_length, delim=" ")
    return str(self.total_length) + " B"
Return the total length as string. Arguments: human_readable: Return in human readable format or not. Returns: The total length string.
https://github.com/pawamoy/aria2p/blob/6cdc9a1ef5ed0413fffa3be4885f4b5325177660/src/aria2p/downloads.py#L484-L496
from datetime import datetime, timedelta from pathlib import Path from typing import List, Optional from loguru import logger import aria2p from aria2p.client import ClientException from aria2p.options import Options from aria2p.types import PathOrStr from aria2p.utils import bool_or_value, human_readable_bytes, human_readable_timedelta class BitTorrent: def __init__(self, struct: dict) -> None: self._struct = struct or {} def __str__(self): return self.info["name"] @property def announce_list(self) -> Optional[List[List[str]]]: return self._struct.get("announceList") @property def comment(self) -> Optional[str]: return self._struct.get("comment") @property def creation_date(self) -> datetime: return datetime.fromtimestamp(self._struct["creationDate"]) @property def mode(self) -> Optional[str]: return self._struct.get("mode") @property def info(self) -> Optional[dict]: return self._struct.get("info") class File: def __init__(self, struct: dict) -> None: self._struct = struct or {} def __str__(self): return str(self.path) def __eq__(self, other): return self.path == other.path @property def index(self) -> int: return int(self._struct["index"]) @property def path(self) -> Path: return Path(self._struct["path"]) @property def is_metadata(self) -> bool: return str(self.path).startswith("[METADATA]") @property def length(self) -> int: return int(self._struct["length"]) def length_string(self, human_readable: bool = True) -> str: if human_readable: return human_readable_bytes(self.length, delim=" ") return str(self.length) + " B" @property def completed_length(self) -> int: return int(self._struct["completedLength"]) def completed_length_string(self, human_readable: bool = True) -> str: if human_readable: return human_readable_bytes(self.completed_length, delim=" ") return str(self.completed_length) + " B" @property def selected(self) -> bool: return bool_or_value(self._struct["selected"]) @property def uris(self) -> List[dict]: return self._struct.get("uris", []) class Download: def __init__(self, api: "aria2p.api.API", struct: dict) -> None: self.api = api self._struct = struct or {} self._files: List[File] = [] self._root_files_paths: List[Path] = [] self._bittorrent: Optional[BitTorrent] = None self._name = "" self._options: Optional[Options] = None self._followed_by: Optional[List[Download]] = None self._following: Optional[Download] = None self._belongs_to: Optional[Download] = None def __str__(self): return self.name def __eq__(self, other): return self.gid == other.gid def update(self) -> None: self._struct = self.api.client.tell_status(self.gid) self._files = [] self._name = "" self._bittorrent = None self._followed_by = None self._following = None self._belongs_to = None self._options = None @property def live(self) -> "Download": self.update() return self @property def name(self) -> str: if not self._name: if self.bittorrent and self.bittorrent.info: self._name = self.bittorrent.info["name"] elif self.files[0].is_metadata: self._name = str(self.files[0].path) else: file_path = str(self.files[0].path.absolute()) dir_path = str(self.dir.absolute()) if file_path.startswith(dir_path): start_pos = len(dir_path) + 1 self._name = Path(file_path[start_pos:]).parts[0] else: try: self._name = self.files[0].uris[0]["uri"].split("/")[-1] except IndexError: pass return self._name @property def control_file_path(self) -> Path: return self.dir / (self.name + ".aria2") @property def root_files_paths(self) -> List[Path]: if not self._root_files_paths: paths = [] for file in self.files: if 
file.is_metadata: continue try: relative_path = file.path.relative_to(self.dir) except ValueError as error: logger.warning(f"Can't determine file path '{file.path}' relative to '{self.dir}'") logger.opt(exception=True).trace(error) else: path = self.dir / relative_path.parts[0] if path not in paths: paths.append(path) self._root_files_paths = paths return self._root_files_paths @property def options(self) -> Options: if not self._options: self.update_options() return self._options @options.setter def options(self, value): self._options = value def update_options(self) -> None: self._options = self.api.get_options(downloads=[self])[0] @property def gid(self) -> str: return self._struct["gid"] @property def status(self) -> str: return self._struct["status"] @property def is_active(self) -> bool: return self.status == "active" @property def is_waiting(self) -> bool: return self.status == "waiting" @property def is_paused(self) -> bool: return self.status == "paused" @property def has_failed(self) -> bool: return self.status == "error" @property def is_complete(self) -> bool: return self.status == "complete" @property def is_removed(self) -> bool: return self.status == "removed" @property def is_metadata(self) -> bool: return all(_.is_metadata for _ in self.files) @property def is_torrent(self) -> bool: return "bittorrent" in self._struct @property def total_length(self) -> int: return int(self._struct["totalLength"])
ISC License
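To make the docstring concrete, here is a hedged sketch of total_length_string based only on the Download code shown in the context; the struct dict is a stand-in for aria2's tell_status output, and the exact human-readable text depends on aria2p's human_readable_bytes helper.

from aria2p.downloads import Download

# Minimal fake struct; a real one comes from aria2's tell_status RPC response.
struct = {"gid": "2089b05ecca3d829", "status": "active", "totalLength": "1048576"}
download = Download(api=None, struct=struct)  # api is not used by these accessors

print(download.total_length)                                # 1048576
print(download.total_length_string())                       # human-readable form, e.g. "1.00 MiB"
print(download.total_length_string(human_readable=False))   # "1048576 B"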
sibirrer/lenstronomy
lenstronomy/LensModel/MultiPlane/multi_plane_base.py
MultiPlaneBase.transverse_distance_start_stop
python
def transverse_distance_start_stop(self, z_start, z_stop, include_z_start=False):
    z_lens_last = z_start
    first_deflector = True
    T_ij_start = None
    for i, idex in enumerate(self._sorted_redshift_index):
        z_lens = self._lens_redshift_list[idex]
        if self._start_condition(include_z_start, z_lens, z_start) and z_lens <= z_stop:
            if first_deflector is True:
                T_ij_start = self._cosmo_bkg.T_xy(z_start, z_lens)
                first_deflector = False
            z_lens_last = z_lens
    T_ij_end = self._cosmo_bkg.T_xy(z_lens_last, z_stop)
    return T_ij_start, T_ij_end
Computes the transverse distances (T_ij) required by the ray-tracing: between the starting redshift and the first deflector after it, and between the last deflector and the end of the ray-tracing. :param z_start: redshift of the start of the ray-tracing :param z_stop: redshift at which the ray-tracing stops :param include_z_start: boolean, if True includes the computation of the starting position if the first deflector is at z_start :return: T_ij_start, T_ij_end
https://github.com/sibirrer/lenstronomy/blob/e6d0e179a98ecb0c4db25cdf7cfb73e83c6aeded/lenstronomy/LensModel/MultiPlane/multi_plane_base.py#L137-L159
import numpy as np from lenstronomy.Cosmo.background import Background from lenstronomy.LensModel.profile_list_base import ProfileListBase import lenstronomy.Util.constants as const __all__ = ['MultiPlaneBase'] class MultiPlaneBase(ProfileListBase): def __init__(self, lens_model_list, lens_redshift_list, z_source_convention, cosmo=None, numerical_alpha_class=None, cosmo_interp=False, z_interp_stop=None, num_z_interp=100, kwargs_interp=None): if z_interp_stop is None: z_interp_stop = z_source_convention self._cosmo_bkg = Background(cosmo, interp=cosmo_interp, z_stop=z_interp_stop, num_interp=num_z_interp) self._z_source_convention = z_source_convention if len(lens_redshift_list) > 0: z_lens_max = np.max(lens_redshift_list) if z_lens_max >= z_source_convention: raise ValueError('deflector redshifts higher or equal the source redshift convention (%s >= %s for the reduced lens' ' model quantities not allowed (leads to negative reduced deflection angles!' % (z_lens_max, z_source_convention)) if not len(lens_model_list) == len(lens_redshift_list): raise ValueError("The length of lens_model_list does not correspond to redshift_list") self._lens_redshift_list = lens_redshift_list super(MultiPlaneBase, self).__init__(lens_model_list, numerical_alpha_class=numerical_alpha_class, lens_redshift_list=lens_redshift_list, z_source_convention=z_source_convention, kwargs_interp=kwargs_interp) if len(lens_model_list) < 1: self._sorted_redshift_index = [] else: self._sorted_redshift_index = self._index_ordering(lens_redshift_list) z_before = 0 T_z = 0 self._T_ij_list = [] self._T_z_list = [] if len(lens_model_list)<1: self._reduced2physical_factor = [] else: z_sort = np.array(self._lens_redshift_list)[self._sorted_redshift_index] z_source_array = np.ones(z_sort.shape)*z_source_convention self._reduced2physical_factor = self._cosmo_bkg.d_xy(0, z_source_convention) / self._cosmo_bkg.d_xy(z_sort, z_source_array) for idex in self._sorted_redshift_index: z_lens = self._lens_redshift_list[idex] if z_before == z_lens: delta_T = 0 else: T_z = self._cosmo_bkg.T_xy(0, z_lens) delta_T = self._cosmo_bkg.T_xy(z_before, z_lens) self._T_ij_list.append(delta_T) self._T_z_list.append(T_z) z_before = z_lens def ray_shooting_partial(self, x, y, alpha_x, alpha_y, z_start, z_stop, kwargs_lens, include_z_start=False, T_ij_start=None, T_ij_end=None): x = np.array(x, dtype=float) y = np.array(y, dtype=float) alpha_x = np.array(alpha_x) alpha_y = np.array(alpha_y) z_lens_last = z_start first_deflector = True for i, idex in enumerate(self._sorted_redshift_index): z_lens = self._lens_redshift_list[idex] if self._start_condition(include_z_start, z_lens, z_start) and z_lens <= z_stop: if first_deflector is True: if T_ij_start is None: if z_start == 0: delta_T = self._T_ij_list[0] else: delta_T = self._cosmo_bkg.T_xy(z_start, z_lens) else: delta_T = T_ij_start first_deflector = False else: delta_T = self._T_ij_list[i] x, y = self._ray_step_add(x, y, alpha_x, alpha_y, delta_T) alpha_x, alpha_y = self._add_deflection(x, y, alpha_x, alpha_y, kwargs_lens, i) z_lens_last = z_lens if T_ij_end is None: if z_lens_last == z_stop: delta_T = 0 else: delta_T = self._cosmo_bkg.T_xy(z_lens_last, z_stop) else: delta_T = T_ij_end x, y = self._ray_step_add(x, y, alpha_x, alpha_y, delta_T) return x, y, alpha_x, alpha_y
MIT License
apache/incubator-retired-cotton
mysos/scheduler/http.py
MysosServer.create
python
def create(self, clustername):
    cluster_name = clustername
    num_nodes = bottle.request.forms.get('num_nodes', default=1)
    cluster_user = bottle.request.forms.get('cluster_user', default=None)
    backup_id = bottle.request.forms.get('backup_id', default=None)
    size = bottle.request.forms.get('size', default=None)
    cluster_password = bottle.request.forms.get('cluster_password', default=None)
    try:
        cluster_zk_url, cluster_password = self._scheduler.create_cluster(
            cluster_name,
            cluster_user,
            num_nodes,
            size,
            backup_id=backup_id,
            cluster_password=cluster_password)
        return json.dumps(dict(cluster_url=cluster_zk_url, cluster_password=cluster_password))
    except MysosScheduler.ClusterExists as e:
        raise bottle.HTTPResponse(e.message, status=409)
    except MysosScheduler.InvalidUser as e:
        raise bottle.HTTPResponse(e.message, status=400)
    except MysosScheduler.ServiceUnavailable as e:
        raise bottle.HTTPResponse(e.message, status=503)
    except ValueError as e:
        raise bottle.HTTPResponse(e.message, status=400)
Create a db cluster.
https://github.com/apache/incubator-retired-cotton/blob/4aa9bb0acdd8c609686b5d370ef4b61a520364ef/mysos/scheduler/http.py#L26-L51
import json
import os

from .launcher import MySQLClusterLauncher
from .scheduler import MysosScheduler

import bottle
from mako.template import Template
from twitter.common.http import HttpServer, route, static_file


class MysosServer(HttpServer):
    def __init__(self, scheduler, asset_dir, metric_sampler):
        super(MysosServer, self).__init__()
        self._scheduler = scheduler
        self._asset_dir = asset_dir
        self._static_dir = os.path.join(self._asset_dir, 'static')
        self._template_dir = os.path.join(self._asset_dir, 'templates')
        self._clusters_template = Template(filename=os.path.join(self._template_dir, 'clusters.html'))
        self._metric_sampler = metric_sampler

    @route('/clusters/<clustername>', method=['POST'])
Apache License 2.0
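The create handler above is a bottle route bound to POST /clusters/<clustername>. A hedged client-side sketch follows; the scheduler address is made up, while the form fields and the cluster_url/cluster_password JSON keys come from the handler itself.

import requests  # third-party HTTP client, assumed available

# Hypothetical scheduler address and cluster name.
resp = requests.post(
    "http://mysos-scheduler.example.com:8080/clusters/test_cluster",
    data={"num_nodes": 2, "cluster_user": "mysos", "cluster_password": "secret"},
)
if resp.status_code == 200:
    info = resp.json()
    print(info["cluster_url"])       # ZooKeeper URL for the new cluster
    print(info["cluster_password"])  # password echoed back by the scheduler
else:
    # 409 (cluster exists), 400 (bad input) or 503 (unavailable), per the handler above
    print(resp.status_code, resp.text)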
pwndbg/pwndbg
pwndbg/commands/__init__.py
AddressExpr
python
def AddressExpr(s):
    val = sloppy_gdb_parse(s)
    if not isinstance(val, int):
        raise argparse.ArgumentError('Incorrect address (or GDB expression): %s' % s)
    return val
Parses an address expression. Returns an int.
https://github.com/pwndbg/pwndbg/blob/95e3bb09dfb1d64b132b3e6ffae8882287aa7c24/pwndbg/commands/__init__.py#L352-L361
import argparse import functools import gdb import pwndbg.chain import pwndbg.color import pwndbg.color.message as message import pwndbg.enhance import pwndbg.exception import pwndbg.hexdump import pwndbg.memory import pwndbg.regs import pwndbg.symbol import pwndbg.ui commands = [] command_names = set() def list_current_commands(): current_pagination = gdb.execute('show pagination', to_string=True) current_pagination = current_pagination.split()[-1].rstrip('.') gdb.execute('set pagination off') command_list = gdb.execute('help all', to_string=True).strip().split('\n') existing_commands = set() for line in command_list: line = line.strip() if len(line) == 0 or line.startswith('Command class:') or line.startswith('Unclassified commands'): continue command = line.split()[0] existing_commands.add(command) gdb.execute('set pagination %s' % current_pagination) return existing_commands GDB_BUILTIN_COMMANDS = list_current_commands() class Command(gdb.Command): builtin_override_whitelist = {'up', 'down', 'search', 'pwd', 'start'} history = {} def __init__(self, function, prefix=False, command_name=None): if command_name is None: command_name = function.__name__ super(Command, self).__init__(command_name, gdb.COMMAND_USER, gdb.COMPLETE_EXPRESSION, prefix=prefix) self.function = function if command_name in command_names: raise Exception('Cannot add command %s: already exists.' % command_name) if command_name in GDB_BUILTIN_COMMANDS and command_name not in self.builtin_override_whitelist: raise Exception('Cannot override non-whitelisted built-in command "%s"' % command_name) command_names.add(command_name) commands.append(self) functools.update_wrapper(self, function) self.__name__ = command_name self.repeat = False def split_args(self, argument): return gdb.string_to_argv(argument), {} def invoke(self, argument, from_tty): try: args, kwargs = self.split_args(argument) except SystemExit: return except (TypeError, gdb.error): pwndbg.exception.handle(self.function.__name__) return try: self.repeat = self.check_repeated(argument, from_tty) return self(*args, **kwargs) finally: self.repeat = False def check_repeated(self, argument, from_tty): if not from_tty: return False lines = gdb.execute('show commands', from_tty=False, to_string=True) lines = lines.splitlines() if not lines: return False last_line = lines[-1] number, command = last_line.split(None, 1) try: number = int(number) except ValueError: return False if number not in Command.history: Command.history[number] = command return False if not command.endswith(argument): return False return True def __call__(self, *args, **kwargs): try: return self.function(*args, **kwargs) except TypeError as te: print('%r: %s' % (self.function.__name__.strip(), self.function.__doc__.strip())) pwndbg.exception.handle(self.function.__name__) except Exception: pwndbg.exception.handle(self.function.__name__) class ParsedCommand(Command): sloppy = False quiet = False def split_args(self, argument): argv, _ = super(ParsedCommand, self).split_args(argument) return list(filter(lambda x: x is not None, map(self.fix, argv))), {} def fix(self, arg): return fix(arg, self.sloppy, self.quiet) class ParsedCommandPrefix(ParsedCommand): def __init__(self, function, prefix=True): super(ParsedCommand, self).__init__(function, prefix) def fix(arg, sloppy=False, quiet=True, reraise=False): if isinstance(arg, gdb.Value): return arg try: parsed = gdb.parse_and_eval(arg) return parsed except Exception: pass try: arg = pwndbg.regs.fix(arg) return gdb.parse_and_eval(arg) except Exception 
as e: if not quiet: print(e) if reraise: raise e pass if sloppy: return arg return None def fix_int(*a, **kw): return int(fix(*a,**kw)) def fix_int_reraise(*a, **kw): return fix(*a, reraise=True, **kw) def OnlyWithFile(function): @functools.wraps(function) def _OnlyWithFile(*a, **kw): if pwndbg.proc.exe: return function(*a, **kw) else: if pwndbg.qemu.is_qemu(): print(message.error("Could not determine the target binary on QEMU.")) else: print(message.error("%s: There is no file loaded." % function.__name__)) return _OnlyWithFile def OnlyWhenRunning(function): @functools.wraps(function) def _OnlyWhenRunning(*a, **kw): if pwndbg.proc.alive: return function(*a, **kw) else: print("%s: The program is not being run." % function.__name__) return _OnlyWhenRunning def OnlyWithTcache(function): @functools.wraps(function) def _OnlyWithTcache(*a, **kw): if pwndbg.heap.current.has_tcache(): return function(*a, **kw) else: print("%s: This version of GLIBC was not compiled with tcache support." % function.__name__) return _OnlyWithTcache def OnlyWhenHeapIsInitialized(function): @functools.wraps(function) def _OnlyWhenHeapIsInitialized(*a, **kw): if pwndbg.heap.current.is_initialized(): return function(*a, **kw) else: print("%s: Heap is not initialized yet." % function.__name__) return _OnlyWhenHeapIsInitialized def OnlyAmd64(function): @functools.wraps(function) def _OnlyAmd64(*a, **kw): if pwndbg.arch.current == "x86-64": return function(*a, **kw) else: print("%s: Only works with \"x86-64\" arch." % function.__name__) return _OnlyAmd64 def OnlyWithLibcDebugSyms(function): @functools.wraps(function) def _OnlyWithLibcDebugSyms(*a, **kw): if pwndbg.heap.current.libc_has_debug_syms(): return function(*a, **kw) else: print('''%s: This command only works with libc debug symbols. They can probably be installed via the package manager of your choice. See also: https://sourceware.org/gdb/onlinedocs/gdb/Separate-Debug-Files.html E.g. 
on Ubuntu/Debian you might need to do the following steps (for 64-bit and 32-bit binaries): sudo apt-get install libc6-dbg sudo dpkg --add-architecture i386 sudo apt-get install libc-dbg:i386 ''' % function.__name__) return _OnlyWithLibcDebugSyms class QuietSloppyParsedCommand(ParsedCommand): def __init__(self, *a, **kw): super(QuietSloppyParsedCommand, self).__init__(*a, **kw) self.quiet = True self.sloppy = True class _ArgparsedCommand(Command): def __init__(self, parser, function, command_name=None, *a, **kw): self.parser = parser if command_name is None: self.parser.prog = function.__name__ else: self.parser.prog = command_name self.__doc__ = function.__doc__ = self.parser.description.strip() super(_ArgparsedCommand, self).__init__(function, command_name=command_name, *a, **kw) def split_args(self, argument): argv = gdb.string_to_argv(argument) return tuple(), vars(self.parser.parse_args(argv)) class ArgparsedCommand: def __init__(self, parser_or_desc, aliases=[]): if isinstance(parser_or_desc, str): self.parser = argparse.ArgumentParser(description=parser_or_desc) else: self.parser = parser_or_desc self.aliases = aliases for action in self.parser._actions: if action.dest == 'help': continue if action.type in (int, None): action.type = fix_int_reraise if action.default is not None: action.help += ' (default: %(default)s)' def __call__(self, function): for alias in self.aliases: _ArgparsedCommand(self.parser, function, alias) return _ArgparsedCommand(self.parser, function) _mask = 0xffffffffFFFFFFFF _mask_val_type = gdb.Value(_mask).type def sloppy_gdb_parse(s): try: val = gdb.parse_and_eval(s) return int(val.cast(_mask_val_type)) except (TypeError, gdb.error): return s
MIT License
pymeasure/pymeasure
pymeasure/instruments/ni/virtualbench.py
VirtualBench.get_library_version
python
def get_library_version(self):
    return self.vb.get_library_version()
Return the version of the VirtualBench runtime library
https://github.com/pymeasure/pymeasure/blob/658d8fb9a02bdb62f64cc3838875c0de12f49ca1/pymeasure/instruments/ni/virtualbench.py#L135-L138
import logging import re from ctypes import (c_bool, c_size_t, c_double, c_uint8, c_int32, c_uint32, c_int64, c_uint64, c_wchar, c_wchar_p, Structure, c_int, cdll, byref) from datetime import datetime, timezone, timedelta import numpy as np import pandas as pd from pymeasure.instruments import Instrument, RangeException from pymeasure.instruments.validators import ( strict_discrete_set, strict_discrete_range, truncated_discrete_set, strict_range ) log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) try: import pyvirtualbench as pyvb except ModuleNotFoundError as err: log.info('Failed loading the pyvirtualbench package. ' + 'Check the NI VirtualBench documentation on how to ' + 'install this external dependency. ' + 'ImportError: {}'.format(err)) raise class VirtualBench_Direct(pyvb.PyVirtualBench): def __init__(self, device_name='', name='VirtualBench'): self.device_name = device_name self.name = name self.nilcicapi = cdll.LoadLibrary("nilcicapi") self.library_handle = c_int(0) status = self.nilcicapi.niVB_Initialize(pyvb.NIVB_LIBRARY_VERSION, byref(self.library_handle)) if (status != pyvb.Status.SUCCESS): raise pyvb.PyVirtualBenchException(status, self.nilcicapi, self.library_handle) log.info("Initializing %s." % self.name) def __del__(self): self.release() class VirtualBench(): def __init__(self, device_name='', name='VirtualBench'): self.device_name = device_name self.name = name self.vb = pyvb.PyVirtualBench(self.device_name) log.info("Initializing %s." % self.name) def __del__(self): if self.vb.library_handle is not None: self.vb.release() def shutdown(self): log.info("Shutting down %s" % self.name) self.vb.release() self.isShutdown = True
MIT License
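A short, hedged usage sketch for the wrapper above; it needs NI's VirtualBench runtime plus the pyvirtualbench package, and the device name is a placeholder.

from pymeasure.instruments.ni.virtualbench import VirtualBench

vb = VirtualBench(device_name="VB8012-XXXXXXX")  # placeholder device name
print(vb.get_library_version())  # version string of the VirtualBench runtime library
vb.shutdown()                    # releases the underlying pyvirtualbench handle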
openbazaar/openbazaar-server
market/moderation.py
process_dispute
python
def process_dispute(contract, db, message_listener, notification_listener, testnet):
    tmp_contract = deepcopy(contract)
    if "vendor_order_confirmation" in tmp_contract:
        del tmp_contract["vendor_order_confirmation"]
    if "buyer_receipt" in tmp_contract:
        del tmp_contract["buyer_receipt"]
    del tmp_contract["dispute"]
    order_id = digest(json.dumps(tmp_contract, indent=4)).encode("hex")
    own_guid = KeyChain(db).guid.encode("hex")
    if contract["dispute"]["info"]["guid"] == contract["vendor_offer"]["listing"]["id"]["guid"]:
        guid = unhexlify(contract["vendor_offer"]["listing"]["id"]["guid"])
        public_key = unhexlify(contract["vendor_offer"]["listing"]["id"]["pubkeys"]["guid"])
        if "blockchain_id" in contract["vendor_offer"]["listing"]["id"]:
            handle = contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
        else:
            handle = ""
        proof_sig = None
    elif contract["dispute"]["info"]["guid"] == contract["buyer_order"]["order"]["id"]["guid"]:
        guid = unhexlify(contract["buyer_order"]["order"]["id"]["guid"])
        public_key = unhexlify(contract["buyer_order"]["order"]["id"]["pubkeys"]["guid"])
        if "blockchain_id" in contract["buyer_order"]["order"]["id"]:
            handle = contract["buyer_order"]["order"]["id"]["blockchain_id"]
        else:
            handle = ""
        proof_sig = contract["dispute"]["info"]["proof_sig"]
    else:
        raise Exception("Dispute guid not in contract")
    verify_key = nacl.signing.VerifyKey(public_key)
    verify_key.verify(json.dumps(contract["dispute"]["info"], indent=4),
                      base64.b64decode(contract["dispute"]["signature"]))
    p = PlaintextMessage()
    p.sender_guid = guid
    p.handle = handle
    p.pubkey = public_key
    p.subject = str(order_id)
    p.type = PlaintextMessage.Type.Value("DISPUTE_OPEN")
    p.message = str(contract["dispute"]["info"]["claim"])
    p.timestamp = int(time.time())
    p.avatar_hash = unhexlify(str(contract["dispute"]["info"]["avatar_hash"]))
    if db.purchases.get_purchase(order_id) is not None:
        db.purchases.status_changed(order_id, 1)
        db.purchases.update_status(order_id, 4)
    elif db.sales.get_sale(order_id) is not None:
        db.sales.status_changed(order_id, 1)
        db.sales.update_status(order_id, 4)
    elif "moderators" in contract["vendor_offer"]["listing"]:
        is_selected = False
        for moderator in contract["vendor_offer"]["listing"]["moderators"]:
            if moderator["guid"] == own_guid and contract["buyer_order"]["order"]["moderator"] == own_guid:
                is_selected = True
        if not is_selected:
            raise Exception("Not a moderator for this contract")
        else:
            if "blockchain_id" in contract["vendor_offer"]["listing"]["id"] and contract["vendor_offer"]["listing"]["id"]["blockchain_id"] != "":
                vendor = contract["vendor_offer"]["listing"]["id"]["blockchain_id"]
            else:
                vendor = contract["vendor_offer"]["listing"]["id"]["guid"]
            if "blockchain_id" in contract["buyer_order"]["order"]["id"] and contract["buyer_order"]["order"]["id"]["blockchain_id"] != "":
                buyer = contract["buyer_order"]["order"]["id"]["blockchain_id"]
            else:
                buyer = contract["buyer_order"]["order"]["id"]["guid"]
            c = Contract(db, contract=contract, testnet=testnet)
            validation_failures = c.validate_for_moderation(proof_sig)
            db.cases.new_case(order_id,
                              contract["vendor_offer"]["listing"]["item"]["title"],
                              time.time(),
                              contract["buyer_order"]["order"]["date"],
                              float(contract["buyer_order"]["order"]["payment"]["amount"]),
                              contract["vendor_offer"]["listing"]["item"]["image_hashes"][0],
                              buyer, vendor,
                              json.dumps(validation_failures),
                              contract["dispute"]["info"]["claim"])
            with open(os.path.join(DATA_FOLDER, "cases", order_id + ".json"), 'wb') as outfile:
                outfile.write(json.dumps(contract, indent=4))
    else:
        raise Exception("Order ID for dispute not found")
    message_listener.notify(p, "")
    title = contract["vendor_offer"]["listing"]["item"]["title"]
    notification_listener.notify(guid, handle, "dispute_open", order_id, title,
                                 unhexlify(contract["vendor_offer"]["listing"]["item"]["image_hashes"][0]))
    notification = SMTPNotification(db)
    guid = guid.encode("hex")
    notification.send("[OpenBazaar] Dispute Opened",
                      "A dispute has been opened.\n\n"
                      "Order: %s\n"
                      "Opened By: %s\n"
                      "Title: %s" % (order_id, guid, title))
This function processes a dispute message received from another node. It checks the contract to see if this is a dispute for a purchase we made, a dispute for one of our sales, or a new case if we are the moderator. If it's a purchase or sale it will update the order status to disputed and push a notification to the listener. If it's a new case it will validate the contract, create a new case in the db, and push a notification to the listener. Args: contract: a json contract of the current order state. Should have a "dispute" object attached with dispute info. db: a `Database` object. message_listener: a `MessageListenerImpl` object. notification_listener: a `NotificationListenerImpl` object. testnet: `bool` of whether we're on testnet or not.
https://github.com/openbazaar/openbazaar-server/blob/6bfd8f0c899d7133dadd38d1afdd537a2e182ff1/market/moderation.py#L17-L139
import base64
import json
import nacl.signing
import os
import time
from binascii import unhexlify
from collections import OrderedDict
from config import DATA_FOLDER
from copy import deepcopy
from dht.utils import digest
from keys.keychain import KeyChain
from market.contracts import Contract
from protos.objects import PlaintextMessage
from market.smtpnotification import SMTPNotification
MIT License
duerrp/pyexperiment
pyexperiment/replicate.py
collect_results
python
def collect_results(key, subkey_pattern=SUBSTATE_KEY_PATTERN, no_replicates=None):
    result = []
    no_replicates = no_replicates or int(conf['pyexperiment.n_replicates'])
    for i in range(no_replicates):
        with substate_context(subkey_pattern % i):
            result.append(state[key])
    return result
Collect the results of a replicated experiment in a list
https://github.com/duerrp/pyexperiment/blob/c426565d870d944bd5b9712629d8f1ba2527c67f/pyexperiment/replicate.py#L127-L137
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import import multiprocessing import functools import traceback import threading from pyexperiment.state_context import substate_context from pyexperiment.state_context import processing_state_context from pyexperiment import conf from pyexperiment import state from pyexperiment import log from six.moves import range SUBSTATE_KEY_PATTERN = 'replicate%03d' def _replicate_single_thread(function, no_replicates, subkey_pattern=SUBSTATE_KEY_PATTERN): for i in range(no_replicates): with substate_context(subkey_pattern % i): log.debug("Running " + subkey_pattern % i) function() class TargetCreator(object): def __init__(self, target, ready_queue, context): self.target = target self.ready_queue = ready_queue self.context = context try: functools.update_wrapper(self, target) except AttributeError: pass def __call__(self): with substate_context(self.context): self.ready_queue.put(True) log.debug("Running " + self.context) try: result = self.target() except Exception as err: log.fatal("Error in sub-process: %s", traceback.format_exc()) raise err return result def _replicate_multiprocessing(function, no_replicates, no_processes, subkey_pattern=SUBSTATE_KEY_PATTERN): with processing_state_context(): pool = multiprocessing.Pool(processes=no_processes) manager = multiprocessing.Manager() ready_queue = manager.Queue() result_threads = [] for i in range(no_replicates): target = TargetCreator(function, ready_queue, subkey_pattern % i) result = pool.apply_async(target) waiter = threading.Thread(target=result.get) waiter.start() result_threads.append(waiter) while not ready_queue.get(): pass log.debug("Closing pool") pool.close() log.debug("Joining pool") pool.join() log.debug("Joining threads") for thread in result_threads: thread.join() def replicate(function, no_replicates=None, subkey_pattern=SUBSTATE_KEY_PATTERN, parallel=False, no_processes=None): no_replicates = no_replicates or int(conf['pyexperiment.n_replicates']) if not parallel: _replicate_single_thread(function, no_replicates, subkey_pattern) else: no_processes = no_processes or int(conf['pyexperiment.n_processes']) _replicate_multiprocessing(function, no_replicates=no_replicates, no_processes=no_processes, subkey_pattern=subkey_pattern)
MIT License
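A hedged sketch tying collect_results to the replicate machinery shown in the context: each replicate writes a value into pyexperiment's state inside its own substate, and collect_results gathers those values afterwards. The key name 'score' and the use of state as a mapping are assumptions for illustration.

import random

from pyexperiment import state
from pyexperiment.replicate import replicate, collect_results

def experiment():
    # Runs inside a per-replicate substate_context, so the key is stored
    # under replicate000, replicate001, ... without clashing.
    state['score'] = random.random()

replicate(experiment, no_replicates=3)
scores = collect_results('score', no_replicates=3)
print(scores)  # one entry per replicate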
dissectmalware/xlrd2
xlrd2/__init__.py
open_workbook
python
def open_workbook(filename=None,
                  logfile=sys.stdout,
                  verbosity=0,
                  use_mmap=USE_MMAP,
                  file_contents=None,
                  encoding_override=None,
                  formatting_info=False,
                  on_demand=False,
                  ragged_rows=False,
                  ignore_workbook_corruption=False):
    peeksz = 4
    if file_contents:
        peek = file_contents[:peeksz]
    else:
        filename = os.path.expanduser(filename)
        with open(filename, "rb") as f:
            peek = f.read(peeksz)
    if peek == b"PK\x03\x04":
        if file_contents:
            zf = zipfile.ZipFile(timemachine.BYTES_IO(file_contents))
        else:
            zf = zipfile.ZipFile(filename)
        component_names = dict([(X12Book.convert_filename(name), name)
                                for name in zf.namelist()])
        if verbosity:
            logfile.write('ZIP component_names:\n')
            pprint.pprint(component_names, logfile)
        if 'xl/workbook.xml' in component_names:
            from . import xlsx
            bk = xlsx.open_workbook_2007_xml(
                zf,
                component_names,
                logfile=logfile,
                verbosity=verbosity,
                use_mmap=use_mmap,
                formatting_info=formatting_info,
                on_demand=on_demand,
                ragged_rows=ragged_rows,
            )
            return bk
        if 'xl/workbook.bin' in component_names:
            raise XLRDError('Excel 2007 xlsb file; not supported')
        if 'content.xml' in component_names:
            raise XLRDError('Openoffice.org ODS file; not supported')
        raise XLRDError('ZIP file contents not a known type of workbook')
    from . import book
    bk = book.open_workbook_xls(
        filename=filename,
        logfile=logfile,
        verbosity=verbosity,
        use_mmap=use_mmap,
        file_contents=file_contents,
        encoding_override=encoding_override,
        formatting_info=formatting_info,
        on_demand=on_demand,
        ragged_rows=ragged_rows,
        ignore_workbook_corruption=ignore_workbook_corruption,
    )
    return bk
Open a spreadsheet file for data extraction. :param filename: The path to the spreadsheet file to be opened. :param logfile: An open file to which messages and diagnostics are written. :param verbosity: Increases the volume of trace material written to the logfile. :param use_mmap: Whether to use the mmap module is determined heuristically. Use this arg to override the result. Current heuristic: mmap is used if it exists. :param file_contents: A string or an :class:`mmap.mmap` object or some other behave-alike object. If ``file_contents`` is supplied, ``filename`` will not be used, except (possibly) in messages. :param encoding_override: Used to overcome missing or bad codepage information in older-version files. See :doc:`unicode`. :param formatting_info: The default is ``False``, which saves memory. In this case, "Blank" cells, which are those with their own formatting information but no data, are treated as empty by ignoring the file's ``BLANK`` and ``MULBLANK`` records. This cuts off any bottom or right "margin" of rows of empty or blank cells. Only :meth:`~xlrd2.sheet.Sheet.cell_value` and :meth:`~xlrd2.sheet.Sheet.cell_type` are available. When ``True``, formatting information will be read from the spreadsheet file. This provides all cells, including empty and blank cells. Formatting information is available for each cell. Note that this will raise a NotImplementedError when used with an xlsx file. :param on_demand: Governs whether sheets are all loaded initially or when demanded by the caller. See :doc:`on_demand`. :param ragged_rows: The default of ``False`` means all rows are padded out with empty cells so that all rows have the same size as found in :attr:`~xlrd2.sheet.Sheet.ncols`. ``True`` means that there are no empty cells at the ends of rows. This can result in substantial memory savings if rows are of widely varying sizes. See also the :meth:`~xlrd2.sheet.Sheet.row_len` method. :param ignore_workbook_corruption: This option allows to read corrupted workbooks. When ``False`` you may face CompDocError: Workbook corruption. When ``True`` that exception will be ignored. :returns: An instance of the :class:`~xlrd2.book.Book` class.
https://github.com/dissectmalware/xlrd2/blob/c6701ead96fadc6d9aafc07f3a79ef4ff721013a/xlrd2/__init__.py#L33-L169
import os import pprint import sys import zipfile from . import timemachine from .biffh import ( XL_CELL_BLANK, XL_CELL_BOOLEAN, XL_CELL_DATE, XL_CELL_EMPTY, XL_CELL_ERROR, XL_CELL_NUMBER, XL_CELL_TEXT, XLRDError, biff_text_from_num, error_text_from_code, ) from .book import Book, colname from .formula import * from .info import __VERSION__, __version__ from .sheet import empty_cell from .xldate import XLDateError, xldate_as_datetime, xldate_as_tuple from .xlsx import X12Book if sys.version.startswith("IronPython"): import encodings try: import mmap MMAP_AVAILABLE = 1 except ImportError: MMAP_AVAILABLE = 0 USE_MMAP = MMAP_AVAILABLE
Apache License 2.0
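A minimal usage sketch for open_workbook based on the signature and docstring above; the file name is a placeholder and the Book/Sheet accessors are assumed to mirror the classic xlrd API that xlrd2 forks.

import xlrd2

# Placeholder path; any legacy .xls (or .xlsx) workbook would do.
book = xlrd2.open_workbook("example.xls")
sheet = book.sheet_by_index(0)
print(sheet.nrows, sheet.ncols)
print(sheet.cell_value(0, 0))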
mjhoptics/ray-optics
src/rayoptics/raytr/analyses.py
wave_abr_full_calc
python
def wave_abr_full_calc(fod, fld, wvl, foc, ray_pkg, chief_ray_pkg, ref_sphere):
    image_pt, ref_dir, ref_sphere_radius = ref_sphere
    cr, cr_exp_seg = chief_ray_pkg
    chief_ray, chief_ray_op, wvl = cr
    cr_exp_pt, cr_exp_dir, cr_exp_dist, ifc, cr_b4_pt, cr_b4_dir = cr_exp_seg
    ray, ray_op, wvl = ray_pkg

    k = -2
    e1 = eic_distance((ray[1][mc.p], ray[0][mc.d]),
                      (chief_ray[1][mc.p], chief_ray[0][mc.d]))
    ekp = eic_distance((ray[k][mc.p], ray[k][mc.d]),
                       (chief_ray[k][mc.p], chief_ray[k][mc.d]))

    b4_pt, b4_dir = transform_after_surface(ifc, (ray[k][mc.p], ray[k][mc.d]))
    dst = ekp - cr_exp_dist
    eic_exp_pt = b4_pt - dst*b4_dir
    p_coord = eic_exp_pt - cr_exp_pt

    F = ref_dir.dot(b4_dir) - b4_dir.dot(p_coord)/ref_sphere_radius
    J = p_coord.dot(p_coord)/ref_sphere_radius - 2.0*ref_dir.dot(p_coord)

    sign_soln = -1 if ref_dir[2]*cr.ray[-1][mc.d][2] < 0 else 1
    denom = F + sign_soln*sqrt(F**2 + J/ref_sphere_radius)
    ep = 0 if denom == 0 else J/denom

    n_obj = abs(fod.n_obj)
    n_img = abs(fod.n_img)
    opd = -n_obj*e1 - ray_op + n_img*ekp + chief_ray_op - n_img*ep
    return opd
Given a ray, a chief ray and an image pt, evaluate the OPD. The main references for the calculations are in the H. H. Hopkins paper `Calculation of the Aberrations and Image Assessment for a General Optical System <https://doi.org/10.1080/713820605>`_ Args: fod: :class:`~.FirstOrderData` for object and image space refractive indices fld: :class:`~.Field` point for wave aberration calculation wvl: wavelength of ray (nm) foc: defocus amount ray_pkg: input tuple of ray, ray_op, wvl chief_ray_pkg: input tuple of chief_ray, cr_exp_seg ref_sphere: input tuple of image_pt, ref_dir, ref_sphere_radius Returns: opd: OPD of ray wrt chief ray at **fld**
https://github.com/mjhoptics/ray-optics/blob/3b2c9ab9100dd9e0cc9c52c33655dc69286ad40e/src/rayoptics/raytr/analyses.py#L124-L176
from math import sqrt import numpy as np from numpy.fft import fftshift, fft2 from scipy.interpolate import interp1d from rayoptics.util.misc_math import normalize import rayoptics.optical.model_constants as mc from rayoptics.raytr import sampler from rayoptics.raytr.raytrace import eic_distance from rayoptics.elem.transform import transform_after_surface from rayoptics.raytr import trace from rayoptics.raytr import traceerror as terr def get_chief_ray_pkg(opt_model, fld, wvl, foc): if fld.chief_ray is None: trace.aim_chief_ray(opt_model, fld, wvl=wvl) chief_ray_pkg = trace.trace_chief_ray(opt_model, fld, wvl, foc) fld.chief_ray = chief_ray_pkg elif fld.chief_ray[0][2] != wvl: chief_ray_pkg = trace.trace_chief_ray(opt_model, fld, wvl, foc) fld.chief_ray = chief_ray_pkg else: chief_ray_pkg = fld.chief_ray return chief_ray_pkg def setup_exit_pupil_coords(opt_model, fld, wvl, foc, chief_ray_pkg, image_pt_2d=None): cr, cr_exp_seg = chief_ray_pkg if image_pt_2d is None: dist = foc / cr.ray[-1][mc.d][2] image_pt = cr.ray[-1][mc.p] + dist*cr.ray[-1][mc.d] else: image_pt = np.array([image_pt_2d[0], image_pt_2d[1], foc]) image_thi = opt_model['seq_model'].gaps[-1].thi img_pt = np.array(image_pt) img_pt[2] += image_thi ref_sphere_vec = img_pt - cr_exp_seg[mc.p] ref_sphere_radius = np.linalg.norm(ref_sphere_vec) ref_dir = normalize(ref_sphere_vec) ref_sphere = (image_pt, ref_dir, ref_sphere_radius) return ref_sphere
BSD 3-Clause New or Revised License
flennerhag/mlens
mlens/config.py
set_dtype
python
def set_dtype(dtype):
    global _DTYPE
    _DTYPE = dtype
Set the dtype to use during estimation. Parameters ---------- dtype : object numpy dtype
https://github.com/flennerhag/mlens/blob/6cbc11354b5f9500a33d9cefb700a1bba9d3199a/mlens/config.py#L133-L142
from __future__ import print_function import os import sys import shutil import tempfile import warnings import sysconfig import subprocess from multiprocessing import current_process import numpy _DTYPE = getattr(numpy, os.environ.get('MLENS_DTYPE', 'float32')) _TMPDIR = os.environ.get('MLENS_TMPDIR', tempfile.gettempdir()) _PREFIX = os.environ.get('MLENS_PREFIX', ".mlens_tmp_cache_") _BACKEND = os.environ.get('MLENS_BACKEND', 'threading') _START_METHOD = os.environ.get('MLENS_START_METHOD', '') _VERBOSE = os.environ.get('MLENS_VERBOSE', 'Y') _IVALS = os.environ.get('MLENS_IVALS', '0.01_120').split('_') _IVALS = (float(_IVALS[0]), float(_IVALS[1])) _PY_VERSION = float(sysconfig._PY_VERSION_SHORT) def get_ivals(): return _IVALS def get_dtype(): return _DTYPE def get_prefix(): return _PREFIX def get_backend(): return _BACKEND def get_start_method(): return _START_METHOD def get_tmpdir(): return _TMPDIR def set_tmpdir(tmp): global _TMPDIR _TMPDIR = tmp def set_prefix(prefix): global _PREFIX _PREFIX = prefix
MIT License
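A small sketch showing how set_dtype pairs with the get_dtype accessor from the context; the switch to float64 is only an example, since mlens defaults to numpy.float32 via the MLENS_DTYPE environment variable.

import numpy as np
from mlens import config

print(config.get_dtype())      # default dtype, normally numpy.float32
config.set_dtype(np.float64)   # estimate in double precision from here on
print(config.get_dtype())      # now numpy.float64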
sofia-netsurv/python-netsurv
env/lib/python3.5/site-packages/astroid/as_string.py
AsStringVisitor.__call__
python
def __call__(self, node):
    return node.accept(self).replace(DOC_NEWLINE, "\n")
Makes this visitor behave as a simple function
https://github.com/sofia-netsurv/python-netsurv/blob/429fb07a2b06cc505fdd9350148266a6b4e23e64/env/lib/python3.5/site-packages/astroid/as_string.py#L38-L40
import sys

DOC_NEWLINE = "\0"


class AsStringVisitor:
    def __init__(self, indent):
        self.indent = indent
MIT License
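__call__ is what lets the visitor be applied to a node like a plain function; in practice it is usually reached indirectly through node.as_string(). A hedged sketch assuming astroid's public extract_node/as_string API:

import astroid

node = astroid.extract_node("x = [i * 2 for i in range(3)]")
# as_string() renders the AST back to source text; the module above drives an
# AsStringVisitor over the node to do so.
print(node.as_string())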
cta-observatory/ctapipe
ctapipe/tools/info.py
_info_version
python
def _info_version():
    import ctapipe

    print("\n*** ctapipe version info ***\n")
    print(f"version: {ctapipe.__version__}")
    print("")
Print version info.
https://github.com/cta-observatory/ctapipe/blob/8851e1214409eac4564996cc0f4b76dfe05cf9cf/ctapipe/tools/info.py#L101-L109
import logging import os import sys from .utils import get_parser from ..core import Provenance, get_module_version from ..core.plugins import detect_and_import_io_plugins from ..utils import datasets from pkg_resources import resource_filename __all__ = ["info"] _dependencies = sorted( [ "astropy", "matplotlib", "numpy", "traitlets", "sklearn", "scipy", "numba", "pytest", "iminuit", "tables", "eventio", ] ) _optional_dependencies = sorted( ["ctapipe_resources", "pytest", "graphviz", "matplotlib"] ) def main(args=None): parser = get_parser(info) parser.add_argument("--version", action="store_true", help="Print version number") parser.add_argument( "--tools", action="store_true", help="Print available command line tools" ) parser.add_argument( "--dependencies", action="store_true", help="Print available versions of dependencies", ) parser.add_argument( "--resources", action="store_true", help="Print available versions of dependencies", ) parser.add_argument("--system", action="store_true", help="Print system info") parser.add_argument( "--all", dest="show_all", action="store_true", help="show all info" ) parser.add_argument("--plugins", action="store_true", help="Print plugin info") args = parser.parse_args(args) if len(sys.argv) <= 1: parser.print_help() sys.exit(1) info(**vars(args)) def info( version=False, tools=False, dependencies=False, resources=False, system=False, plugins=False, show_all=False, ): logging.basicConfig(level=logging.INFO, format="%(levelname)s - %(message)s") if version or show_all: _info_version() if tools or show_all: _info_tools() if dependencies or show_all: _info_dependencies() if resources or show_all: _info_resources() if system or show_all: _info_system() if plugins or show_all: _info_plugins()
BSD 3-Clause New or Revised License
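_info_version is reached through the info() dispatcher shown in the context. A minimal sketch:

from ctapipe.tools.info import info

# Equivalent to passing --version on the command line; prints the ctapipe version block.
info(version=True)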
napalm-automation-community/napalm-fortios
napalm_fortios/fortios.py
FortiOSDriver.is_alive
python
def is_alive(self):
    return {
        'is_alive': self.device.ssh.get_transport().is_active()
    }
Returns a flag with the state of the SSH connection.
https://github.com/napalm-automation-community/napalm-fortios/blob/bdcb3dc338bb992f14c1584d4473bf4c05b76b2c/napalm_fortios/fortios.py#L52-L56
from __future__ import unicode_literals import re from pyFG.fortios import FortiOS, FortiConfig, logger from pyFG.exceptions import FailedCommit, CommandExecutionException from napalm_base.exceptions import ReplaceConfigException, MergeConfigException from napalm_base.utils.string_parsers import colon_separated_string_to_dict, convert_uptime_string_seconds from napalm_base.utils import py23_compat import napalm_base.helpers try: from napalm.base.base import NetworkDriver except ImportError: from napalm_base.base import NetworkDriver class FortiOSDriver(NetworkDriver): def __init__(self, hostname, username, password, timeout=60, optional_args=None): self.hostname = hostname self.username = username self.password = password if optional_args is not None: self.vdom = optional_args.get('fortios_vdom', None) else: self.vdom = None self.device = FortiOS(hostname, username=username, password=password, timeout=timeout, vdom=self.vdom) self.config_replace = False def open(self): self.device.open() def close(self): self.device.close()
Apache License 2.0
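A minimal usage sketch for the is_alive record above, based on the constructor and open()/close() methods shown in that record's context; the hostname and credentials are hypothetical and a reachable FortiOS device is assumed.
from napalm_fortios.fortios import FortiOSDriver

driver = FortiOSDriver("fw.example.com", "admin", "secret")  # hypothetical host/credentials
driver.open()              # opens the underlying pyFG/SSH session
print(driver.is_alive())   # -> {'is_alive': True} while the SSH transport is active
driver.close()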
hyroai/gamla
gamla/string_utils.py
replace_in_text
python
def replace_in_text(old: Text, new: Text):
    def replace_in_text(txt: Text):
        return txt.replace(old, new)

    return replace_in_text
Return a copy of the string with all occurrences of substring old replaced by new >>> txt = "hello world" >>> replace_in_text("world", "Jhon")(txt) 'hello Jhon'
https://github.com/hyroai/gamla/blob/371d895ecff2833ca68d13025b695b26cd68ac17/gamla/string_utils.py#L5-L15
import re from typing import Text
MIT License
afmurillo/dhalsim
dhalsim/python2/automatic_plc.py
PlcControl.start_plc
python
def start_plc(self):
    generic_plc_path = Path(__file__).parent.absolute() / "generic_plc.py"
    if self.data['log_level'] == 'debug':
        err_put = sys.stderr
        out_put = sys.stdout
    else:
        err_put = open(empty_loc, 'w')
        out_put = open(empty_loc, 'w')
    cmd = ["python2", str(generic_plc_path), str(self.intermediate_yaml), str(self.plc_index)]
    plc_process = subprocess.Popen(cmd, shell=False, stderr=err_put, stdout=out_put)
    return plc_process
Start a plc process.
https://github.com/afmurillo/dhalsim/blob/16071598bb21f6125159678f2ac42169a4e2613d/dhalsim/python2/automatic_plc.py#L76-L90
import argparse import os import signal import subprocess import sys from pathlib import Path from automatic_node import NodeControl from py2_logger import get_logger empty_loc = '/dev/null' class PlcControl(NodeControl): def __init__(self, intermediate_yaml, plc_index): super(PlcControl, self).__init__(intermediate_yaml) self.logger = get_logger(self.data['log_level']) self.plc_index = plc_index self.output_path = Path(self.data["output_path"]) self.process_tcp_dump = None self.plc_process = None self.this_plc_data = self.data["plcs"][self.plc_index] def terminate(self): self.logger.debug("Stopping tcpdump process on PLC.") self.process_tcp_dump.send_signal(signal.SIGINT) self.process_tcp_dump.wait() if self.process_tcp_dump.poll() is None: self.process_tcp_dump.terminate() if self.process_tcp_dump.poll() is None: self.process_tcp_dump.kill() self.logger.debug("Stopping PLC.") self.plc_process.send_signal(signal.SIGINT) self.plc_process.wait() if self.plc_process.poll() is None: self.plc_process.terminate() if self.plc_process.poll() is None: self.plc_process.kill() def main(self): self.process_tcp_dump = self.start_tcpdump_capture() self.plc_process = self.start_plc() while self.plc_process.poll() is None: pass self.terminate() def start_tcpdump_capture(self): pcap = self.output_path / (self.this_plc_data["interface"] + '.pcap') no_output = open(empty_loc, 'w') tcp_dump = subprocess.Popen(['tcpdump', '-i', self.this_plc_data["interface"], '-w', str(pcap)], shell=False, stderr=no_output, stdout=no_output) return tcp_dump
MIT License
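The start_plc record above follows a common pattern: choose output streams based on a log level, then spawn a child interpreter with subprocess.Popen. A generalized, self-contained sketch of that pattern (not dhalsim's exact code, which targets python2 and /dev/null); the script name is hypothetical.
import os
import subprocess
import sys

def launch_worker(script_path, log_level):
    # Forward child output only in debug mode; otherwise discard it.
    if log_level == "debug":
        out, err = sys.stdout, sys.stderr
    else:
        out = err = open(os.devnull, "w")
    return subprocess.Popen([sys.executable, script_path], shell=False,
                            stdout=out, stderr=err)

proc = launch_worker("worker.py", "debug")   # "worker.py" is a placeholder script
proc.wait()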
purestorage-openconnect/py-pure-client
pypureclient/flashblade/FB_2_1/models/user_quota_post.py
UserQuotaPost.to_dict
python
def to_dict(self):
    result = {}
    for attr, _ in six.iteritems(self.swagger_types):
        if hasattr(self, attr):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
    if issubclass(UserQuotaPost, dict):
        for key, value in self.items():
            result[key] = value
    return result
Returns the model properties as a dict
https://github.com/purestorage-openconnect/py-pure-client/blob/2d9fdef0b73321cea9613e7d1eb881b42845099b/pypureclient/flashblade/FB_2_1/models/user_quota_post.py#L74-L100
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.flashblade.FB_2_1 import models class UserQuotaPost(object): swagger_types = { 'name': 'str', 'quota': 'int' } attribute_map = { 'name': 'name', 'quota': 'quota' } required_args = { 'quota', } def __init__( self, quota, name=None, ): if name is not None: self.name = name self.quota = quota def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `UserQuotaPost`".format(key)) if key == "quota" and value is None: raise ValueError("Invalid value for `quota`, must not be `None`") self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): return None else: return value
BSD 2-Clause Simplified License
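A small usage sketch for the to_dict record above, based on the constructor shown in the record's context (quota is required, name optional); the import path mirrors the record's file path and the values are made up.
from pypureclient.flashblade.FB_2_1.models.user_quota_post import UserQuotaPost

quota = UserQuotaPost(quota=1024 * 1024, name="alice")  # hypothetical quota (bytes) and user
print(quota.to_dict())                                  # -> {'name': 'alice', 'quota': 1048576}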
microsoft/azure-devops-python-api
azure-devops/azure/devops/v6_0/work_item_tracking_process/work_item_tracking_process_client.py
WorkItemTrackingProcessClient.get_all_work_item_type_fields
python
def get_all_work_item_type_fields(self, process_id, wit_ref_name):
    route_values = {}
    if process_id is not None:
        route_values['processId'] = self._serialize.url('process_id', process_id, 'str')
    if wit_ref_name is not None:
        route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str')
    response = self._send(http_method='GET',
                          location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196',
                          version='6.0-preview.2',
                          route_values=route_values)
    return self._deserialize('[ProcessWorkItemTypeField]', self._unwrap_collection(response))
GetAllWorkItemTypeFields. [Preview API] Returns a list of all fields in a work item type.

:param str process_id: The ID of the process.
:param str wit_ref_name: The reference name of the work item type.
:rtype: [ProcessWorkItemTypeField]
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/v6_0/work_item_tracking_process/work_item_tracking_process_client.py#L252-L268
 from msrest import Serializer, Deserializer from ...client import Client from . import models class WorkItemTrackingProcessClient(Client): def __init__(self, base_url=None, creds=None): super(WorkItemTrackingProcessClient, self).__init__(base_url, creds) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) resource_area_identifier = '5264459e-e5e0-4bd8-b118-0985e68a4ec5' def create_process_behavior(self, behavior, process_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') content = self._serialize.body(behavior, 'ProcessBehaviorCreateRequest') response = self._send(http_method='POST', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, content=content) return self._deserialize('ProcessBehavior', response) def delete_process_behavior(self, process_id, behavior_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') self._send(http_method='DELETE', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values) def get_process_behavior(self, process_id, behavior_ref_name, expand=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') query_parameters = {} if expand is not None: query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') response = self._send(http_method='GET', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ProcessBehavior', response) def get_process_behaviors(self, process_id, expand=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') query_parameters = {} if expand is not None: query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') response = self._send(http_method='GET', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('[ProcessBehavior]', self._unwrap_collection(response)) def update_process_behavior(self, behavior_data, process_id, behavior_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') content = self._serialize.body(behavior_data, 'ProcessBehaviorUpdateRequest') response = self._send(http_method='PUT', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, content=content) return self._deserialize('ProcessBehavior', response) def create_control_in_group(self, control, process_id, wit_ref_name, group_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is 
not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='POST', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response) def move_control_to_group(self, control, process_id, wit_ref_name, group_id, control_id, remove_from_group_id=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') query_parameters = {} if remove_from_group_id is not None: query_parameters['removeFromGroupId'] = self._serialize.query('remove_from_group_id', remove_from_group_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='PUT', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters, content=content) return self._deserialize('Control', response) def remove_control_from_group(self, process_id, wit_ref_name, group_id, control_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') self._send(http_method='DELETE', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values) def update_control(self, control, process_id, wit_ref_name, group_id, control_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='PATCH', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response) def add_field_to_work_item_type(self, field, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') content = self._serialize.body(field, 'AddProcessWorkItemTypeFieldRequest') response = self._send(http_method='POST', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values, content=content) return 
self._deserialize('ProcessWorkItemTypeField', response)
MIT License
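A hedged usage sketch for the get_all_work_item_type_fields record above. It instantiates the client directly through the constructor shown in the record's context; in practice the azure-devops SDK is usually driven through its Connection helper instead. The organization URL, personal access token, process ID and work item type name are all placeholders.
from msrest.authentication import BasicAuthentication
from azure.devops.v6_0.work_item_tracking_process.work_item_tracking_process_client import (
    WorkItemTrackingProcessClient,
)

creds = BasicAuthentication("", "personal-access-token")            # hypothetical PAT
client = WorkItemTrackingProcessClient("https://dev.azure.com/my-org", creds)
fields = client.get_all_work_item_type_fields(
    process_id="00000000-0000-0000-0000-000000000000",              # hypothetical process id
    wit_ref_name="MyProcess.Bug",                                   # hypothetical work item type
)
print(len(fields), "fields")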
nukesor/sticker-finder
stickerfinder/telegram/commands/tag.py
replace_single
python
def replace_single(bot, update, session, chat, user):
    if (
        update.message.reply_to_message is not None
        and update.message.reply_to_message.sticker is not None
    ):
        tg_sticker = update.message.reply_to_message.sticker
        sticker = session.query(Sticker).get(tg_sticker.file_unique_id)
        if sticker is None:
            return "This sticker has not yet been added."
        is_single_sticker = True
    elif chat.current_sticker:
        sticker = chat.current_sticker
        is_single_sticker = chat.tag_mode not in [
            TagMode.sticker_set.value,
            TagMode.random.value,
        ]
    else:
        return "No sticker for replacement selected"

    text = update.message.text[8:]
    if text.strip() == "":
        return 'You need to add some tags to the /replace command. E.g. "/replace meme prequel obi wan"'

    tag_sticker(
        session,
        text,
        sticker,
        user,
        tg_chat=update.message.chat,
        chat=chat,
        message_id=update.message.message_id,
        single_sticker=is_single_sticker,
        replace=True,
    )

    if not is_single_sticker:
        handle_next(session, bot, chat, update.message.chat, user)
    else:
        return "Sticker tags replaced."
Tag the last sticker sent to this chat.
https://github.com/nukesor/sticker-finder/blob/873468f8de26cc32d1de9b688140569b8086ab5b/stickerfinder/telegram/commands/tag.py#L56-L99
from stickerfinder.session import message_wrapper from stickerfinder.enum import TagMode from stickerfinder.logic.tag import handle_next, tag_sticker from stickerfinder.models import Sticker @message_wrapper() def tag_single(bot, update, session, chat, user): if ( update.message.reply_to_message is not None and update.message.reply_to_message.sticker is not None ): tg_sticker = update.message.reply_to_message.sticker sticker = session.query(Sticker).get(tg_sticker.file_unique_id) if sticker is None: return "This sticker has not yet been added." is_single_sticker = True elif chat.current_sticker: sticker = chat.current_sticker is_single_sticker = chat.tag_mode not in [ TagMode.sticker_set.value, TagMode.random.value, ] else: return "No sticker for replacement selected" text = update.message.text[4:] if text.strip() == "": return 'You need to add some tags to the /tag command. E.g. "/tag meme prequel obi wan"' tag_sticker( session, text, sticker, user, tg_chat=update.message.chat, chat=chat, message_id=update.message.message_id, single_sticker=is_single_sticker, ) if not is_single_sticker: handle_next(session, bot, chat, update.message.chat, user) else: return "Sticker tags changed." @message_wrapper()
MIT License
scikit-learn-contrib/deslib
deslib/util/knne.py
KNNE.fit
python
def fit(self, X, y):
    X, y = check_X_y(X, y)

    self.knns_ = {}
    self.classes_indexes_ = {}
    self.fit_X_ = X
    self.fit_y_ = y
    self.classes_ = np.unique(y)
    self.n_classes_ = self.classes_.size

    self._check_n_neighbors(self.n_neighbors)
    self._set_knn_type()

    tmp = self._handle_n_neighbors(self.n_neighbors)
    self._mdc, self._mod, self._neighbors_per_class = tmp
    for class_ in self.classes_:
        self.classes_indexes_[class_] = np.argwhere(
            np.array(y) == class_).ravel()
        y_c = y[self.classes_indexes_[class_]]
        X_c = X[self.classes_indexes_[class_], :]
        knn = self.knn_type_(n_neighbors=self._neighbors_per_class,
                             **self.kwargs)
        self.knns_[class_] = knn.fit(X_c, y_c)

    return self
Fit the model according to the given training data.

Parameters
----------
X : array of shape (n_samples, n_features)
    Data used to fit the model.

y : array of shape (n_samples)
    class labels of each example in X.
https://github.com/scikit-learn-contrib/deslib/blob/64260ae7c6dd745ef0003cc6322c9f829c807708/deslib/util/knne.py#L52-L87
import warnings import numpy as np from sklearn.base import BaseEstimator from sklearn.neighbors import KNeighborsClassifier from sklearn.utils import check_X_y from sklearn.utils import check_array from deslib.util import faiss_knn_wrapper from deslib.util.prob_functions import softmax class KNNE(BaseEstimator): def __init__(self, n_neighbors=7, knn_classifier='sklearn', **kwargs): self.n_neighbors = n_neighbors self.knn_classifier = knn_classifier self.kwargs = kwargs
BSD 3-Clause New or Revised License
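A small fit sketch for the KNNE record above on a toy two-class dataset; only fit is exercised because no other methods appear in the record, and the data values are made up.
import numpy as np
from deslib.util.knne import KNNE

# Toy data: two well-separated classes in 2-D.
X = np.array([[0.0, 0.0], [0.1, 0.2], [0.2, 0.1],
              [1.0, 1.0], [1.1, 0.9], [0.9, 1.2]])
y = np.array([0, 0, 0, 1, 1, 1])

knne = KNNE(n_neighbors=2)   # neighbours are split evenly across the two classes
knne.fit(X, y)               # builds one k-NN estimator per class label
print(sorted(knne.knns_))    # -> [0, 1], one fitted k-NN per class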
todbot/blink1-python
blink1/blink1.py
Blink1.write_pattern_line
python
def write_pattern_line(self, step_milliseconds, color, pos, ledn=0):
    self.set_ledn(ledn)
    red, green, blue = self.color_to_rgb(color)
    r, g, b = self.cc(red, green, blue)
    step_time = int(step_milliseconds / 10)
    th = (step_time & 0xff00) >> 8
    tl = step_time & 0x00ff
    buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
    self.write(buf)
Write a color & step time color pattern line to RAM

:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param ledn: LED number to adjust, 0=all, 1=LEDA, 2=LEDB
:raises: Blink1ConnectionFailed: if blink(1) is disconnected
https://github.com/todbot/blink1-python/blob/0058d2f8f175332b1c41cd4d089f2beb26e0b2b5/blink1/blink1.py#L291-L306
import logging import time from contextlib import contextmanager import webcolors import hid import os from .kelvin import kelvin_to_rgb, COLOR_TEMPERATURES class Blink1ConnectionFailed(RuntimeError): class InvalidColor(ValueError): log = logging.getLogger(__name__) if os.getenv('DEBUGBLINK1'): log.setLevel(logging.DEBUG) DEFAULT_GAMMA = (2, 2, 2) DEFAULT_WHITE_POINT = (255, 255, 255) REPORT_ID = 0x01 VENDOR_ID = 0x27B8 PRODUCT_ID = 0x01ED REPORT_SIZE = 9 class ColorCorrect(object): def __init__(self, gamma, white_point): self.gamma = gamma if isinstance(white_point, str): kelvin = COLOR_TEMPERATURES[white_point] self.white_point = kelvin_to_rgb(kelvin) elif isinstance(white_point, (int, float)): self.white_point = kelvin_to_rgb(white_point) else: self.white_point = white_point @staticmethod def gamma_correct(gamma, white, luminance): return round(white * (luminance / 255.0) ** gamma) def __call__(self, r, g, b): color = [r, g, b] return tuple( self.gamma_correct(g, w, l) for (g, w, l) in zip(self.gamma, self.white_point, color) ) class Blink1(object): def __init__(self, serial_number=None, gamma=None, white_point=None): self.cc = ColorCorrect( gamma=gamma or DEFAULT_GAMMA, white_point=(white_point or DEFAULT_WHITE_POINT) ) self.dev = self.find(serial_number) if self.dev is None: print("wtf") def close(self): self.dev.close() self.dev = None @staticmethod def find(serial_number=None): try: hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number) hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number) except IOError as e: raise Blink1ConnectionFailed(e) except OSError as e: raise Blink1ConnectionFailed(e) return hidraw @staticmethod def list(): try: devs = hid.enumerate(VENDOR_ID, PRODUCT_ID) serials = list(map(lambda d: d.get('serial_number'), devs)) return serials except IOError: return [] @staticmethod def notfound(): return None def write(self, buf): log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf)) rc = self.dev.send_feature_report(buf) if rc != REPORT_SIZE: raise Blink1ConnectionFailed( "write returned %d instead of %d" % (rc, REPORT_SIZE) ) def read(self): buf = self.dev.get_feature_report(REPORT_ID, 9) log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf)) return buf def fade_to_rgb_uncorrected( self, fade_milliseconds, red, green, blue, ledn=0 ): action = ord('c') fade_time = int(fade_milliseconds / 10) th = (fade_time & 0xff00) >> 8 tl = fade_time & 0x00ff buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, ledn, 0] self.write(buf) def fade_to_rgb(self, fade_milliseconds, red, green, blue, ledn=0): r, g, b = self.cc(red, green, blue) return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, ledn) @staticmethod def color_to_rgb(color): if isinstance(color, tuple): return color if color.startswith('#'): try: return webcolors.hex_to_rgb(color) except ValueError: raise InvalidColor(color) try: return webcolors.name_to_rgb(color) except ValueError: raise InvalidColor(color) def fade_to_color(self, fade_milliseconds, color, ledn=0): red, green, blue = self.color_to_rgb(color) return self.fade_to_rgb(fade_milliseconds, red, green, blue, ledn) def off(self): self.fade_to_color(0, 'black') def get_version(self): buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0] self.write(buf) time.sleep(.05) version_raw = self.read() version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0')) return str(version) def get_serial_number(self): return self.dev.get_serial_number_string() def play(self, start_pos=0, end_pos=0, count=0): if self.dev is None: 
raise Blink1ConnectionFailed("must open first") buf = [ REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0 ] self.write(buf) def stop(self): if self.dev is None: return False buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0] self.write(buf) def save_pattern(self): buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0] self.write(buf) def set_ledn(self, ledn=0): buf = [REPORT_ID, ord('l'), ledn, 0, 0, 0, 0, 0, 0] self.write(buf)
MIT License
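A hedged usage sketch for the write_pattern_line record above, combining it with the play/stop/close helpers visible in the record's context; it assumes a blink(1) device is actually plugged in (otherwise the constructor raises Blink1ConnectionFailed), and the colors and timings are arbitrary.
from blink1.blink1 import Blink1

b1 = Blink1()                             # finds the attached blink(1) over HID
b1.write_pattern_line(500, 'red', 0)      # pattern slot 0: fade to red over 500 ms
b1.write_pattern_line(500, '#0000ff', 1)  # pattern slot 1: fade to blue over 500 ms
b1.play(start_pos=0, end_pos=1, count=0)  # loop the two-line pattern indefinitely
# ... later ...
b1.stop()
b1.close()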
technige/py2neo
py2neo/errors.py
Neo4jError.split_code
python
def split_code(cls, code):
    try:
        parts = code.split(".")
    except AttributeError:
        raise ValueError(code)
    else:
        if len(parts) == 4 and parts[0] == "Neo":
            return parts[1], parts[2], parts[3]
        else:
            raise ValueError(code)
Splits a status code, returning a 3-tuple of classification, category and title.
https://github.com/technige/py2neo/blob/603abd4d5f672bde039d4362cbec77d0d471f034/py2neo/errors.py#L49-L61
__all__ = [ "Neo4jError", "ClientError", "DatabaseError", "TransientError", "ConnectionUnavailable", "ConnectionBroken", "ConnectionLimit", "ServiceUnavailable", "WriteServiceUnavailable", "ProtocolError", ] class Neo4jError(Exception): @classmethod def hydrate(cls, data): code = data.get("code") message = data.get("message") return cls(message, code) @classmethod
Apache License 2.0
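A worked example for the split_code record above, using a status code of the documented Neo.<classification>.<category>.<title> shape; split_code is called as a classmethod here, consistent with the cls parameter in the record, and the specific code string is just an illustration.
from py2neo.errors import Neo4jError

code = "Neo.ClientError.Schema.ConstraintValidationFailed"   # example status code
classification, category, title = Neo4jError.split_code(code)
print(classification, category, title)
# -> ClientError Schema ConstraintValidationFailed
# Anything that does not match the four-part "Neo." form raises ValueError.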
erdc/proteus
proteus/WaveTools.py
tophat
python
def tophat(l,cutoff):
    a = np.zeros(l,)
    cut = int(cutoff*l)
    a[cut:-cut] = 1.
    return a
Calculates and returns a top hat filter array

Parameters
----------
l : int
    Length of array
cutoff : float
    Cut off fraction at both the leading and tailing part of the array

Returns
--------
numpy.ndarray
https://github.com/erdc/proteus/blob/fe4872257aae10b5a686394e78259582e93a39cb/proteus/WaveTools.py#L616-L634
from __future__ import print_function from __future__ import absolute_import from __future__ import division from builtins import str from builtins import zip from builtins import range from past.utils import old_div import cython import numpy as np import cmath as cmat from .Profiling import logEvent, logFile from proteus import Comm import time as tt import sys as sys __all__ = ['SteadyCurrent', 'SolitaryWave', 'MonochromaticWaves', 'NewWave', 'RandomWaves', 'MultiSpectraRandomWaves', 'DirectionalWaves', 'TimeSeries', 'RandomWavesFast', 'RandomNLWaves', 'RandomNLWavesFast', 'CombineWaves', 'fastcos_test', 'fastcosh_test', 'fastsinh_test', 'coshkzd_test', 'sinhkzd_test', 'loadExistingFunction', 'setVertDir', 'loadExistingFunction', 'setVertDir', 'setDirVector', 'dirCheck', 'reduceToIntervals', 'returnRectangles', 'returnRectangles3D', 'normIntegral', 'eta_mode', 'Udrift', 'vel_mode', 'sigma', 'JONSWAP', 'PM_mod', 'cos2s', 'mitsuyasu', 'dispersion', 'tophat', 'costap', 'decompose_tseries'] def fastcos_test(phase,sinus=False): if(sinus): phase = old_div(np.pi,2.) - phase return fastcos(phase,True) def fastcosh_test(k,Z,fast=True): cython.declare(xx=cython.double[2]) fastcosh(xx,k,Z,fast) return xx[0] def fastsinh_test(k,Z,fast=True): cython.declare(xx=cython.double[2]) fastcosh(xx,k,Z,fast) return xx[1] def coshkzd_test(k,Z,d, fast=True): if (Z > old_div(-d,2.)): return old_div(fastcosh_test(k,Z,fast), np.tanh(k*d)) + fastsinh_test(k,Z,fast) else: return 0. def sinhkzd_test(k,Z,d,fast=True): if (Z> old_div(-d,2.)): return fastcosh_test(k,Z,fast) + old_div(fastsinh_test(k,Z,fast), np.tanh(k*d)) else: return 0. def loadExistingFunction(funcName, validFunctions): funcNames = [] for func in validFunctions: funcNames.append(func.__name__) if func.__name__ == funcName: func_ret = func if funcName not in funcNames: logEvent("ERROR! Wavetools.py: Wrong function type (%s) given: Valid wavetypes are %s" %(funcName,funcNames), level=0) sys.exit(1) return func_ret def setVertDir(g): return -np.array(old_div(g,(sqrt(g[0]**2 + g[1]**2 + g[2]**2)))) def setDirVector(vector): return old_div(vector,(sqrt(vector[0]**2 + vector[1]**2 + vector[2]**2))) def dirCheck(v1, v2): dircheck = abs(v1[0]*v2[0]+v1[1]*v2[1]+v1[2]*v2[2]) if dircheck > 1e-10: logEvent("Wave direction is not perpendicular to gravity vector. Check input",level=0) return sys.exit(1) else: return None def reduceToIntervals(fi,df): fim_tmp = (0.5*(fi[1:]+fi[:-1])).tolist() return np.array([fim_tmp[0]-0.5*df]+fim_tmp+[fim_tmp[-1]+0.5*df]) def returnRectangles(a,x): return 0.5*(a[1:]+a[:-1])*(x[1:]-x[:-1]) def returnRectangles3D(a,x,y): ai = 0.5*(a[1:,:]+a[:-1,:]) ai = 0.5*(ai[:,1:]+ai[:,:-1]) for ii in range(len(x)-1): ai[ii,:] *= (y[1:]-y[:-1]) for jj in range(len(y) - 1): ai[:,jj] *= (x[1:]-x[:-1]) return ai def normIntegral(f,dom): G0 = old_div(1.,sum(returnRectangles(f,dom))) return G0*f def eta_mode(x, t, kDir, omega, phi, amplitude): phase = x[0]*kDir[0]+x[1]*kDir[1]+x[2]*kDir[2] - omega*t + phi return amplitude*cos(phase) def Udrift(amp,gAbs,c,d): return 0.5*gAbs*amp*amp/c/d def vel_mode(x, t, kDir, kAbs, omega, phi, amplitude, mwl, depth, vDir, gAbs): phase = x[0]*kDir[0]+x[1]*kDir[1]+x[2]*kDir[2] - omega*t + phi Z = (vDir[0]*x[0] + vDir[1]*x[1]+ vDir[2]*x[2]) - mwl UH = 0. UV=0. ii=0. 
UH=amplitude*omega*cosh(kAbs*(Z + depth))*cos( phase )/sinh(kAbs*depth) UV=amplitude*omega*sinh(kAbs*(Z + depth))*sin( phase )/sinh(kAbs*depth) waveDir = old_div(kDir,kAbs) UH = UH - Udrift(amplitude,gAbs,old_div(omega,kAbs),depth) V = np.array([UH*waveDir[0]+UV*vDir[0], UH*waveDir[1]+UV*vDir[1], UH*waveDir[2]+UV*vDir[2]]) return V def sigma(omega,omega0): sigmaReturn = np.where(omega > omega0,0.09,0.07) return sigmaReturn def JONSWAP(f,f0,Hs,gamma=3.3,TMA=False, depth = None): Tp = old_div(1.,f0) bj = 0.0624*(1.094-0.01915*log(gamma))/(0.23+0.0336*gamma-old_div(0.185,(1.9+gamma))) r = np.exp(old_div(-(Tp*f-1.)**2,(2.*sigma(f,f0)**2))) tma = 1. if TMA: if (depth is None): logEvent("Wavetools:py. Provide valid depth definition definition for TMA spectrum") logEvent("Wavetools:py. Stopping simulation") sys.exit(1) k = dispersion(2*M_PI*f,depth) tma = np.tanh(k*depth)*np.tanh(k*depth)/(1.+ 2.*k*depth/np.sinh(2.*k*depth)) return tma * bj*(Hs**2)*(old_div(1.,((Tp**4) *(f**5))))*np.exp(-1.25*(old_div(1.,(Tp*f)**(4.))))*(gamma**r) def PM_mod(f,f0,Hs): return (old_div(5.0,16.0))*Hs**2*(old_div(f0**4,f**5))*np.exp((old_div(-5.0,4.0))*(old_div(f0,f))**4) def cos2s(theta,f,s=10): fun = np.zeros((len(theta),len(f)),) for ii in range(len(fun[0,:])): fun[:,ii] = np.cos(old_div(theta,2))**(2*s) return fun def mitsuyasu(theta,fi,f0,smax=10): s = smax * (old_div(fi,f0))**(5) ii = np.where(fi>f0)[0][0] s[ii:] = smax * (old_div(fi[ii:],f0))**(-2.5) fun = np.zeros((len(theta),len(fi)),) for ii in range(len(fun[0,:])): fun[:,ii] = np.cos(old_div(theta,2))**(2.*s[ii]) return fun def dispersion(w,d, g = 9.81,niter = 1000): w_aux = np.array(w) K = old_div(w_aux**2,g) for jj in range(niter): K = old_div(w_aux**2,(g*np.tanh(K*d))) if type(K) is float: return K[0] else: return K
MIT License
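A small numeric check for the tophat record above: with length 10 and cutoff 0.2, the first and last two entries are zeroed and the middle six are ones. The function body is reproduced locally so the example stays self-contained rather than importing the full proteus package.
import numpy as np

def tophat(l, cutoff):
    # Same logic as the record above.
    a = np.zeros(l,)
    cut = int(cutoff * l)
    a[cut:-cut] = 1.
    return a

print(tophat(10, 0.2))
# -> [0. 0. 1. 1. 1. 1. 1. 1. 0. 0.]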
owtf/http-request-translator
hrt/interface.py
HttpRequestTranslator._parse_request
python
def _parse_request(self):
    headers_lines = self.request.splitlines()
    if not headers_lines:
        raise ValueError("Request Malformed. Please Enter a Valid HTTP request.")
    new_request_method = headers_lines.pop(0)
    host = ''
    header_list = []
    while headers_lines:
        line = headers_lines.pop(0)
        if not line.strip('\r\n'):
            break
        header_list.append(line)
        try:
            header, value = line.split(":", 1)
            if value.startswith(' '):
                value = value[1:]
        except ValueError:
            raise ValueError("Headers Malformed. Please Enter a Valid HTTP request.")
        if header.lower() == "host":
            host = value.strip()
    data = ''
    if headers_lines:
        data = ''.join(headers_lines)
    details_dict = {}
    details_dict['data'] = data
    details_dict['method'] = new_request_method.split(' ', 1)[0].strip()
    details_dict['Host'] = host
    try:
        proto_ver = new_request_method.split(' ', 2)[2].split('/', 1)
        details_dict['protocol'] = proto_ver[0].strip()
        details_dict['version'] = proto_ver[1].strip()
        details_dict['path'] = new_request_method.split(' ', 2)[1].strip()
    except IndexError:
        details_dict['path'] = ""
        try:
            proto_ver = new_request_method.split(' ', 2)[1].split('/', 1)
        except IndexError:
            raise ValueError("Request Malformed. Please Enter a Valid HTTP request.")
        details_dict['protocol'] = proto_ver[0].strip()
        details_dict['version'] = proto_ver[1].strip()
    scheme, netloc, path, params, query, frag = urlparse(details_dict['path'])
    if params:
        path = path + ";" + params
    if query:
        path = path + "?" + query
    if frag:
        path = path + "#" + frag
    details_dict['path'] = path
    if scheme and not host.startswith(scheme):
        details_dict['pre_scheme'] = scheme + "://"
    else:
        details_dict['pre_scheme'] = ''
    return header_list, details_dict
Parses a raw HTTP request into separate dictionaries for headers and body and other parameters.

:param str request: Raw HTTP request.
:raises ValueError: When the request passed in is malformed.
:return: A tuple of two dictionaries where the first one is the headers and the second the details.
:rtype: tuple
https://github.com/owtf/http-request-translator/blob/71fac59ac0673a6695a93da211ea2af4a31862b7/hrt/interface.py#L63-L134
try: from urlparse import urlparse except ImportError: from urllib.parse import urlparse from .plugin_manager import generate_script from .url import get_url, check_valid_url, check_valid_port class HttpRequestTranslator(object): def __init__(self, languages=['bash'], request=None, proxy=None, search_string='', data=None): self.languages = languages self.request = request self.data = data self.proxy = proxy self.search_string = search_string self._extract_request_details() def _extract_request_details(self): self.headers, self.details = self._parse_request() if self.data: self.details['data'] = self.data if self.proxy: if not self.proxy.startswith(('http', 'https')): proxy = get_url(self.proxy) else: proxy = self.proxy.strip() try: self.details['proxy_host'], self.details['proxy_port'] = proxy.rsplit(":", 1) except ValueError: raise ValueError("Proxy provided is invalid.") if not check_valid_url(self.details['proxy_host']) or not check_valid_port(self.details['proxy_port']): raise ValueError("Proxy provided is invalid.") def generate_code(self): self._parse_request() all_code = {} for language in self.languages: all_code[language] = generate_script(language, self.headers, self.details, self.search_string) return all_code
BSD 3-Clause New or Revised License
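A hedged usage sketch for the _parse_request record above, driven through the HttpRequestTranslator constructor shown in the record's context (parsing happens during __init__); the raw request text is a made-up example.
from hrt.interface import HttpRequestTranslator

raw_request = (
    "GET /search?q=owtf HTTP/1.1\r\n"
    "Host: example.com\r\n"
    "User-Agent: demo\r\n"
    "\r\n"
)
translator = HttpRequestTranslator(request=raw_request)
print(translator.details['method'], translator.details['Host'], translator.details['path'])
# -> GET example.com /search?q=owtf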
guildai/guildai
guild/commands/init_impl.py
_check_call_with_empty_env
python
def _check_call_with_empty_env(args):
    env = {"PATH": os.getenv("PATH")}
    subprocess.check_call(args, env=env)
Pass through to subprocess.check_call with empty env. There is surprising behavior on macOS Python 3x during init where a pip install command from outside a virtual environment - which should install a package correctly in the environment - installs the package but uses the wrong path to Python in installed package shell command shebangs. The result is that package shell scripts (e.g. the `guild` command) will use the wrong Python version. This surprising behavior appears to be related to something in the environment. Setting the command env to an empty dict corrects this problem and ensures that pip installed packages use the correct Python path. Note that setting env to None does not correct the problem. Update: macOS Python 2.7 has issues when PATH is not in the env so we set it to the value of the current PATH env.
https://github.com/guildai/guildai/blob/79d39402201168b7e94007d8e66ecf504e7aa71c/guild/commands/init_impl.py#L447-L467
from __future__ import absolute_import from __future__ import division import hashlib import logging import os import pkg_resources import re import subprocess import sys import six import yaml import guild from guild import cli from guild import config from guild import guildfile from guild import init from guild import pip_util from guild import util log = logging.getLogger("guild") PYTHON_REQ_P = re.compile(r"^\s*#\s*python(\S+?)\s*$", re.MULTILINE) class Config(object): def __init__(self, args): self.env_dir = os.path.abspath(args.dir) self.env_name = self._init_env_name(args.name, self.env_dir) self.guild = args.guild self.guild_home = self._init_guild_home(args) self.no_isolate = args.no_isolate self.user_reqs = self._init_user_reqs(args) self.guild_pkg_reqs = self._init_guild_pkg_reqs(args) self.venv_python = self._init_venv_python(args, self.user_reqs) self.paths = args.path self.system_site_packages = args.system_site_packages self.isolate_resources = args.isolate_resources self.pre_release = args.pre_release self.prompt_params = self._init_prompt_params() self.no_progress = args.no_progress @staticmethod def _init_env_name(name, abs_env_dir): if name: return name basename = os.path.basename(abs_env_dir) if basename != "venv": return basename return os.path.basename(os.path.dirname(abs_env_dir)) @staticmethod def _init_guild_home(args): if args.guild_home: return args.guild_home if args.no_isolate: return config.guild_home() return None @staticmethod def _init_venv_python(args, user_reqs): if args.python: return _python_interpreter_for_arg(args.python) return _suggest_python_interpreter(user_reqs) @staticmethod def _init_guild_pkg_reqs(args): if args.no_reqs: return () reqs = list(_iter_all_guild_pkg_reqs(config.cwd(), args.path)) return tuple(sorted(reqs)) @staticmethod def _init_user_reqs(args): if args.requirement: return args.requirement elif not args.no_reqs: default_reqs = os.path.join(config.cwd(), "requirements.txt") if os.path.exists(default_reqs): return (default_reqs,) return () def _init_prompt_params(self): params = [] params.append(("Location", util.format_dir(self.env_dir))) params.append(("Name", self.env_name)) if self.venv_python: params.append(("Python interpreter", self.venv_python)) else: params.append(("Python interpreter", "default")) params.append( ("Use system site packages", "yes" if self.system_site_packages else "no") ) if self.guild: params.append(("Guild", self.guild)) else: params.append(("Guild", _implicit_guild_version())) if self.guild_home: params.append(("Guild home", self.guild_home)) if self.guild_pkg_reqs: params.append(("Guild package requirements", self.guild_pkg_reqs)) if self.user_reqs: params.append(("Python requirements", self.user_reqs)) if self.pre_release: params.append(("Use pre-release", "yes")) if self.paths: params.append(("Additional paths", self.paths)) if self.isolate_resources: params.append(("Resource cache", "local")) else: params.append(("Resource cache", "shared")) return params def _maybe_guild_pkg_reqs(self): if self.no_guild_pkg_reqs: return [] def as_kw(self): return self.__dict__ def _iter_all_guild_pkg_reqs(dir, search_path, seen=None): seen = seen or set() src = os.path.abspath(guildfile.guildfile_path(dir)) if not os.path.exists(src): return if src in seen: return seen.add(src) for req in _guild_pkg_reqs(src): dir_on_path = _find_req_on_path(req, search_path) if dir_on_path: for req in _iter_all_guild_pkg_reqs(dir_on_path, search_path, seen): yield req else: yield req def _guild_pkg_reqs(src): pkg = 
_guildfile_pkg(src) if not pkg: return [] return _pkg_requires(pkg, src) def _guildfile_pkg(src): data = _guildfile_data(src) if not isinstance(data, list): data = [data] for item in data: if isinstance(item, dict) and "package" in item: return item return None def _guildfile_data(src): try: f = open(src, "r") return yaml.safe_load(f) except Exception as e: log.warning( "cannot read Guild package requirements for %s (%s) - ignoring", src, e ) return [] def _pkg_requires(pkg_data, src): requires = pkg_data.get("requires") or [] if isinstance(requires, six.string_types): requires = [requires] if not isinstance(requires, list): log.warning( "invalid package requires list in %s (%r) - ignoring", src, pkg_data ) return [] return requires def _find_req_on_path(req, path): req_subpath = req.replace(".", os.path.sep) for root in path: full_path = os.path.join(root, req_subpath) if os.path.exists(full_path): return full_path return None def _implicit_guild_version(): reqs_file = _guild_reqs_file() if reqs_file: return "from source (%s)" % os.path.dirname(reqs_file) else: return guild.__version__ def main(args): _error_if_active_env() config = Config(args) if args.yes or _confirm(config): _init(config) def _error_if_active_env(): active_env = os.getenv("VIRTUAL_ENV") if active_env: cli.error( "cannot run init from an activate environment (%s)\n" "Deactivate the environment by running 'deactivate' " "and try again." % active_env ) def _confirm(config): cli.out("You are about to initialize a Guild environment:") for name, val in config.prompt_params: if isinstance(val, tuple): cli.out(" {}:".format(name)) for x in val: cli.out(" {}".format(x)) else: cli.out(" {}: {}".format(name, val)) return cli.confirm("Continue?", default=True) def _init(config): _test_symlinks() _init_guild_env(config) _init_venv(config) _upgrade_pip(config) _install_guild(config) _install_guild_pkg_reqs(config) _install_user_reqs(config) _install_paths(config) _initialized_msg(config) def _test_symlinks(): try: util.test_windows_symlinks() except OSError: cli.error( "this command requires symbolic link privilege on Windows\n" "Try running this command with administrator privileges." ) def _init_guild_env(config): cli.out("Initializing Guild environment in {}".format(config.env_dir)) try: init.init_env(config.env_dir, config.guild_home, config.isolate_resources) except init.PermissionError as e: cli.error("unable to write to %s - do you have write permission?" 
% e.args[0]) except init.InitError as e: cli.error(e) def _init_venv(config): cmd_args = _venv_cmd_args(config) if not cmd_args: cli.out("Skipping virtual env") return cli.out("Creating virtual environment") log.debug("venv args: %s", cmd_args) try: subprocess.check_call(cmd_args) except subprocess.CalledProcessError as e: cli.error(str(e), exit_status=e.returncode) def _venv_cmd_args(config): args = _venv_cmd_base_args() + [config.env_dir] args.extend(["--prompt", "({}) ".format(config.env_name)]) if config.venv_python: args.extend(["--python", config.venv_python]) if config.system_site_packages: args.append("--system-site-packages") return args def _venv_cmd_base_args(): return util.find_apply([_virtualenv_cmd, _virtualenv_missing_error]) def _virtualenv_cmd(): return util.find_apply([_virtualenv_module_cmd, _virtualenv_script_cmd]) def _virtualenv_module_cmd(): try: import virtualenv as _ except ImportError: return None else: return [sys.executable, "-m", "virtualenv"] def _virtualenv_script_cmd(): cmd = util.which("virtualenv") if not cmd: return None return [cmd] def _virtualenv_missing_error(): cli.error( "cannot find virtualenv\n" "Try installing it with 'pip install virtualenv'." ) def _upgrade_pip(config): cmd_args = _pip_bin_args(config.env_dir) + ["install", "--upgrade", "pip"] cli.out("Upgrading pip") log.debug("pip upgrade cmd: %s", cmd_args) try: subprocess.check_output(cmd_args) except subprocess.CalledProcessError as e: cli.error(str(e), exit_status=e.returncode) def _install_guild(config): if config.guild: _install_guild_dist(config) else: guild_reqs = _guild_reqs_file() if guild_reqs: _install_guild_reqs(guild_reqs, config) else: _install_default_guild_dist(config) def _install_guild_dist(config): assert config.guild if config.guild[0].isdigit(): cli.out("Installing Guild %s" % config.guild) req = "guildai==%s" % config.guild else: cli.out("Installing %s" % config.guild) req = config.guild _install_reqs([req], config, ignore_installed=True) def _guild_reqs_file(): guild_location = pkg_resources.resource_filename("guild", "") guild_parent = os.path.dirname(guild_location) path = os.path.join(guild_parent, "requirements.txt") try: f = open(path, "r") except (IOError, OSError): pass else: with f: if "guildai" in f.readline(): return path return None def _install_guild_reqs(req_files, config): cli.out("Installing Guild requirements") _install_reqs([req_files], config) def _install_default_guild_dist(config): req = "guildai==%s" % guild.__version__ cli.out("Installing Guild %s" % req) _install_reqs([req], config) def _pip_bin_args(env_dir): if util.get_platform() == "Windows": python_bin = os.path.join(env_dir, "Scripts", "python.exe") else: python_bin = os.path.join(env_dir, "bin", "python") assert os.path.exists(python_bin), python_bin return [python_bin, "-m", "pip"] def _pip_extra_install_opts(config): opts = [] if config.no_progress: opts.extend(["--progress", "off"]) if config.pre_release: opts.append("--pre") return opts def _install_reqs(reqs, config, ignore_installed=False): cmd_args = ( _pip_bin_args(config.env_dir) + ["install", "--no-warn-script-location"] + _pip_extra_install_opts(config) ) if ignore_installed: cmd_args.append("--ignore-installed") for req in reqs: if _is_requirements_file(req): cmd_args.extend(["-r", req]) else: cmd_args.append(req) log.debug("pip cmd: %s", cmd_args) try: _check_call_with_empty_env(cmd_args) except subprocess.CalledProcessError as e: cli.error(str(e), exit_status=e.returncode)
Apache License 2.0
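The record above explains why Guild runs pip with a nearly empty environment: carrying over only PATH keeps stray environment variables from influencing how installed package scripts resolve their Python interpreter. A standalone sketch of that pattern; the virtualenv path and package name are placeholders.
import os
import subprocess

def check_call_with_minimal_env(args):
    # Run a subprocess with only PATH carried over from the parent process.
    env = {"PATH": os.getenv("PATH")}
    subprocess.check_call(args, env=env)

# e.g. installing into a virtualenv via its own interpreter (path is hypothetical):
check_call_with_minimal_env(["/tmp/venv/bin/python", "-m", "pip", "install", "requests"])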
severb/flowy
flowy/swf/client.py
SWFClient.respond_activity_task_failed
python
def respond_activity_task_failed(self, task_token, reason=None, details=None):
    kwargs = {
        'taskToken': str_or_none(task_token),
        'reason': str_or_none(reason),
        'details': str_or_none(details)
    }
    normalize_data(kwargs)
    response = self.client.respond_activity_task_failed(**kwargs)
    return response
Wrapper for `boto3.client('swf').respond_activity_task_failed`.
https://github.com/severb/flowy/blob/bb7c7df99b66e5ef8d7806210408487aed9db67a/flowy/swf/client.py#L221-L230
import boto3 from botocore.client import Config from flowy.utils import str_or_none __all__ = ['CHILD_POLICY', 'DURATION', 'IDENTITY_SIZE', 'SWFClient'] class CHILD_POLICY: TERMINATE = 'TERMINATE' REQUEST_CANCEL = 'REQUEST_CANCEL' ABANDON = 'ABANDON' ALL = ('TERMINATE', 'REQUEST_CANCEL', 'ABANDON') class DURATION: INDEF = 'NONE' ONE_YEAR = 31622400 ALL = ('NONE', 31622400) IDENTITY_SIZE = 256 class SWFClient(object): def __init__(self, client=None, config=None, kwargs=None): kwargs = kwargs if isinstance(kwargs, dict) else {} config = config or Config(connect_timeout=70, read_timeout=70) kwargs.setdefault('config', config) self.client = client or boto3.client('swf', **kwargs) def register_activity_type(self, domain, name, version, desc=None, default_task_list=None, default_priority=None, default_heartbeat_timeout=None, default_exec_timeout=None, default_start_timeout=None, default_close_timeout=None): kwargs = { 'domain': str_or_none(domain), 'name': str_or_none(name), 'version': str_or_none(version), 'description': str_or_none(desc), 'defaultTaskList': { 'name': str_or_none(default_task_list) }, 'defaultTaskPriority': str_or_none(default_priority), 'defaultTaskHeartbeatTimeout': duration_encode(default_heartbeat_timeout, 'default_heartbeat_timeout'), 'defaultTaskStartToCloseTimeout': duration_encode(default_exec_timeout, 'default_exec_timeout'), 'defaultTaskScheduleToStartTimeout': duration_encode(default_start_timeout, 'default_start_timeout'), 'defaultTaskScheduleToCloseTimeout': duration_encode(default_close_timeout, 'default_close_timeout') } normalize_data(kwargs) response = self.client.register_activity_type(**kwargs) return response def register_workflow_type(self, domain, name, version, desc=None, default_task_list=None, default_priority=None, default_task_timeout=None, default_exec_timeout=None, default_child_policy=None, default_lambda_role=None): kwargs = { 'domain': str_or_none(domain), 'name': str_or_none(name), 'version': str_or_none(version), 'description': str_or_none(desc), 'defaultTaskList': { 'name': str_or_none(default_task_list) }, 'defaultTaskPriority': str_or_none(default_priority), 'defaultTaskStartToCloseTimeout': duration_encode(default_task_timeout, 'default_task_timeout'), 'defaultExecutionStartToCloseTimeout': duration_encode(default_exec_timeout, 'default_exec_timeout'), 'defaultChildPolicy': cp_encode(default_child_policy), 'defaultLambdaRole': str_or_none(default_lambda_role) } normalize_data(kwargs) response = self.client.register_workflow_type(**kwargs) return response def describe_activity_type(self, domain, name, version): kwargs = { 'domain': str_or_none(domain), 'activityType': { 'name': str_or_none(name), 'version': str_or_none(version) } } normalize_data(kwargs) response = self.client.describe_activity_type(**kwargs) return response def describe_workflow_type(self, domain, name, version): kwargs = { 'domain': str_or_none(domain), 'workflowType': { 'name': str_or_none(name), 'version': str_or_none(version) } } normalize_data(kwargs) response = self.client.describe_workflow_type(**kwargs) return response def start_workflow_execution(self, domain, wid, name, version, input=None, priority=None, task_list=None, execution_start_to_close_timeout=None, task_start_to_close_timeout=None, child_policy=None, tags=None, lambda_role=None): kwargs = { 'domain': str_or_none(domain), 'workflowId': str_or_none(wid), 'workflowType': { 'name': str_or_none(name), 'version': str_or_none(version) }, 'input': str_or_none(input), 'taskPriority': str_or_none(priority), 
'taskList': { 'name': str_or_none(task_list) }, 'executionStartToCloseTimeout': str_or_none(execution_start_to_close_timeout), 'taskStartToCloseTimeout': str_or_none(task_start_to_close_timeout), 'childPolicy': cp_encode(child_policy), 'tagList': tags_encode(tags), 'lambda_role': str_or_none(lambda_role) } normalize_data(kwargs) response = self.client.start_workflow_execution(**kwargs) return response def poll_for_decision_task(self, domain, task_list, identity=None, next_page_token=None, max_page_size=1000, reverse_order=False): assert max_page_size <= 1000, 'Page size greater than 1000.' identity = str(identity)[:IDENTITY_SIZE] if identity else identity kwargs = { 'domain': str_or_none(domain), 'taskList': { 'name': str_or_none(task_list) }, 'identity': identity, 'nextPageToken': str_or_none(next_page_token), 'maximumPageSize': max_page_size, 'reverseOrder': reverse_order } normalize_data(kwargs) response = self.client.poll_for_decision_task(**kwargs) return response def poll_for_activity_task(self, domain, task_list, identity=None): identity = str(identity)[:IDENTITY_SIZE] if identity else identity kwargs = { 'domain': str_or_none(domain), 'taskList': { 'name': str_or_none(task_list), }, 'identity': identity, } normalize_data(kwargs) response = self.client.poll_for_activity_task(**kwargs) return response def record_activity_task_heartbeat(self, task_token, details=None): kwargs = { 'taskToken': str_or_none(task_token), 'details': str_or_none(details), } normalize_data(kwargs) response = self.client.record_activity_task_heartbeat(**kwargs) return response
MIT License
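A hedged usage sketch for the respond_activity_task_failed record above; it assumes AWS credentials, a configured region and a registered SWF domain/task list (the names below are placeholders), and the task token comes from the poll_for_activity_task wrapper shown in the record's context.
from flowy.swf.client import SWFClient

swf = SWFClient()  # wraps boto3.client('swf') with long-poll friendly timeouts
task = swf.poll_for_activity_task(domain="my-domain", task_list="my-task-list")
if task.get("taskToken"):
    swf.respond_activity_task_failed(
        task_token=task["taskToken"],
        reason="ProcessingError",          # short failure reason
        details="stack trace or context",  # free-form failure details
    )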
cookiecutter/cookiecutter
tests/test_cookiecutter_invocation.py
project_dir
python
def project_dir():
    yield 'fake-project-templated'

    if os.path.isdir('fake-project-templated'):
        utils.rmtree('fake-project-templated')
Return test project folder name and remove it after the test.
https://github.com/cookiecutter/cookiecutter/blob/d6037b7dee5756e35a6ecd5b522899a9061c2c79/tests/test_cookiecutter_invocation.py#L18-L23
import os import subprocess import sys import pytest from cookiecutter import utils @pytest.fixture
BSD 3-Clause New or Revised License
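The project_dir record above is a pytest yield fixture: the test body runs at the yield point, and the directory cleanup runs afterwards. A minimal sketch of consuming such a fixture from the same test module; the assertions are illustrative only.
import os

def test_project_dir_name(project_dir):
    # pytest injects the fixture by parameter name; after this test finishes,
    # the fixture's post-yield code removes the directory if it was created.
    assert project_dir == "fake-project-templated"
    assert not os.path.isabs(project_dir)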
myriadrf/pylms7002soapy
pyLMS7002Soapy/LMS7002_LimeLight.py
LMS7002_LimeLight.ENABLEDIRCTR1
python
def ENABLEDIRCTR1(self):
    return self._readReg('IOCFG', 'ENABLEDIRCTR1')
Get the value of ENABLEDIRCTR1
https://github.com/myriadrf/pylms7002soapy/blob/4f828eb9282c302dc6b187d91df5e77c8a6f2d61/pyLMS7002Soapy/LMS7002_LimeLight.py#L129-L133
from pyLMS7002Soapy.LMS7002_base import LMS7002_base class LMS7002_LimeLight(LMS7002_base): __slots__ = [] def __init__(self, chip): self.chip = chip self.channel = None self.prefix = "LimeLight_" @property def DIQDIRCTR2(self): return self._readReg('IOCFG', 'DIQDIRCTR2') @DIQDIRCTR2.setter def DIQDIRCTR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIRCTR2', value) @property def DIQDIR2(self): return self._readReg('IOCFG', 'DIQDIR2') @DIQDIR2.setter def DIQDIR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIR2', value) @property def DIQDIRCTR1(self): return self._readReg('IOCFG', 'DIQDIRCTR1') @DIQDIRCTR1.setter def DIQDIRCTR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIRCTR1', value) @property def DIQDIR1(self): return self._readReg('IOCFG', 'DIQDIR1') @DIQDIR1.setter def DIQDIR1(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'DIQDIR1', value) @property def ENABLEDIRCTR2(self): return self._readReg('IOCFG', 'ENABLEDIRCTR2') @ENABLEDIRCTR2.setter def ENABLEDIRCTR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIRCTR2', value) @property def ENABLEDIR2(self): return self._readReg('IOCFG', 'ENABLEDIR2') @ENABLEDIR2.setter def ENABLEDIR2(self, value): if value not in [0, 1]: raise ValueError("Value must be [0,1]") self._writeReg('IOCFG', 'ENABLEDIR2', value) @property
Apache License 2.0
cosanlab/nltools
nltools/simulator.py
Simulator.normal_noise
python
def normal_noise(self, mu, sigma):
    self.nifti_masker.fit(self.brain_mask)
    vlength = int(np.sum(self.brain_mask.get_data()))
    if sigma != 0:
        n = self.random_state.normal(mu, sigma, vlength)
    else:
        n = [mu] * vlength
    m = self.nifti_masker.inverse_transform(n)
    return m.get_data()
Produce a normal noise distribution for all points in the brain mask.

Args:
    mu: average value of the gaussian signal (usually set to 0)
    sigma: standard deviation
https://github.com/cosanlab/nltools/blob/b15eb85829429f5f86116525d1250f2bce1df7f8/nltools/simulator.py#L101-L118
__all__ = ["Simulator", "SimulateGrid"] __author__ = ["Sam Greydanus", "Luke Chang"] __license__ = "MIT" import os import numpy as np import nibabel as nib import pandas as pd import matplotlib.pyplot as plt from nilearn.input_data import NiftiMasker from scipy.stats import multivariate_normal, binom, ttest_1samp from nltools.data import Brain_Data from nltools.stats import fdr, one_sample_permutation from nltools.prefs import MNI_Template, resolve_mni_path import csv from copy import deepcopy from sklearn.utils import check_random_state class Simulator: def __init__( self, brain_mask=None, output_dir=None, random_state=None ): if output_dir is None: self.output_dir = os.path.join(os.getcwd()) else: self.output_dir = output_dir if isinstance(brain_mask, str): brain_mask = nib.load(brain_mask) elif brain_mask is None: brain_mask = nib.load(resolve_mni_path(MNI_Template)["mask"]) elif ~isinstance(brain_mask, nib.nifti1.Nifti1Image): raise ValueError("brain_mask is not a string or a nibabel instance") self.brain_mask = brain_mask self.nifti_masker = NiftiMasker(mask_img=self.brain_mask) self.random_state = check_random_state(random_state) def gaussian(self, mu, sigma, i_tot): x, y, z = np.mgrid[ 0 : self.brain_mask.shape[0], 0 : self.brain_mask.shape[1], 0 : self.brain_mask.shape[2], ] xyz = np.column_stack([x.flat, y.flat, z.flat]) covariance = np.diag(sigma ** 2) g = multivariate_normal.pdf(xyz, mean=mu, cov=covariance) g = g.reshape(x.shape).astype(float) g = np.multiply(self.brain_mask.get_data(), g) g = np.multiply(i_tot / np.sum(g), g) return g def sphere(self, r, p): dims = self.brain_mask.shape x, y, z = np.ogrid[ -p[0] : dims[0] - p[0], -p[1] : dims[1] - p[1], -p[2] : dims[2] - p[2] ] mask = x * x + y * y + z * z <= r * r activation = np.zeros(dims) activation[mask] = 1 activation = np.multiply(activation, self.brain_mask.get_data()) activation = nib.Nifti1Image(activation, affine=np.eye(4)) return activation.get_fdata()
MIT License
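A hedged usage sketch for the normal_noise record above; constructing a Simulator with no mask argument falls back to nltools' bundled MNI brain mask, so this assumes that template data is available locally, and mu/sigma are arbitrary.
from nltools.simulator import Simulator

sim = Simulator(random_state=0)          # default MNI brain mask
noise = sim.normal_noise(mu=0, sigma=1)  # voxelwise N(0, 1) noise inside the mask
print(noise.shape)                       # spatial shape matches the brain mask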
morganstanley/testplan
examples/ExecutionPools/Thread/test_plan.py
CustomParser.add_arguments
python
def add_arguments(self, parser):
    parser.add_argument(
        "--tasks-num",
        action="store",
        type=int,
        default=8,
        help="Number of tests to be scheduled.",
    )
    parser.add_argument(
        "--pool-size",
        action="store",
        type=int,
        default=4,
        help="How many thread workers assigned to pool.",
    )
Defining custom arguments for this Testplan.
https://github.com/morganstanley/testplan/blob/8cb6a0ed0682698b2d6af82382fbb66d8d9e3ff7/examples/ExecutionPools/Thread/test_plan.py#L21-L36
import sys from testplan import test_plan from testplan import Task from testplan.parser import TestplanParser from testplan.runners.pools.base import Pool as ThreadPool from testplan.report.testing.styles import Style, StyleEnum OUTPUT_STYLE = Style(StyleEnum.ASSERTION_DETAIL, StyleEnum.ASSERTION_DETAIL) class CustomParser(TestplanParser):
Apache License 2.0
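The add_arguments record above registers two integer options on the Testplan parser. A standalone argparse mirror of those two options, just to show how the parsed values come out when the example plan is run with custom flags (the values 16 and 8 are arbitrary):
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--tasks-num", action="store", type=int, default=8)
parser.add_argument("--pool-size", action="store", type=int, default=4)

args = parser.parse_args(["--tasks-num", "16", "--pool-size", "8"])
print(args.tasks_num, args.pool_size)   # -> 16 8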
pandas-ml/pandas-ml
pandas_ml/confusion_matrix/abstract.py
ConfusionMatrixAbstract.stats_overall
python
def stats_overall(self):
    df = self._df_confusion
    d_stats = collections.OrderedDict()
    d_class_agreement = class_agreement(df)
    key = 'Accuracy'
    try:
        d_stats[key] = d_class_agreement['diag']
    except KeyError:
        d_stats[key] = np.nan
    key = '95% CI'
    try:
        d_stats[key] = binom_interval(np.sum(np.diag(df)), df.sum().sum())
    except:
        d_stats[key] = np.nan
    d_prop_test = prop_test(df)
    d_stats['No Information Rate'] = 'ToDo'
    d_stats['P-Value [Acc > NIR]'] = d_prop_test['p.value']
    d_stats['Kappa'] = d_class_agreement['kappa']
    d_stats['Mcnemar\'s Test P-Value'] = 'ToDo'
    return(d_stats)
Returns an OrderedDict with overall statistics
https://github.com/pandas-ml/pandas-ml/blob/26717cc33ddc3548b023a6410b2235fb21a7b382/pandas_ml/confusion_matrix/abstract.py#L302-L329
import numpy as np import pandas as pd import collections import pandas_ml as pdml from pandas_ml.confusion_matrix.stats import binom_interval, class_agreement, prop_test class ConfusionMatrixAbstract(object): TRUE_NAME = 'Actual' PRED_NAME = 'Predicted' def __init__(self, y_true, y_pred, labels=None, display_sum=True, backend='matplotlib', true_name='Actual', pred_name='Predicted'): self.true_name = true_name self.pred_name = pred_name if isinstance(y_true, pd.Series): self._y_true = y_true self._y_true.name = self.true_name else: self._y_true = pd.Series(y_true, name=self.true_name) if isinstance(y_pred, pd.Series): self._y_pred = y_pred self._y_pred.name = self.pred_name else: self._y_pred = pd.Series(y_pred, name=self.pred_name) if labels is not None: if not self.is_binary: self._y_true = self._y_true.map(lambda i: self._label(i, labels)) self._y_pred = self._y_pred.map(lambda i: self._label(i, labels)) else: N = len(labels) assert len(labels) == 2, "labels be a list with length=2 - length=%d" % N d = {labels[0]: False, labels[1]: True} self._y_true = self._y_true.map(d) self._y_pred = self._y_pred.map(d) raise(NotImplementedError) N_true = len(y_true) N_pred = len(y_pred) assert N_true == N_pred, "y_true must have same size - %d != %d" % (N_true, N_pred) df = pd.crosstab(self._y_true, self._y_pred) idx = self._classes(df) if self.is_binary and pdml.compat._PANDAS_ge_021: df = df.reindex([False, True]) df = df.reindex([False, True], axis=1) df = df.fillna(0) else: df = df.loc[idx, idx.copy()].fillna(0) self._df_confusion = df self._df_confusion.index.name = self.true_name self._df_confusion.columns.name = self.pred_name self._df_confusion = self._df_confusion.astype(np.int64) self._len = len(idx) self.backend = backend self.display_sum = display_sum def _label(self, i, labels): try: return(labels[i]) except IndexError: return(i) def __repr__(self): return(self.to_dataframe(calc_sum=self.display_sum).__repr__()) def __str__(self): return(self.to_dataframe(calc_sum=self.display_sum).__str__()) @property def classes(self): return(self._classes()) def _classes(self, df=None): if df is None: df = self.to_dataframe() idx_classes = (df.columns | df.index).copy() idx_classes.name = 'Classes' return(idx_classes) def to_dataframe(self, normalized=False, calc_sum=False, sum_label='__all__'): if normalized: a = self._df_confusion.values.astype('float') a = a.astype('float') / a.sum(axis=1)[:, np.newaxis] df = pd.DataFrame(a, index=self._df_confusion.index.copy(), columns=self._df_confusion.columns.copy()) else: df = self._df_confusion if calc_sum: df = df.copy() df[sum_label] = df.sum(axis=1) df = pd.concat([df, pd.DataFrame(df.sum(axis=0), columns=[sum_label]).T]) df.index.name = self.true_name return(df) @property def true(self): s = self.to_dataframe().sum(axis=1) s.name = self.true_name return(s) @property def pred(self): s = self.to_dataframe().sum(axis=0) s.name = self.pred_name return(s) def to_array(self, normalized=False, sum=False): return(self.to_dataframe(normalized, sum).values) def toarray(self, *args, **kwargs): return(self.to_array(*args, **kwargs)) def len(self): return(self._len) def sum(self): return(self.to_dataframe().sum().sum()) @property def population(self): return(self.sum()) def y_true(self, func=None): if func is None: return(self._y_true) else: return(self._y_true.map(func)) def y_pred(self, func=None): if func is None: return(self._y_pred) else: return(self._y_pred.map(func)) @property def title(self): if self.is_binary: return("Binary confusion matrix") else: 
return("Confusion matrix") def plot(self, normalized=False, backend='matplotlib', ax=None, max_colors=10, **kwargs): df = self.to_dataframe(normalized) try: cmap = kwargs['cmap'] except KeyError: import matplotlib.pyplot as plt cmap = plt.cm.gray_r title = self.title if normalized: title += " (normalized)" if backend == 'matplotlib': import matplotlib.pyplot as plt fig, ax = plt.subplots(figsize=(9, 8)) plt.imshow(df, cmap=cmap, interpolation='nearest') ax.set_title(title) tick_marks_col = np.arange(len(df.columns)) tick_marks_idx = tick_marks_col.copy() ax.set_yticks(tick_marks_idx) ax.set_xticks(tick_marks_col) ax.set_xticklabels(df.columns, rotation=45, ha='right') ax.set_yticklabels(df.index) ax.set_ylabel(df.index.name) ax.set_xlabel(df.columns.name) N_max = self.max() if N_max > max_colors: plt.colorbar() else: pass return ax elif backend == 'seaborn': import seaborn as sns ax = sns.heatmap(df, **kwargs) return ax else: msg = "'backend' must be either 'matplotlib' or 'seaborn'" raise ValueError(msg) def binarize(self, select): if not isinstance(select, collections.Iterable): select = np.array(select) y_true_bin = self.y_true().map(lambda x: x in select) y_pred_bin = self.y_pred().map(lambda x: x in select) from pandas_ml.confusion_matrix.bcm import BinaryConfusionMatrix binary_cm = BinaryConfusionMatrix(y_true_bin, y_pred_bin) return(binary_cm) def enlarge(self, select): if not isinstance(select, collections.Iterable): idx_new_cls = pd.Index([select]) else: idx_new_cls = pd.Index(select) new_idx = self._df_confusion.index | idx_new_cls new_idx.name = self.true_name new_col = self._df_confusion.columns | idx_new_cls new_col.name = self.pred_name print(new_col) self._df_confusion = self._df_confusion.loc[:, new_col] @property
BSD 3-Clause New or Revised License
georgedouzas/sports-betting
sportsbet/datasets/_soccer/_fd.py
_extract_extra_leagues_param_grid
python
def _extract_extra_leagues_param_grid():
    urls = _extract_leagues_urls('Extra Leagues')
    extra_leagues_urls = {}
    for url in urls:
        html = urlopen(urljoin(URL, url))
        bsObj = BeautifulSoup(html.read(), features='html.parser')
        league = url.replace('.php', '')
        extra_leagues_urls[league] = list({
            el.get('href') for el in bsObj.find_all('a')
            if el.get('href').endswith('csv')
        })
    extra_leagues_param_grid = []
    for league, urls in extra_leagues_urls.items():
        years = pd.read_csv(urljoin(URL, urls[0]), usecols=['Season'])['Season']
        years = list({s if type(s) is not str else int(s.split('/')[-1]) for s in years.unique()})
        extra_leagues_param_grid.append({'league': [league], 'division': [1], 'year': years, 'url': urls})
    return ParameterGrid(extra_leagues_param_grid)
Extract parameter grid of extra leagues.
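As a quick orientation, the returned ``ParameterGrid`` can be iterated like a flat list of parameter dicts. The league name and CSV file shown below are hypothetical placeholders, not values scraped from football-data.co.uk::

    # Each combination carries 'league', 'division', 'year' and 'url' keys.
    for params in _extract_extra_leagues_param_grid():
        # e.g. {'division': 1, 'league': 'Argentina', 'url': 'ARG.csv', 'year': 2019}
        csv_url = urljoin(URL, params['url'])
        print(params['league'], params['year'], csv_url)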
https://github.com/georgedouzas/sports-betting/blob/71d6f128bc516fc8f26decad60c2f7fd6f828945/sportsbet/datasets/_soccer/_fd.py#L323-L342
from urllib.request import urlopen, urljoin from datetime import datetime from os.path import join import numpy as np import pandas as pd from sklearn.model_selection import ParameterGrid from bs4 import BeautifulSoup from rich.progress import track from . import TARGETS from .._utils import _DataLoader URL = 'http://www.football-data.co.uk' REMOVED = [ ('Div', None, None), ('Season', None, None), ('League', None, None), ('Country', None, None), ('Time', None, None), ('FTR', None, None), ('Res', None, None), ('Attendance', None, None), ('Referee', None, None), ('HTR', None, None), ('BbAH', None, None), ('Bb1X2', None, None), ('BbOU', None, None) ] CREATED = [ (None, 'league', object), (None, 'division', int), (None, 'year', int) ] RENAMED = [ ('HomeTeam', 'home_team', object), ('AwayTeam', 'away_team', object), ('Date', 'date', np.datetime64) ] BOOKMAKERS_MAPPING = { 'B365': 'bet365', 'LB': 'ladbrokers', 'GB': 'gamebookers', 'BbMx': 'betbrain_maximum', 'BbAv': 'betbrain_average', 'BW': 'betwin', 'BS': 'bluesquare', } CONFIG = [ ('B365AH', 'bet365_size_of_handicap_home_team', object), ('LBAH', 'ladbrokes_size_of_handicap_home_team', object), ('BbAHh', 'betbrain_size_of_handicap_home_team', object), ('GBAH', 'gamebookers_size_of_handicap_home_team', object), ('AHh', 'market_size_of_handicap_home_team', object), ('AHCh', 'market_closing_size_of_handicap_home_team', object), ('B365H', 'bet365_home_win_odds', float), ('B365D', 'bet365_draw_odds', float), ('B365A', 'bet365_away_win_odds', float), ('B365>2.5', 'bet365_over_2.5_odds', float), ('B365<2.5', 'bet365_under_2.5_odds', float), ('B365AHH', 'bet365_asian_handicap_home_team_odds', float), ('B365AHA', 'bet365_asian_handicap_away_team_odds', float), ('B365CH', 'bet365_closing_home_win_odds', float), ('B365CD', 'bet365_closing_draw_odds', float), ('B365CA', 'bet365_closing_away_win_odds', float), ('B365C>2.5', 'bet365_closing_over_2.5_odds', float), ('B365C<2.5', 'bet365_closing_under_2.5_odds', float), ('B365CAHH', 'bet365_closing_asian_handicap_home_team_odds', float), ('B365CAHA', 'bet365_closing_asian_handicap_away_team_odds', float), ('BbMxH', 'betbrain_maximum_home_win_odds', float), ('BbMxD', 'betbrain_maximum_draw_odds', float), ('BbMxA', 'betbrain_maximum_away_win_odds', float), ('BbMx>2.5', 'betbrain_maximum_over_2.5_odds', float), ('BbMx<2.5', 'betbrain_maximum_under_2.5_odds', float), ('BbMxAHH', 'betbrain_maximum_asian_handicap_home_team_odds', float), ('BbMxAHA', 'betbrain_maximum_asian_handicap_away_team_odds', float), ('BbAvH', 'betbrain_average_home_win_odds', float), ('BbAvD', 'betbrain_average_draw_win_odds', float), ('BbAvA', 'betbrain_average_away_win_odds', float), ('BbAv>2.5', 'betbrain_average_over_2.5_odds', float), ('BbAv<2.5', 'betbrain_average_under_2.5_odds', float), ('BbAvAHH', 'betbrain_average_asian_handicap_home_team_odds', float), ('BbAvAHA', 'betbrain_average_asian_handicap_away_team_odds', float), ('BWH', 'bet_win_home_win_odds', float), ('BWD', 'bet_win_draw_odds', float), ('BWA', 'bet_win_away_win_odds', float), ('BWCH', 'bet_win_closing_home_win_odds', float), ('BWCD', 'bet_win_closing_draw_odds', float), ('BWCA', 'bet_win_closing_away_win_odds', float), ('BSH', 'blue_square_home_win_odds', float), ('BSD', 'blue_square_draw_odds', float), ('BSA', 'blue_square_away_win_odds', float), ('GBH', 'gamebookers_home_win_odds', float), ('GBD', 'gamebookers_draw_odds', float), ('GBA', 'gamebookers_away_win_odds', float), ('GB>2.5', 'gamebookers_over_2.5_odds', float), ('GB<2.5', 'gamebookers_under_2.5_odds', 
float), ('GBAHH', 'gamebookers_asian_handicap_home_team_odds', float), ('GBAHA', 'gamebookers_asian handicap_away_team_odds', float), ('IWH', 'interwetten_home_win_odds', float), ('IWD', 'interwetten_draw_odds', float), ('IWA', 'interwetten_away_win_odds', float), ('IWCH', 'interwetten_closing_home_win_odds', float), ('IWCD', 'interwetten_closing_draw_odds', float), ('IWCA', 'interwetten_closing_away_win_odds', float), ('LBH', 'ladbrokes_home_win_odds', float), ('LBD', 'ladbrokes_draw_odds', float), ('LBA', 'ladbrokes_away_win_odds', float), ('LBAHH', 'ladbrokes_asian_handicap_home_team_odds', float), ('LBAHA', 'ladbrokes_asian_handicap_away_team_odds', float), ('PSH', 'pinnacle_home_win_odds', float), ('PSD', 'pinnacle_draw_odds', float), ('PSA', 'pinnacle_away_win_odds', float), ('P>2.5', 'pinnacle_over_2.5_odds', float), ('P<2.5', 'pinnacle_under_2.5_odds', float), ('PAHH', 'pinnacle_asian_handicap_home_team_odds', float), ('PAHA', 'pinnacle_asian_handicap_away_team_odds', float), ('PSCH', 'pinnacle_closing_home_win_odds', float), ('PSCD', 'pinnacle_closing_draw_odds', float), ('PSCA', 'pinnacle_closing_away_win_odds', float), ('PC>2.5', 'pinnacle_closing_over_2.5_odds', float), ('PC<2.5', 'pinnacle_closing_under_2.5_odds', float), ('PCAHH', 'pinnacle_closing_asian_handicap_home_team_odds', float), ('PCAHA', 'pinnacle_closing_asian_handicap_away_team_odds', float), ('SOH', 'sporting_odds_home_win_odds', float), ('SOD', 'sporting_odds_draw_odds', float), ('SOA', 'sporting_odds_away_win_odds', float), ('SBH', 'sportingbet_home_win_odds', float), ('SBD', 'sportingbet_draw_odds', float), ('SBA', 'sportingbet_away_win_odds', float), ('SJH', 'stan_james_home_win_odds', float), ('SJD', 'stan_james_draw_odds', float), ('SJA', 'stan_james_away_win_odds', float), ('SYH', 'stanleybet_home_win_odds', float), ('SYD', 'stanleybet_draw_odds', float), ('SYA', 'stanleybet_away_win_odds', float), ('VCH', 'vc_bet_home_win_odds', float), ('VCD', 'vc_bet_draw_odds', float), ('VCA', 'vc_bet_away_win_odds', float), ('VCCH', 'vc_bet_closing_home_win_odds', float), ('VCCD', 'vc_bet_closing_draw_odds', float), ('VCCA', 'vc_bet_closing_away_win_odds', float), ('WHH', 'william_hill_home_win_odds', float), ('WHD', 'william_hill_draw_odds', float), ('WHA', 'william_hill_away_win_odds', float), ('WHCH', 'william_hill_closing_home_win_odds', float), ('WHCD', 'william_hill_closing_draw_odds', float), ('WHCA', 'william_hill_closing_away_win_odds', float), ('MaxH', 'market_maximum_home_win_odds', float), ('MaxD', 'market_maximum_draw_odds', float), ('MaxA', 'market_maximum_away_win_odds', float), ('Max>2.5', 'market_maximum_over_2.5_odds', float), ('Max<2.5', 'market_maximum_under_2.5_odds', float), ('MaxAHH', 'market_maximum_asian_handicap_home_team_odds', float), ('MaxAHA', 'market_maximum_asian_handicap_away_team_odds', float), ('MaxCH', 'market_closing_maximum_home_win_odds', float), ('MaxCD', 'market_closing_maximum_draw_odds', float), ('MaxCA', 'market_closing_maximum_away_win_odds', float), ('MaxC>2.5', 'market_closing_maximum_over_2.5_odds', float), ('MaxC<2.5', 'market_closing_maximum_under_2.5_odds', float), ('MaxCAHH', 'market_closing_maximum_asian_handicap_home_team_odds', float), ('MaxCAHA', 'market_closing_maximum_asian_handicap_away_team_odds', float), ('AvgH', 'market_average_home_win_odds', float), ('AvgD', 'market_average_draw_odds', float), ('AvgA', 'market_average_away_win_odds', float), ('Avg>2.5', 'market_average_over_2.5_odds', float), ('Avg<2.5', 'market_average_under_2.5_odds', float), ('AvgAHH', 
'market_average_asian_handicap_home_team_odds', float), ('AvgAHA', 'market_average_asian_handicap_away_team_odds', float), ('AvgCH', 'market_closing_average_home_win_odds', float), ('AvgCD', 'market_closing_average_draw_odds', float), ('AvgCA', 'market_closing_average_away_win_odds', float), ('AvgC>2.5', 'market_closing_average_over_2.5_odds', float), ('AvgC<2.5', 'market_closing_average_under_2.5_odds', float), ('AvgCAHH', 'market_closing_average_asian_handicap_home_team_odds', float), ('AvgCAHA', 'market_closing_average_asian_handicap_away_team_odds', float), ('FTHG', 'home_team__full_time_goals', int), ('FTAG', 'away_team__full_time_goals', int), ('HTHG', 'home_team__half_time_goals', int), ('HTAG', 'away_team__half_time_goals', int), ('HS', 'home_team__shots', int), ('AS', 'away_team__shots', int), ('HST', 'home_team__shots_on_target', int), ('AST', 'away_team__shots_on_target', int), ('HHW', 'home_team__hit_woodork', int), ('AHW', 'away_team__hit_woodork', int), ('HC', 'home_team__corners', int), ('AC', 'away_team__corners', int), ('HF', 'home_team__fouls_committed', int), ('AF', 'away_team__fouls_committed', int), ('HFKC', 'home_team__free_kicks_conceded', int), ('AFKC', 'away_team__free_kicks_conceded', int), ('HO', 'home_team__offsides', int), ('AO', 'away_team__offsides', int), ('HY', 'home_team__yellow_cards', int), ('AY', 'away_team__yellow_cards', int), ('HR', 'home_team__red_cards', int), ('AR', 'away_team__red_cards', int), ('HBP', 'home_team__bookings_points', float), ('ABP', 'away_team__bookings_points', float) ] def _extract_leagues_urls(leagues_type): html = urlopen(urljoin(URL, 'data.php')) bsObj = BeautifulSoup(html.read(), features='html.parser') return [ el.get('href') for el in bsObj.find(text=leagues_type).find_next().find_all('a') ] def _extract_main_leagues_param_grid(): urls = _extract_leagues_urls('Main Leagues') main_leagues_urls = {} for url in urls: html = urlopen(urljoin(URL, url)) bsObj = BeautifulSoup(html.read(), features='html.parser') league = url.replace('m.php', '').capitalize() main_leagues_urls[league] = [el.get('href') for el in bsObj.find_all('a') if el.get('href').endswith('csv')] main_leagues_param_grid = [] for league, urls in main_leagues_urls.items(): league_param_grid = [] divisions = [] for url in urls: _, year, division = url.split('/') year = datetime.strptime(year[2:], '%y').year div = division.replace('.csv', '') division = div[-1] param_grid = {'league': [league], 'division': division, 'year': [year], 'url': [url], 'Div': [div]} league_param_grid.append(param_grid) divisions.append(division) div_offset = int('0' in divisions) for param_grid in league_param_grid: param_grid['division'] = [int(param_grid['division']) + div_offset] if param_grid['division'] != 'C' else [5] main_leagues_param_grid += league_param_grid return ParameterGrid(main_leagues_param_grid)
MIT License
tomaae/homeassistant-mikrotik_router
custom_components/mikrotik_router/mikrotik_controller.py
MikrotikControllerData.get_interface
python
def get_interface(self):
    self.data["interface"] = parse_api(
        data=self.data["interface"],
        source=self.api.path("/interface"),
        key="default-name",
        key_secondary="name",
        vals=[
            {"name": "default-name"},
            {"name": ".id"},
            {"name": "name", "default_val": "default-name"},
            {"name": "type", "default": "unknown"},
            {"name": "running", "type": "bool"},
            {"name": "enabled", "source": "disabled", "type": "bool", "reverse": True},
            {"name": "port-mac-address", "source": "mac-address"},
            {"name": "comment"},
            {"name": "last-link-down-time"},
            {"name": "last-link-up-time"},
            {"name": "link-downs"},
            {"name": "tx-queue-drop"},
            {"name": "actual-mtu"},
            {"name": "about", "source": ".about", "default": ""},
        ],
        ensure_vals=[
            {"name": "client-ip-address"},
            {"name": "client-mac-address"},
            {"name": "rx-bits-per-second", "default": 0},
            {"name": "tx-bits-per-second", "default": 0},
        ],
        skip=[
            {"name": "type", "value": "bridge"},
            {"name": "type", "value": "ppp-in"},
            {"name": "type", "value": "pptp-in"},
            {"name": "type", "value": "sstp-in"},
            {"name": "type", "value": "l2tp-in"},
            {"name": "type", "value": "pppoe-in"},
            {"name": "type", "value": "ovpn-in"},
        ],
    )
    self.data["interface"] = parse_api(
        data=self.data["interface"],
        source=self.api.path("/interface/ethernet"),
        key="default-name",
        key_secondary="name",
        vals=[
            {"name": "default-name"},
            {"name": "name", "default_val": "default-name"},
            {"name": "poe-out", "default": "N/A"},
            {"name": "sfp-shutdown-temperature", "default": ""},
        ],
        skip=[
            {"name": "type", "value": "bridge"},
            {"name": "type", "value": "ppp-in"},
            {"name": "type", "value": "pptp-in"},
            {"name": "type", "value": "sstp-in"},
            {"name": "type", "value": "l2tp-in"},
            {"name": "type", "value": "pppoe-in"},
            {"name": "type", "value": "ovpn-in"},
        ],
    )
    for uid, vals in self.data["interface"].items():
        if vals["default-name"] == "":
            self.data["interface"][uid]["default-name"] = vals["name"]
        self.data["interface"][uid]["port-mac-address"] = f"{vals['port-mac-address']}-{vals['name']}"
        if "sfp-shutdown-temperature" in vals and vals["sfp-shutdown-temperature"] != "":
            self.data["interface"] = parse_api(
                data=self.data["interface"],
                source=self.api.get_sfp(vals[".id"]),
                key_search="name",
                vals=[
                    {"name": "status", "default": "unknown"},
                    {"name": "auto-negotiation", "default": "unknown"},
                    {"name": "advertising", "default": "unknown"},
                    {"name": "link-partner-advertising", "default": "unknown"},
                    {"name": "sfp-temperature", "default": "unknown"},
                    {"name": "sfp-supply-voltage", "default": "unknown"},
                    {"name": "sfp-module-present", "default": "unknown"},
                    {"name": "sfp-tx-bias-current", "default": "unknown"},
                    {"name": "sfp-tx-power", "default": "unknown"},
                    {"name": "sfp-rx-power", "default": "unknown"},
                    {"name": "sfp-rx-loss", "default": "unknown"},
                    {"name": "sfp-tx-fault", "default": "unknown"},
                    {"name": "sfp-type", "default": "unknown"},
                    {"name": "sfp-connector-type", "default": "unknown"},
                    {"name": "sfp-vendor-name", "default": "unknown"},
                    {"name": "sfp-vendor-part-number", "default": "unknown"},
                    {"name": "sfp-vendor-revision", "default": "unknown"},
                    {"name": "sfp-vendor-serial", "default": "unknown"},
                    {"name": "sfp-manufacturing-date", "default": "unknown"},
                    {"name": "eeprom-checksum", "default": "unknown"},
                ],
            )
Get all interface data from the Mikrotik device
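For orientation, a single entry of ``self.data["interface"]`` built by the ``parse_api`` calls above might look roughly as follows. The keys mirror the ``vals``/``ensure_vals`` specifications; the concrete values are made up for illustration::

    self.data["interface"]["ether1"] = {
        "default-name": "ether1",
        ".id": "*1",
        "name": "ether1",
        "type": "ether",
        "running": True,
        "enabled": True,
        "port-mac-address": "AA:BB:CC:DD:EE:FF-ether1",
        "comment": "uplink",
        "actual-mtu": "1500",
        "poe-out": "N/A",
        "rx-bits-per-second": 0,
        "tx-bits-per-second": 0,
    }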
https://github.com/tomaae/homeassistant-mikrotik_router/blob/3e4223c52553678175626d98f5b55cfa71d4b9d4/custom_components/mikrotik_router/mikrotik_controller.py#L626-L731
import asyncio import ipaddress import logging import re import pytz from datetime import datetime, timedelta from ipaddress import ip_address, IPv4Network from mac_vendor_lookup import AsyncMacLookup from homeassistant.core import callback from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.dt import utcnow from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.const import ( CONF_NAME, CONF_HOST, CONF_PORT, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME, CONF_PASSWORD, CONF_SSL, ) from .const import ( DOMAIN, CONF_TRACK_IFACE_CLIENTS, DEFAULT_TRACK_IFACE_CLIENTS, CONF_TRACK_HOSTS, DEFAULT_TRACK_HOSTS, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL, DEFAULT_UNIT_OF_MEASUREMENT, CONF_SENSOR_PORT_TRAFFIC, DEFAULT_SENSOR_PORT_TRAFFIC, CONF_SENSOR_CLIENT_TRAFFIC, DEFAULT_SENSOR_CLIENT_TRAFFIC, CONF_SENSOR_SIMPLE_QUEUES, DEFAULT_SENSOR_SIMPLE_QUEUES, CONF_SENSOR_NAT, DEFAULT_SENSOR_NAT, CONF_SENSOR_MANGLE, DEFAULT_SENSOR_MANGLE, CONF_SENSOR_FILTER, DEFAULT_SENSOR_FILTER, CONF_SENSOR_KIDCONTROL, DEFAULT_SENSOR_KIDCONTROL, CONF_SENSOR_PPP, DEFAULT_SENSOR_PPP, CONF_SENSOR_SCRIPTS, DEFAULT_SENSOR_SCRIPTS, CONF_SENSOR_ENVIRONMENT, DEFAULT_SENSOR_ENVIRONMENT, ) from .exceptions import ApiEntryNotFound from .helper import parse_api from .mikrotikapi import MikrotikAPI _LOGGER = logging.getLogger(__name__) DEFAULT_TIME_ZONE = None def is_valid_ip(address): try: ipaddress.ip_address(address) return True except ValueError: return False def utc_from_timestamp(timestamp: float) -> datetime: return pytz.utc.localize(datetime.utcfromtimestamp(timestamp)) def as_local(dattim: datetime) -> datetime: if dattim.tzinfo == DEFAULT_TIME_ZONE: return dattim if dattim.tzinfo is None: dattim = pytz.utc.localize(dattim) return dattim.astimezone(DEFAULT_TIME_ZONE) class MikrotikControllerData: def __init__(self, hass, config_entry): self.hass = hass self.config_entry = config_entry self.name = config_entry.data[CONF_NAME] self.host = config_entry.data[CONF_HOST] self.data = { "routerboard": {}, "resource": {}, "health": {}, "interface": {}, "bridge": {}, "bridge_host": {}, "arp": {}, "nat": {}, "kid-control": {}, "mangle": {}, "filter": {}, "ppp_secret": {}, "ppp_active": {}, "fw-update": {}, "script": {}, "queue": {}, "dns": {}, "dhcp-server": {}, "dhcp-network": {}, "dhcp": {}, "capsman_hosts": {}, "wireless_hosts": {}, "host": {}, "host_hass": {}, "accounting": {}, "environment": {}, } self.listeners = [] self.lock = asyncio.Lock() self.lock_ping = asyncio.Lock() self.api = MikrotikAPI( config_entry.data[CONF_HOST], config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD], config_entry.data[CONF_PORT], config_entry.data[CONF_SSL], ) self.api_ping = MikrotikAPI( config_entry.data[CONF_HOST], config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD], config_entry.data[CONF_PORT], config_entry.data[CONF_SSL], ) self.nat_removed = {} self.mangle_removed = {} self.filter_removed = {} self.host_hass_recovered = False self.host_tracking_initialized = False self.support_capsman = False self.support_wireless = False self.support_ppp = False self.major_fw_version = 0 self._force_update_callback = None self._force_fwupdate_check_callback = None self._async_ping_tracked_hosts_callback = None self.async_mac_lookup = AsyncMacLookup() async def async_init(self): self._force_update_callback = async_track_time_interval( self.hass, self.force_update, self.option_scan_interval 
) self._force_fwupdate_check_callback = async_track_time_interval( self.hass, self.force_fwupdate_check, timedelta(hours=1) ) self._async_ping_tracked_hosts_callback = async_track_time_interval( self.hass, self.async_ping_tracked_hosts, timedelta(seconds=15) ) @property def option_track_iface_clients(self): return self.config_entry.options.get( CONF_TRACK_IFACE_CLIENTS, DEFAULT_TRACK_IFACE_CLIENTS ) @property def option_track_network_hosts(self): return self.config_entry.options.get(CONF_TRACK_HOSTS, DEFAULT_TRACK_HOSTS) @property def option_sensor_port_traffic(self): return self.config_entry.options.get( CONF_SENSOR_PORT_TRAFFIC, DEFAULT_SENSOR_PORT_TRAFFIC ) @property def option_sensor_client_traffic(self): return self.config_entry.options.get( CONF_SENSOR_CLIENT_TRAFFIC, DEFAULT_SENSOR_CLIENT_TRAFFIC ) @property def option_sensor_simple_queues(self): return self.config_entry.options.get( CONF_SENSOR_SIMPLE_QUEUES, DEFAULT_SENSOR_SIMPLE_QUEUES ) @property def option_sensor_nat(self): return self.config_entry.options.get(CONF_SENSOR_NAT, DEFAULT_SENSOR_NAT) @property def option_sensor_mangle(self): return self.config_entry.options.get(CONF_SENSOR_MANGLE, DEFAULT_SENSOR_MANGLE) @property def option_sensor_filter(self): return self.config_entry.options.get(CONF_SENSOR_FILTER, DEFAULT_SENSOR_FILTER) @property def option_sensor_kidcontrol(self): return self.config_entry.options.get( CONF_SENSOR_KIDCONTROL, DEFAULT_SENSOR_KIDCONTROL ) @property def option_sensor_ppp(self): return self.config_entry.options.get(CONF_SENSOR_PPP, DEFAULT_SENSOR_PPP) @property def option_sensor_scripts(self): return self.config_entry.options.get( CONF_SENSOR_SCRIPTS, DEFAULT_SENSOR_SCRIPTS ) @property def option_sensor_environment(self): return self.config_entry.options.get( CONF_SENSOR_ENVIRONMENT, DEFAULT_SENSOR_ENVIRONMENT ) @property def option_scan_interval(self): scan_interval = self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ) return timedelta(seconds=scan_interval) @property def option_unit_of_measurement(self): return self.config_entry.options.get( CONF_UNIT_OF_MEASUREMENT, DEFAULT_UNIT_OF_MEASUREMENT ) @property def signal_update(self): return f"{DOMAIN}-update-{self.name}" async def async_reset(self): for unsub_dispatcher in self.listeners: unsub_dispatcher() self.listeners = [] return True def connected(self): return self.api.connected() def set_value(self, path, param, value, mod_param, mod_value): return self.api.update(path, param, value, mod_param, mod_value) def execute(self, path, command, param, value): return self.api.execute(path, command, param, value) def run_script(self, name): if type(name) != str: if "router" not in name.data: return if self.config_entry.data["name"] != name.data.get("router"): return if "script" in name.data: name = name.data.get("script") else: return try: self.api.run_script(name) except ApiEntryNotFound as error: _LOGGER.error("Failed to run script: %s", error) def get_capabilities(self): packages = parse_api( data={}, source=self.api.path("/system/package"), key="name", vals=[ {"name": "name"}, { "name": "enabled", "source": "disabled", "type": "bool", "reverse": True, }, ], ) if "ppp" in packages: self.support_ppp = packages["ppp"]["enabled"] if "wireless" in packages: self.support_capsman = packages["wireless"]["enabled"] self.support_wireless = packages["wireless"]["enabled"] else: self.support_capsman = False self.support_wireless = False if self.major_fw_version >= 7: self.support_capsman = True self.support_wireless = True 
self.support_ppp = True async def async_get_host_hass(self): registry = await self.hass.helpers.entity_registry.async_get_registry() for entity in registry.entities.values(): if ( entity.config_entry_id == self.config_entry.entry_id and entity.domain == DEVICE_TRACKER_DOMAIN and "-host-" in entity.unique_id ): _, mac = entity.unique_id.split("-host-", 2) self.data["host_hass"][mac] = entity.original_name async def async_hwinfo_update(self): try: await asyncio.wait_for(self.lock.acquire(), timeout=30) except: return await self.hass.async_add_executor_job(self.get_firmware_update) if self.api.connected(): await self.hass.async_add_executor_job(self.get_capabilities) if self.api.connected(): await self.hass.async_add_executor_job(self.get_system_routerboard) if self.api.connected(): await self.hass.async_add_executor_job(self.get_system_resource) if self.api.connected() and self.option_sensor_scripts: await self.hass.async_add_executor_job(self.get_script) if self.api.connected(): await self.hass.async_add_executor_job(self.get_dhcp_network) if self.api.connected(): await self.hass.async_add_executor_job(self.get_dns) self.lock.release() @callback async def force_fwupdate_check(self, _now=None): await self.async_fwupdate_check() async def async_fwupdate_check(self): await self.hass.async_add_executor_job(self.get_firmware_update) async_dispatcher_send(self.hass, self.signal_update) @callback async def async_ping_tracked_hosts(self, _now=None): if not self.option_track_network_hosts: return try: await asyncio.wait_for(self.lock_ping.acquire(), timeout=3) except: return for uid in list(self.data["host"]): if not self.host_tracking_initialized: for key, default in zip( [ "address", "mac-address", "interface", "host-name", "last-seen", "available", ], ["unknown", "unknown", "unknown", "unknown", False, False], ): if key not in self.data["host"][uid]: self.data["host"][uid][key] = default if ( self.data["host"][uid]["source"] not in ["capsman", "wireless"] and self.data["host"][uid]["address"] != "unknown" and self.data["host"][uid]["interface"] != "unknown" ): tmp_interface = self.data["host"][uid]["interface"] if uid in self.data["arp"] and self.data["arp"][uid]["bridge"] != "": tmp_interface = self.data["arp"][uid]["bridge"] _LOGGER.debug( "Ping host: %s (%s)", uid, self.data["host"][uid]["address"] ) self.data["host"][uid][ "available" ] = await self.hass.async_add_executor_job( self.api_ping.arp_ping, self.data["host"][uid]["address"], tmp_interface, ) if self.data["host"][uid]["available"]: self.data["host"][uid]["last-seen"] = utcnow() self.host_tracking_initialized = True self.lock_ping.release() @callback async def force_update(self, _now=None): await self.async_update() async def async_update(self): if self.api.has_reconnected(): await self.async_hwinfo_update() try: await asyncio.wait_for(self.lock.acquire(), timeout=10) except: return await self.hass.async_add_executor_job(self.get_interface) if self.api.connected() and "available" not in self.data["fw-update"]: await self.async_fwupdate_check() if self.api.connected() and not self.data["host_hass"]: await self.async_get_host_hass() if self.api.connected() and self.support_capsman: await self.hass.async_add_executor_job(self.get_capsman_hosts) if self.api.connected() and self.support_wireless: await self.hass.async_add_executor_job(self.get_wireless_hosts) if self.api.connected(): await self.hass.async_add_executor_job(self.get_bridge) if self.api.connected(): await self.hass.async_add_executor_job(self.get_arp) if 
self.api.connected(): await self.hass.async_add_executor_job(self.get_dhcp) if self.api.connected(): await self.async_process_host() if self.api.connected() and self.option_sensor_port_traffic: await self.hass.async_add_executor_job(self.get_interface_traffic) if self.api.connected(): await self.hass.async_add_executor_job(self.process_interface_client) if self.api.connected() and self.option_sensor_nat: await self.hass.async_add_executor_job(self.get_nat) if self.api.connected() and self.option_sensor_kidcontrol: await self.hass.async_add_executor_job(self.get_kidcontrol) if self.api.connected() and self.option_sensor_mangle: await self.hass.async_add_executor_job(self.get_mangle) if self.api.connected() and self.option_sensor_filter: await self.hass.async_add_executor_job(self.get_filter) if self.api.connected() and self.support_ppp and self.option_sensor_ppp: await self.hass.async_add_executor_job(self.get_ppp) if self.api.connected(): await self.hass.async_add_executor_job(self.get_system_resource) if ( self.api.connected() and self.option_sensor_client_traffic and 0 < self.major_fw_version < 7 ): await self.hass.async_add_executor_job(self.process_accounting) if self.api.connected() and self.option_sensor_simple_queues: await self.hass.async_add_executor_job(self.get_queue) if self.api.connected() and self.option_sensor_environment: await self.hass.async_add_executor_job(self.get_environment) if self.api.connected(): await self.hass.async_add_executor_job(self.get_system_health) async_dispatcher_send(self.hass, self.signal_update) self.lock.release()
Apache License 2.0
lbryio/torba
torba/rpc/session.py
MessageSession.handle_message
python
async def handle_message(self, message):
    pass
message is a (command, payload) pair.
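A minimal sketch of how a concrete session could override this hook; the ``ping``/``pong`` command handling is purely illustrative and not part of torba's protocol, and it assumes the framer accepts the same (command, payload) pair for outgoing messages::

    class EchoSession(MessageSession):
        async def handle_message(self, message):
            command, payload = message
            if command == b'ping':
                # _send_message() is inherited from SessionBase
                await self._send_message((b'pong', payload))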
https://github.com/lbryio/torba/blob/190304344c0ff68f8a24cf50272307a11bf7f62b/torba/rpc/session.py#L324-L326
__all__ = ('Connector', 'RPCSession', 'MessageSession', 'Server', 'BatchError') import asyncio from asyncio import Event, CancelledError import logging import time from contextlib import suppress from torba.tasks import TaskGroup from .jsonrpc import Request, JSONRPCConnection, JSONRPCv2, JSONRPC, Batch, Notification from .jsonrpc import RPCError, ProtocolError from .framing import BadMagicError, BadChecksumError, OversizedPayloadError, BitcoinFramer, NewlineFramer from .util import Concurrency class Connector: def __init__(self, session_factory, host=None, port=None, proxy=None, **kwargs): self.session_factory = session_factory self.host = host self.port = port self.proxy = proxy self.loop = kwargs.get('loop', asyncio.get_event_loop()) self.kwargs = kwargs async def create_connection(self): connector = self.proxy or self.loop return await connector.create_connection( self.session_factory, self.host, self.port, **self.kwargs) async def __aenter__(self): transport, self.protocol = await self.create_connection() self.protocol.bw_limit = 0 return self.protocol async def __aexit__(self, exc_type, exc_value, traceback): await self.protocol.close() class SessionBase(asyncio.Protocol): max_errors = 10 def __init__(self, *, framer=None, loop=None): self.framer = framer or self.default_framer() self.loop = loop or asyncio.get_event_loop() self.logger = logging.getLogger(self.__class__.__name__) self.transport = None self._address = None self._proxy_address = None self.verbosity = 0 self._can_send = Event() self._can_send.set() self._pm_task = None self._task_group = TaskGroup(self.loop) self.max_send_delay = 60 self.start_time = time.time() self.errors = 0 self.send_count = 0 self.send_size = 0 self.last_send = self.start_time self.recv_count = 0 self.recv_size = 0 self.last_recv = self.start_time self.bw_limit = 2000000 self.bw_time = self.start_time self.bw_charge = 0 self.max_concurrent = 6 self._concurrency = Concurrency(self.max_concurrent) async def _update_concurrency(self): if self.bw_limit <= 0: return now = time.time() refund = (now - self.bw_time) * (self.bw_limit / 3600) self.bw_charge = max(0, self.bw_charge - int(refund)) self.bw_time = now throttle = int(self.bw_charge / self.bw_limit) target = max(1, self.max_concurrent - throttle) current = self._concurrency.max_concurrent if target != current: self.logger.info(f'changing task concurrency from {current} ' f'to {target}') await self._concurrency.set_max_concurrent(target) def _using_bandwidth(self, size): self.bw_charge += size async def _limited_wait(self, secs): try: await asyncio.wait_for(self._can_send.wait(), secs) except asyncio.TimeoutError: self.abort() raise asyncio.CancelledError(f'task timed out after {secs}s') async def _send_message(self, message): if not self._can_send.is_set(): await self._limited_wait(self.max_send_delay) if not self.is_closing(): framed_message = self.framer.frame(message) self.send_size += len(framed_message) self._using_bandwidth(len(framed_message)) self.send_count += 1 self.last_send = time.time() if self.verbosity >= 4: self.logger.debug(f'Sending framed message {framed_message}') self.transport.write(framed_message) def _bump_errors(self): self.errors += 1 if self.errors >= self.max_errors: self._close() def _close(self): if self.transport: self.transport.close() def data_received(self, framed_message): if self.verbosity >= 4: self.logger.debug(f'Received framed message {framed_message}') self.recv_size += len(framed_message) self._using_bandwidth(len(framed_message)) 
self.framer.received_bytes(framed_message) def pause_writing(self): if not self.is_closing(): self._can_send.clear() self.transport.pause_reading() def resume_writing(self): if not self._can_send.is_set(): self._can_send.set() self.transport.resume_reading() def connection_made(self, transport): self.transport = transport peer_address = transport.get_extra_info('peername') if self._address: self._proxy_address = peer_address else: self._address = peer_address self._pm_task = self.loop.create_task(self._receive_messages()) def connection_lost(self, exc): self._address = None self.transport = None self._task_group.cancel() self._pm_task.cancel() self._can_send.set() def default_framer(self): raise NotImplementedError def peer_address(self): return self._address def peer_address_str(self): if not self._address: return 'unknown' ip_addr_str, port = self._address[:2] if ':' in ip_addr_str: return f'[{ip_addr_str}]:{port}' else: return f'{ip_addr_str}:{port}' def is_closing(self): return not self.transport or self.transport.is_closing() def abort(self): if self.transport: self.transport.abort() async def close(self, *, force_after=30): self._close() if self._pm_task: with suppress(CancelledError): await asyncio.wait([self._pm_task], timeout=force_after) self.abort() await self._pm_task class MessageSession(SessionBase): async def _receive_messages(self): while not self.is_closing(): try: message = await self.framer.receive_message() except BadMagicError as e: magic, expected = e.args self.logger.error( f'bad network magic: got {magic} expected {expected}, ' f'disconnecting' ) self._close() except OversizedPayloadError as e: command, payload_len = e.args self.logger.error( f'oversized payload of {payload_len:,d} bytes to command ' f'{command}, disconnecting' ) self._close() except BadChecksumError as e: payload_checksum, claimed_checksum = e.args self.logger.warning( f'checksum mismatch: actual {payload_checksum.hex()} ' f'vs claimed {claimed_checksum.hex()}' ) self._bump_errors() else: self.last_recv = time.time() self.recv_count += 1 if self.recv_count % 10 == 0: await self._update_concurrency() await self._task_group.add(self._throttled_message(message)) async def _throttled_message(self, message): async with self._concurrency.semaphore: try: await self.handle_message(message) except ProtocolError as e: self.logger.error(f'{e}') self._bump_errors() except CancelledError: raise except Exception: self.logger.exception(f'exception handling {message}') self._bump_errors() def default_framer(self): return BitcoinFramer(bytes.fromhex('e3e1f3e8'), 128_000_000)
MIT License
docusign/docusign-python-client
docusign_esign/models/report_in_product_run_request.py
ReportInProductRunRequest.sort_direction
python
def sort_direction(self, sort_direction):
    self._sort_direction = sort_direction
Sets the sort_direction of this ReportInProductRunRequest.  # noqa: E501

:param sort_direction: The sort_direction of this ReportInProductRunRequest.  # noqa: E501
:type: str
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/report_in_product_run_request.py#L618-L627
import pprint import re import six from docusign_esign.client.configuration import Configuration class ReportInProductRunRequest(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'authentication_success_filter': 'str', 'custom_field_filter': 'str', 'date_range_custom_from_date': 'str', 'date_range_custom_to_date': 'str', 'date_range_filter': 'str', 'envelope_date_type_filter': 'str', 'envelope_recipient_name_contains_filter': 'str', 'envelope_status_filter': 'str', 'envelope_subject_contains_filter': 'str', 'fields': 'list[ReportInProductField]', 'for_download': 'str', 'is_dashboard': 'str', 'period_length_filter': 'str', 'report_customized_id': 'str', 'report_description': 'str', 'report_id': 'str', 'report_invocation_type': 'str', 'report_name': 'str', 'sent_by_filter': 'str', 'sent_by_ids': 'str', 'sort_direction': 'str', 'sort_field': 'str', 'start_position': 'str', 'verification_status_filter': 'str' } attribute_map = { 'authentication_success_filter': 'authenticationSuccessFilter', 'custom_field_filter': 'customFieldFilter', 'date_range_custom_from_date': 'dateRangeCustomFromDate', 'date_range_custom_to_date': 'dateRangeCustomToDate', 'date_range_filter': 'dateRangeFilter', 'envelope_date_type_filter': 'envelopeDateTypeFilter', 'envelope_recipient_name_contains_filter': 'envelopeRecipientNameContainsFilter', 'envelope_status_filter': 'envelopeStatusFilter', 'envelope_subject_contains_filter': 'envelopeSubjectContainsFilter', 'fields': 'fields', 'for_download': 'forDownload', 'is_dashboard': 'isDashboard', 'period_length_filter': 'periodLengthFilter', 'report_customized_id': 'reportCustomizedId', 'report_description': 'reportDescription', 'report_id': 'reportId', 'report_invocation_type': 'reportInvocationType', 'report_name': 'reportName', 'sent_by_filter': 'sentByFilter', 'sent_by_ids': 'sentByIds', 'sort_direction': 'sortDirection', 'sort_field': 'sortField', 'start_position': 'startPosition', 'verification_status_filter': 'verificationStatusFilter' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._authentication_success_filter = None self._custom_field_filter = None self._date_range_custom_from_date = None self._date_range_custom_to_date = None self._date_range_filter = None self._envelope_date_type_filter = None self._envelope_recipient_name_contains_filter = None self._envelope_status_filter = None self._envelope_subject_contains_filter = None self._fields = None self._for_download = None self._is_dashboard = None self._period_length_filter = None self._report_customized_id = None self._report_description = None self._report_id = None self._report_invocation_type = None self._report_name = None self._sent_by_filter = None self._sent_by_ids = None self._sort_direction = None self._sort_field = None self._start_position = None self._verification_status_filter = None self.discriminator = None setattr(self, "_{}".format('authentication_success_filter'), kwargs.get('authentication_success_filter', None)) setattr(self, "_{}".format('custom_field_filter'), kwargs.get('custom_field_filter', None)) setattr(self, "_{}".format('date_range_custom_from_date'), kwargs.get('date_range_custom_from_date', None)) setattr(self, "_{}".format('date_range_custom_to_date'), kwargs.get('date_range_custom_to_date', None)) setattr(self, 
"_{}".format('date_range_filter'), kwargs.get('date_range_filter', None)) setattr(self, "_{}".format('envelope_date_type_filter'), kwargs.get('envelope_date_type_filter', None)) setattr(self, "_{}".format('envelope_recipient_name_contains_filter'), kwargs.get('envelope_recipient_name_contains_filter', None)) setattr(self, "_{}".format('envelope_status_filter'), kwargs.get('envelope_status_filter', None)) setattr(self, "_{}".format('envelope_subject_contains_filter'), kwargs.get('envelope_subject_contains_filter', None)) setattr(self, "_{}".format('fields'), kwargs.get('fields', None)) setattr(self, "_{}".format('for_download'), kwargs.get('for_download', None)) setattr(self, "_{}".format('is_dashboard'), kwargs.get('is_dashboard', None)) setattr(self, "_{}".format('period_length_filter'), kwargs.get('period_length_filter', None)) setattr(self, "_{}".format('report_customized_id'), kwargs.get('report_customized_id', None)) setattr(self, "_{}".format('report_description'), kwargs.get('report_description', None)) setattr(self, "_{}".format('report_id'), kwargs.get('report_id', None)) setattr(self, "_{}".format('report_invocation_type'), kwargs.get('report_invocation_type', None)) setattr(self, "_{}".format('report_name'), kwargs.get('report_name', None)) setattr(self, "_{}".format('sent_by_filter'), kwargs.get('sent_by_filter', None)) setattr(self, "_{}".format('sent_by_ids'), kwargs.get('sent_by_ids', None)) setattr(self, "_{}".format('sort_direction'), kwargs.get('sort_direction', None)) setattr(self, "_{}".format('sort_field'), kwargs.get('sort_field', None)) setattr(self, "_{}".format('start_position'), kwargs.get('start_position', None)) setattr(self, "_{}".format('verification_status_filter'), kwargs.get('verification_status_filter', None)) @property def authentication_success_filter(self): return self._authentication_success_filter @authentication_success_filter.setter def authentication_success_filter(self, authentication_success_filter): self._authentication_success_filter = authentication_success_filter @property def custom_field_filter(self): return self._custom_field_filter @custom_field_filter.setter def custom_field_filter(self, custom_field_filter): self._custom_field_filter = custom_field_filter @property def date_range_custom_from_date(self): return self._date_range_custom_from_date @date_range_custom_from_date.setter def date_range_custom_from_date(self, date_range_custom_from_date): self._date_range_custom_from_date = date_range_custom_from_date @property def date_range_custom_to_date(self): return self._date_range_custom_to_date @date_range_custom_to_date.setter def date_range_custom_to_date(self, date_range_custom_to_date): self._date_range_custom_to_date = date_range_custom_to_date @property def date_range_filter(self): return self._date_range_filter @date_range_filter.setter def date_range_filter(self, date_range_filter): self._date_range_filter = date_range_filter @property def envelope_date_type_filter(self): return self._envelope_date_type_filter @envelope_date_type_filter.setter def envelope_date_type_filter(self, envelope_date_type_filter): self._envelope_date_type_filter = envelope_date_type_filter @property def envelope_recipient_name_contains_filter(self): return self._envelope_recipient_name_contains_filter @envelope_recipient_name_contains_filter.setter def envelope_recipient_name_contains_filter(self, envelope_recipient_name_contains_filter): self._envelope_recipient_name_contains_filter = envelope_recipient_name_contains_filter @property def 
envelope_status_filter(self): return self._envelope_status_filter @envelope_status_filter.setter def envelope_status_filter(self, envelope_status_filter): self._envelope_status_filter = envelope_status_filter @property def envelope_subject_contains_filter(self): return self._envelope_subject_contains_filter @envelope_subject_contains_filter.setter def envelope_subject_contains_filter(self, envelope_subject_contains_filter): self._envelope_subject_contains_filter = envelope_subject_contains_filter @property def fields(self): return self._fields @fields.setter def fields(self, fields): self._fields = fields @property def for_download(self): return self._for_download @for_download.setter def for_download(self, for_download): self._for_download = for_download @property def is_dashboard(self): return self._is_dashboard @is_dashboard.setter def is_dashboard(self, is_dashboard): self._is_dashboard = is_dashboard @property def period_length_filter(self): return self._period_length_filter @period_length_filter.setter def period_length_filter(self, period_length_filter): self._period_length_filter = period_length_filter @property def report_customized_id(self): return self._report_customized_id @report_customized_id.setter def report_customized_id(self, report_customized_id): self._report_customized_id = report_customized_id @property def report_description(self): return self._report_description @report_description.setter def report_description(self, report_description): self._report_description = report_description @property def report_id(self): return self._report_id @report_id.setter def report_id(self, report_id): self._report_id = report_id @property def report_invocation_type(self): return self._report_invocation_type @report_invocation_type.setter def report_invocation_type(self, report_invocation_type): self._report_invocation_type = report_invocation_type @property def report_name(self): return self._report_name @report_name.setter def report_name(self, report_name): self._report_name = report_name @property def sent_by_filter(self): return self._sent_by_filter @sent_by_filter.setter def sent_by_filter(self, sent_by_filter): self._sent_by_filter = sent_by_filter @property def sent_by_ids(self): return self._sent_by_ids @sent_by_ids.setter def sent_by_ids(self, sent_by_ids): self._sent_by_ids = sent_by_ids @property def sort_direction(self): return self._sort_direction @sort_direction.setter
MIT License
clusterhq/flocker
admin/requirements.py
update_requirements_main
python
def update_requirements_main(args, base_path, top_level):
    options = UpdateRequirementsOptions()
    try:
        options.parseOptions(args)
    except UsageError as e:
        sys.stderr.write(
            u"{}\n"
            u"Usage Error: {}: {}\n".format(
                unicode(options), base_path.basename(), e
            ).encode('utf-8')
        )
        raise SystemExit(1)
    requirements_directory = top_level.child('requirements')
    dockerfile = top_level.descendant(["admin", "requirements.Dockerfile"])
    if not options["no-build"]:
        build_requirements_image(
            REQUIREMENTS_IMAGE, dockerfile, requirements_directory
        )
    for infile in requirements_directory.globChildren("*.in"):
        requirements_from_infile(infile)
The main entry point for ``update-requirements``.
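A sketch of how this entry point might be wired up as a console script; the wrapper below, including how the repository root is located, is an assumption rather than Flocker's actual packaging::

    import sys
    from twisted.python.filepath import FilePath

    def main():
        base_path = FilePath(sys.argv[0])
        top_level = base_path.parent().parent()  # assumed repository root
        update_requirements_main(sys.argv[1:], base_path, top_level)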
https://github.com/clusterhq/flocker/blob/eaa586248986d7cd681c99c948546c2b507e44de/admin/requirements.py#L85-L114
import sys from contextlib import contextmanager import os import shutil from subprocess import check_call from tempfile import NamedTemporaryFile, mkdtemp from twisted.python.usage import Options, UsageError from twisted.python.filepath import FilePath ALPINE_IMAGE = "alpine:latest" REQUIREMENTS_IMAGE = "clusterhq/flocker_update_requirements" @contextmanager def temporary_directory(suffix): temporary_directory = FilePath(mkdtemp(suffix=suffix)) try: yield temporary_directory finally: temporary_directory.remove() def requirements_from_infile(infile): outfile = infile.sibling(infile.basename()[:-len(".in")]) with NamedTemporaryFile( prefix="{}.".format(outfile.basename()), suffix=".created-by-update-requirements-entrypoint", dir=os.path.dirname(outfile.parent().path), delete=False, ) as temporary_outfile: print "PROCESSING", infile check_call( ["docker", "run", "--rm", "--volume", "{}:/requirements.txt".format(infile.path), REQUIREMENTS_IMAGE], stdout=temporary_outfile ) shutil.copymode(outfile.path, temporary_outfile.name) os.rename(temporary_outfile.name, outfile.path) class UpdateRequirementsOptions(Options): optFlags = [ ["no-build", False, "Do not rebuild the requirements Docker image."] ] def build_requirements_image(image_tag, dockerfile, requirements_directory): check_call( ["docker", "pull", ALPINE_IMAGE] ) with temporary_directory( suffix=".update-requirements.build_requirements_image" ) as docker_build_directory: dockerfile.copyTo( docker_build_directory.child('Dockerfile') ) dockerfile.sibling("update-requirements-entrypoint").copyTo( docker_build_directory.child('entrypoint') ) requirements_directory.copyTo( docker_build_directory.child('requirements') ) check_call( ["docker", "build", "--tag", image_tag, docker_build_directory.path] )
Apache License 2.0
wikimedia/pywikibot
pywikibot/userinterfaces/gui.py
EditBoxWindow.edit
python
def edit(self, text: str, jumpIndex: Optional[int] = None,
         highlight: Optional[str] = None):
    self.text = None
    self.editbox.insert(tkinter.END, text)
    self.editbox.tag_add('all', '1.0', tkinter.END)
    self.editbox.tag_config('all', wrap=tkinter.WORD)
    if highlight:
        self.find_all(highlight)
    if jumpIndex:
        line = text[:jumpIndex].count('\n') + 1
        column = jumpIndex - (text[:jumpIndex].rfind('\n') + 1)
        self.editbox.see('{}.{}'.format(line, column))
    self.parent.mainloop()
    return self.text
Provide the user with an editor to modify text.

:param text: the text to be edited
:param jumpIndex: position at which to put the caret
:param highlight: each occurrence of this substring will be highlighted
:return: the modified text, or None if the user did not save the text
:rtype: str or None
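An illustrative call, assuming the wikitext to edit is already held in ``old_text``; the variable names and the highlighted template are placeholders::

    editor = EditBoxWindow()
    new_text = editor.edit(old_text, highlight='{{stub}}')
    if new_text is None:
        pywikibot.output('The edit was cancelled; keeping the original text.')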
https://github.com/wikimedia/pywikibot/blob/5097f5b9a7ef9d39f35f17edd11faf3086a01d1d/pywikibot/userinterfaces/gui.py#L375-L406
import tkinter from tkinter import simpledialog as tkSimpleDialog from tkinter.scrolledtext import ScrolledText from typing import Optional import pywikibot from pywikibot import __url__ from pywikibot.backports import Tuple from pywikibot.tools import PYTHON_VERSION if PYTHON_VERSION >= (3, 6): from idlelib import replace as ReplaceDialog from idlelib import search as SearchDialog from idlelib.config import idleConf from idlelib.configdialog import ConfigDialog from idlelib.multicall import MultiCallCreator else: from idlelib import ReplaceDialog, SearchDialog from idlelib.configDialog import ConfigDialog from idlelib.configHandler import idleConf from idlelib.MultiCall import MultiCallCreator class TextEditor(ScrolledText): def __init__(self, master=None, **kwargs): textcf = self._initialize_config(idleConf.CurrentTheme()) if idleConf.GetOption('main', 'EditorWindow', 'font-bold', type='bool'): font_weight = 'bold' else: font_weight = 'normal' textcf['font'] = ( idleConf.GetOption('main', 'EditorWindow', 'font'), idleConf.GetOption('main', 'EditorWindow', 'font-size'), font_weight) textcf.update(kwargs) super().__init__(master, **textcf) def _initialize_config(self, Theme): config = { 'padx': 5, 'wrap': 'word', 'undo': 'True', 'width': idleConf.GetOption('main', 'EditorWindow', 'width'), 'height': idleConf.GetOption('main', 'EditorWindow', 'height'), } if PYTHON_VERSION >= (3, 7, 4): config['foreground'] = idleConf.GetHighlight( Theme, 'normal')['foreground'] config['background'] = idleConf.GetHighlight( Theme, 'normal')['background'] config['highlightcolor'] = idleConf.GetHighlight( Theme, 'hilite')['foreground'] config['highlightbackground'] = idleConf.GetHighlight( Theme, 'hilite')['background'] config['insertbackground'] = idleConf.GetHighlight( Theme, 'cursor')['foreground'] else: config['foreground'] = idleConf.GetHighlight( Theme, 'normal', fgBg='fg') config['background'] = idleConf.GetHighlight( Theme, 'normal', fgBg='bg') config['highlightcolor'] = idleConf.GetHighlight( Theme, 'hilite', fgBg='fg') config['highlightbackground'] = idleConf.GetHighlight( Theme, 'hilite', fgBg='bg') config['insertbackground'] = idleConf.GetHighlight( Theme, 'cursor', fgBg='fg') return config def add_bindings(self): self.bind('<<cut>>', self.cut) self.bind('<<copy>>', self.copy) self.bind('<<paste>>', self.paste) self.bind('<<select-all>>', self.select_all) self.bind('<<remove-selection>>', self.remove_selection) self.bind('<<find>>', self.find_event) self.bind('<<find-again>>', self.find_again_event) self.bind('<<find-selection>>', self.find_selection_event) self.bind('<<replace>>', self.replace_event) self.bind('<<goto-line>>', self.goto_line_event) self.bind('<<del-word-left>>', self.del_word_left) self.bind('<<del-word-right>>', self.del_word_right) keydefs = {'<<copy>>': ['<Control-Key-c>', '<Control-Key-C>'], '<<cut>>': ['<Control-Key-x>', '<Control-Key-X>'], '<<del-word-left>>': ['<Control-Key-BackSpace>'], '<<del-word-right>>': ['<Control-Key-Delete>'], '<<end-of-file>>': ['<Control-Key-d>', '<Control-Key-D>'], '<<find-again>>': ['<Control-Key-g>', '<Key-F3>'], '<<find-selection>>': ['<Control-Key-F3>'], '<<find>>': ['<Control-Key-f>', '<Control-Key-F>'], '<<goto-line>>': ['<Alt-Key-g>', '<Meta-Key-g>'], '<<paste>>': ['<Control-Key-v>', '<Control-Key-V>'], '<<redo>>': ['<Control-Shift-Key-Z>'], '<<remove-selection>>': ['<Key-Escape>'], '<<replace>>': ['<Control-Key-h>', '<Control-Key-H>'], '<<select-all>>': ['<Control-Key-a>'], '<<undo>>': ['<Control-Key-z>', '<Control-Key-Z>'], } for event, 
keylist in keydefs.items(): if keylist: self.event_add(event, *keylist) def cut(self, event): if self.tag_ranges('sel'): self.event_generate('<<Cut>>') return 'break' def copy(self, event): if self.tag_ranges('sel'): self.event_generate('<<Copy>>') return 'break' def paste(self, event): self.event_generate('<<Paste>>') return 'break' def select_all(self, event=None): self.tag_add('sel', '1.0', 'end-1c') self.mark_set('insert', '1.0') self.see('insert') return 'break' def remove_selection(self, event=None): self.tag_remove('sel', '1.0', 'end') self.see('insert') def del_word_left(self, event): self.event_generate('<Meta-Delete>') return 'break' def del_word_right(self, event=None): self.event_generate('<Meta-d>') return 'break' def find_event(self, event=None): if not self.tag_ranges('sel'): found = self.tag_ranges('found') if found: self.tag_add('sel', found[0], found[1]) else: self.tag_add('sel', '1.0', '1.0+1c') SearchDialog.find(self) return 'break' def find_again_event(self, event=None): SearchDialog.find_again(self) return 'break' def find_selection_event(self, event=None): SearchDialog.find_selection(self) return 'break' def replace_event(self, event=None): ReplaceDialog.replace(self) return 'break' def find_all(self, s): if hasattr(self, '_highlight') and self._highlight == s: try: if self.get(tkinter.SEL_FIRST, tkinter.SEL_LAST) == s: return self.find_selection_event(None) found = self.tag_nextrange('found', tkinter.SEL_LAST) except tkinter.TclError: found = self.tag_nextrange('found', tkinter.INSERT) if not found: found = self.tag_nextrange('found', 1.0) if found: self.do_highlight(found[0], found[1]) else: self.tag_remove('found', '1.0', tkinter.END) if s: self._highlight = s idx = '1.0' while True: idx = self.search(s, idx, nocase=1, stopindex=tkinter.END) if not idx: break lastidx = '{}+{}c'.format(idx, len(s)) self.tag_add('found', idx, lastidx) idx = lastidx self.tag_config('found', foreground='red') found = self.tag_nextrange('found', 1.0) if found: self.do_highlight(found[0], found[1]) return None def do_highlight(self, start, end): self.see(start) self.tag_remove(tkinter.SEL, '1.0', tkinter.END) self.tag_add(tkinter.SEL, start, end) self.focus_set() def goto_line_event(self, event): lineno = tkSimpleDialog.askinteger('Goto', 'Go to line number:', parent=self) if lineno is None: return 'break' if lineno <= 0: self.bell() return 'break' self.mark_set('insert', '{}.0'.format(lineno)) self.see('insert') return None class EditBoxWindow(tkinter.Frame): def __init__(self, parent=None, **kwargs): if parent is None: parent = tkinter.Tk() self.parent = parent super().__init__(parent) self.editbox = MultiCallCreator(TextEditor)(self, **kwargs) self.editbox.pack(side=tkinter.TOP) self.editbox.add_bindings() self.bind('<<open-config-dialog>>', self.config_dialog) bottom = tkinter.Frame(parent) bottom_left_frame = tkinter.Frame(bottom) self.textfield = tkinter.Entry(bottom_left_frame) self.textfield.pack(side=tkinter.LEFT, fill=tkinter.X, expand=1) buttonSearch = tkinter.Button(bottom_left_frame, text='Find next', command=self.find) buttonSearch.pack(side=tkinter.RIGHT) bottom_left_frame.pack(side=tkinter.LEFT, expand=1) bottom_right_frame = tkinter.Frame(bottom) buttonOK = tkinter.Button(bottom_right_frame, text='OK', command=self.pressedOK) buttonCancel = tkinter.Button(bottom_right_frame, text='Cancel', command=parent.destroy) buttonOK.pack(side=tkinter.LEFT, fill=tkinter.X) buttonCancel.pack(side=tkinter.RIGHT, fill=tkinter.X) bottom_right_frame.pack(side=tkinter.RIGHT, expand=1) 
bottom.pack(side=tkinter.TOP) menubar = tkinter.Menu(self.parent) findmenu = tkinter.Menu(menubar) findmenu.add_command(label='Find', command=self.editbox.find_event, accelerator='Ctrl+F', underline=0) findmenu.add_command(label='Find again', command=self.editbox.find_again_event, accelerator='Ctrl+G', underline=6) findmenu.add_command(label='Find all', command=self.find_all, underline=5) findmenu.add_command(label='Find selection', command=self.editbox.find_selection_event, accelerator='Ctrl+F3', underline=5) findmenu.add_command(label='Replace', command=self.editbox.replace_event, accelerator='Ctrl+H', underline=0) menubar.add_cascade(label='Find', menu=findmenu, underline=0) editmenu = tkinter.Menu(menubar) editmenu.add_command(label='Cut', command=self.editbox.cut, accelerator='Ctrl+X', underline=2) editmenu.add_command(label='Copy', command=self.editbox.copy, accelerator='Ctrl+C', underline=0) editmenu.add_command(label='Paste', command=self.editbox.paste, accelerator='Ctrl+V', underline=0) editmenu.add_separator() editmenu.add_command(label='Select all', command=self.editbox.select_all, accelerator='Ctrl+A', underline=7) editmenu.add_command(label='Clear selection', command=self.editbox.remove_selection, accelerator='Esc') menubar.add_cascade(label='Edit', menu=editmenu, underline=0) optmenu = tkinter.Menu(menubar) optmenu.add_command(label='Settings...', command=self.config_dialog, underline=0) menubar.add_cascade(label='Options', menu=optmenu, underline=0) self.parent.config(menu=menubar) self.pack()
MIT License
formalchemy/formalchemy
formalchemy/ext/pylons/controller.py
_RESTController.sync
python
def sync(self, fs, id=None): S = self.Session() if id: try: S.merge(fs.model) except AttributeError: S.update(fs.model) else: S.add(fs.model) S.commit()
sync a record. If ``id`` is None add a new record else save current one. Default is::

    S = self.Session()
    if id:
        S.merge(fs.model)
    else:
        S.add(fs.model)
    S.commit()
https://github.com/formalchemy/formalchemy/blob/dd848678444541278e1fbefffc7445b19dfab6dc/formalchemy/ext/pylons/controller.py#L65-L86
import os from paste.urlparser import StaticURLParser from pylons import request, response, session, tmpl_context as c from pylons.controllers.util import abort, redirect from pylons.templating import render_mako as render from pylons import url from webhelpers.paginate import Page from sqlalchemy.orm import class_mapper, object_session from formalchemy.fields import _pk from formalchemy.fields import _stringify from formalchemy import Grid, FieldSet from formalchemy.i18n import get_translator from formalchemy.fields import Field from formalchemy import fatypes try: from formalchemy.ext.couchdb import Document except ImportError: Document = None import simplejson as json def model_url(*args, **kwargs): if 'model_name' in request.environ['pylons.routes_dict'] and 'model_name' not in kwargs: kwargs['model_name'] = request.environ['pylons.routes_dict']['model_name'] return url(*args, **kwargs) class Session(object): def add(self, record): def update(self, record): def delete(self, record): def commit(self): class _RESTController(object): template = '/forms/restfieldset.mako' engine = prefix_name = None FieldSet = FieldSet Grid = Grid pager_args = dict(link_attr={'class': 'ui-pager-link ui-state-default ui-corner-all'}, curpage_attr={'class': 'ui-pager-curpage ui-state-highlight ui-corner-all'}) @property def model_name(self): return request.environ['pylons.routes_dict'].get('model_name', None) def Session(self): return Session() def get_model(self): raise NotImplementedError()
MIT License
stackstorm/st2
contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py
ChainHolder.validate
python
def validate(self): all_nodes = self._get_all_nodes(action_chain=self.actionchain) for node in self.actionchain.chain: on_success_node_name = node.on_success on_failure_node_name = node.on_failure valid_name = self._is_valid_node_name( all_node_names=all_nodes, node_name=on_success_node_name ) if not valid_name: msg = ( 'Unable to find node with name "%s" referenced in "on-success" in ' 'task "%s".' % (on_success_node_name, node.name) ) raise ValueError(msg) valid_name = self._is_valid_node_name( all_node_names=all_nodes, node_name=on_failure_node_name ) if not valid_name: msg = ( 'Unable to find node with name "%s" referenced in "on-failure" in ' 'task "%s".' % (on_failure_node_name, node.name) ) raise ValueError(msg) if self.actionchain.default: valid_name = self._is_valid_node_name( all_node_names=all_nodes, node_name=self.actionchain.default ) if not valid_name: msg = ( 'Unable to find node with name "%s" referenced in "default".' % self.actionchain.default ) raise ValueError(msg) return True
Function which performs a simple compile time validation. Keep in mind that some variables are only resolved during run time which means we can perform only simple validation during compile / create time.
https://github.com/stackstorm/st2/blob/924da218c7cb6ddcbffb551b45810214ffdf31cb/contrib/runners/action_chain_runner/action_chain_runner/action_chain_runner.py#L90-L136
from __future__ import absolute_import import eventlet import traceback import uuid import datetime import six from jsonschema import exceptions as json_schema_exc from oslo_config import cfg from st2common.runners.base import ActionRunner from st2common.runners.base import get_metadata as get_runner_metadata from st2common import log as logging from st2common.constants import action as action_constants from st2common.constants import pack as pack_constants from st2common.constants import keyvalue as kv_constants from st2common.content.loader import MetaLoader from st2common.exceptions import action as action_exc from st2common.exceptions import actionrunner as runner_exc from st2common.exceptions import db as db_exc from st2common.models.api.notification import NotificationsHelper from st2common.models.db.liveaction import LiveActionDB from st2common.models.system import actionchain from st2common.models.utils import action_param_utils from st2common.persistence.execution import ActionExecution from st2common.persistence.liveaction import LiveAction from st2common.services import action as action_service from st2common.services import keyvalues as kv_service from st2common.util import action_db as action_db_util from st2common.util import isotime from st2common.util import date as date_utils from st2common.util import jinja as jinja_utils from st2common.util import param as param_utils from st2common.util.config_loader import get_config from st2common.util.deep_copy import fast_deepcopy_dict __all__ = ["ActionChainRunner", "ChainHolder", "get_runner", "get_metadata"] LOG = logging.getLogger(__name__) RESULTS_KEY = "__results" JINJA_START_MARKERS = ["{{", "{%"] PUBLISHED_VARS_KEY = "published" class ChainHolder(object): def __init__(self, chainspec, chainname): self.actionchain = actionchain.ActionChain(**chainspec) self.chainname = chainname if not self.actionchain.default: default = self._get_default(self.actionchain) self.actionchain.default = default LOG.debug( "Using %s as default for %s.", self.actionchain.default, self.chainname ) if not self.actionchain.default: raise Exception("Failed to find default node in %s." % (self.chainname)) self.vars = {} def init_vars(self, action_parameters, action_context=None): if self.actionchain.vars: self.vars = self._get_rendered_vars( self.actionchain.vars, action_parameters=action_parameters, action_context=action_context, ) def restore_vars(self, ctx_vars): self.vars.update(fast_deepcopy_dict(ctx_vars))
Apache License 2.0
lukasruff/deep-sad-pytorch
src/baselines/ssad.py
SSAD.train
python
def train(self, dataset: BaseADDataset, device: str = 'cpu', n_jobs_dataloader: int = 0): logger = logging.getLogger() train_loader = DataLoader(dataset=dataset.train_set, batch_size=128, shuffle=True, num_workers=n_jobs_dataloader, drop_last=False) X = () semi_targets = [] for data in train_loader: inputs, _, semi_targets_batch, _ = data inputs, semi_targets_batch = inputs.to(device), semi_targets_batch.to(device) if self.hybrid: inputs = self.ae_net.encoder(inputs) X_batch = inputs.view(inputs.size(0), -1) X += (X_batch.cpu().data.numpy(),) semi_targets += semi_targets_batch.cpu().data.numpy().astype(np.int).tolist() X, semi_targets = np.concatenate(X), np.array(semi_targets) logger.info('Starting training...') gammas = np.logspace(-7, 2, num=10, base=2) best_auc = 0.0 _, test_loader = dataset.loaders(batch_size=128, num_workers=n_jobs_dataloader) X_test = () labels = [] for data in test_loader: inputs, label_batch, _, _ = data inputs, label_batch = inputs.to(device), label_batch.to(device) if self.hybrid: inputs = self.ae_net.encoder(inputs) X_batch = inputs.view(inputs.size(0), -1) X_test += (X_batch.cpu().data.numpy(),) labels += label_batch.cpu().data.numpy().astype(np.int64).tolist() X_test, labels = np.concatenate(X_test), np.array(labels) n_test, n_normal, n_outlier = len(X_test), np.sum(labels == 0), np.sum(labels == 1) n_val = int(0.1 * n_test) n_val_normal, n_val_outlier = int(n_val * (n_normal/n_test)), int(n_val * (n_outlier/n_test)) perm = np.random.permutation(n_test) X_val = np.concatenate((X_test[perm][labels[perm] == 0][:n_val_normal], X_test[perm][labels[perm] == 1][:n_val_outlier])) labels = np.array([0] * n_val_normal + [1] * n_val_outlier) i = 1 for gamma in gammas: kernel = pairwise_kernels(X, X, metric=self.kernel, gamma=gamma) model = ConvexSSAD(kernel, semi_targets, Cp=self.Cp, Cu=self.Cu, Cn=self.Cn) start_time = time.time() model.fit() train_time = time.time() - start_time kernel_val = pairwise_kernels(X_val, X[model.svs, :], metric=self.kernel, gamma=gamma) scores = (-1.0) * model.apply(kernel_val) scores = scores.flatten() auc = roc_auc_score(labels, scores) logger.info(f' | Model {i:02}/{len(gammas):02} | Gamma: {gamma:.8f} | Train Time: {train_time:.3f}s ' f'| Val AUC: {100. * auc:.2f} |') if auc > best_auc: best_auc = auc self.model = model self.gamma = gamma self.results['train_time'] = train_time i += 1 self.X_svs = X[self.model.svs, :] if self.hybrid: linear_kernel = pairwise_kernels(X, X, metric='linear') self.linear_model = ConvexSSAD(linear_kernel, semi_targets, Cp=self.Cp, Cu=self.Cu, Cn=self.Cn) start_time = time.time() self.linear_model.fit() train_time = time.time() - start_time self.results['train_time_linear'] = train_time self.linear_X_svs = X[self.linear_model.svs, :] logger.info(f'Best Model: | Gamma: {self.gamma:.8f} | AUC: {100. * best_auc:.2f}') logger.info('Training Time: {:.3f}s'.format(self.results['train_time'])) logger.info('Finished training.')
Trains the SSAD model on the training data.
https://github.com/lukasruff/deep-sad-pytorch/blob/2e7aca37412e7f09d42d48d9e722ddfb422c814a/src/baselines/ssad.py#L48-L146
import json import logging import time import torch import numpy as np from torch.utils.data import DataLoader from .shallow_ssad.ssad_convex import ConvexSSAD from sklearn.metrics import roc_auc_score from sklearn.metrics.pairwise import pairwise_kernels from base.base_dataset import BaseADDataset from networks.main import build_autoencoder class SSAD(object): def __init__(self, kernel='rbf', kappa=1.0, Cp=1.0, Cu=1.0, Cn=1.0, hybrid=False): self.kernel = kernel self.kappa = kappa self.Cp = Cp self.Cu = Cu self.Cn = Cn self.rho = None self.gamma = None self.model = None self.X_svs = None self.hybrid = hybrid self.ae_net = None self.linear_model = None self.linear_X_svs = None self.results = { 'train_time': None, 'test_time': None, 'test_auc': None, 'test_scores': None, 'train_time_linear': None, 'test_time_linear': None, 'test_auc_linear': None }
MIT License
executablebooks/sphinx-design
sphinx_design/icons.py
AllOcticons.run
python
def run(self) -> List[nodes.Node]: classes = self.options.get("class", []) list_node = nodes.bullet_list() for icon in list_octicons(): item_node = nodes.list_item() item_node.extend( ( nodes.literal(icon, icon), nodes.Text(": "), nodes.raw( "", nodes.Text(get_octicon(icon, classes=classes)), format="html", ), ) ) list_node += item_node return [list_node]
Run the directive.
https://github.com/executablebooks/sphinx-design/blob/c79d9d65a14491a5326e7e4c1191c7c1f6831f0a/sphinx_design/icons.py#L153-L171
import json import re from functools import lru_cache from typing import Any, Dict, List, Optional, Sequence, Tuple try: import importlib.resources as resources except ImportError: import importlib_resources as resources from docutils import nodes from docutils.parsers.rst import directives from sphinx.application import Sphinx from sphinx.util.docutils import SphinxDirective, SphinxRole from . import compiled OCTICON_VERSION = "0.0.0-dd899ea" OCTICON_CSS = """\ .octicon { display: inline-block; vertical-align: text-top; fill: currentColor; }""" def setup_icons(app: Sphinx) -> None: app.add_role("octicon", OcticonRole()) app.add_directive("_all-octicon", AllOcticons) for style in ["fa", "fas", "fab", "far"]: app.add_role(style, FontawesomeRole(style)) app.add_config_value("sd_fontawesome_latex", False, "env") app.connect("config-inited", add_fontawesome_pkg) app.add_node( fontawesome, html=(visit_fontawesome_html, depart_fontawesome_html), latex=(visit_fontawesome_latex, None), text=(None, None), man=(None, None), texinfo=(None, None), ) @lru_cache(1) def get_octicon_data() -> Dict[str, Any]: content = resources.read_text(compiled, "octicons.json") return json.loads(content) def list_octicons() -> List[str]: return list(get_octicon_data().keys()) HEIGHT_REGEX = re.compile(r"^(?P<value>\d+(\.\d+)?)(?P<unit>px|em|rem)$") def get_octicon( name: str, height: str = "1em", classes: Sequence[str] = (), aria_label: Optional[str] = None, ) -> str: try: data = get_octicon_data()[name] except KeyError: raise KeyError(f"Unrecognised octicon: {name}") match = HEIGHT_REGEX.match(height) if not match: raise ValueError( f"Invalid height: '{height}', must be format <integer><px|em|rem>" ) height_value = round(float(match.group("value")), 3) height_unit = match.group("unit") original_height = 16 if "16" not in data["heights"]: original_height = int(list(data["heights"].keys())[0]) elif "24" in data["heights"]: if height_unit == "px": if height_value >= 24: original_height = 24 elif height_value >= 1.5: original_height = 24 original_width = data["heights"][str(original_height)]["width"] width_value = round(original_width * height_value / original_height, 3) content = data["heights"][str(original_height)]["path"] options = { "version": "1.1", "width": f"{width_value}{height_unit}", "height": f"{height_value}{height_unit}", "class": " ".join(("sd-octicon", f"sd-octicon-{name}", *classes)), } options["viewBox"] = f"0 0 {original_width} {original_height}" if aria_label is not None: options["aria-label"] = aria_label options["role"] = "img" else: options["aria-hidden"] = "true" opt_string = " ".join(f'{k}="{v}"' for k, v in options.items()) return f"<svg {opt_string}>{content}</svg>" class OcticonRole(SphinxRole): def run(self) -> Tuple[List[nodes.Node], List[nodes.system_message]]: values = self.text.split(";") if ";" in self.text else [self.text] icon = values[0] height = "1em" if len(values) < 2 else values[1] classes = "" if len(values) < 3 else values[2] icon = icon.strip() try: svg = get_octicon(icon, height=height, classes=classes.split()) except Exception as exc: msg = self.inliner.reporter.error( f"Invalid octicon content: {exc}", line=self.lineno, ) prb = self.inliner.problematic(self.rawtext, self.rawtext, msg) return [prb], [msg] node = nodes.raw("", nodes.Text(svg), format="html") self.set_source_info(node) return [node], [] class AllOcticons(SphinxDirective): option_spec = { "class": directives.class_option, }
MIT License
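A minimal usage sketch for the sphinx-design icon helpers shown in the record above, assuming the sphinx_design package is installed; it picks the first bundled octicon rather than assuming any particular icon name exists.

# Render one bundled octicon as an inline SVG string, outside of Sphinx.
from sphinx_design.icons import get_octicon, list_octicons

names = list_octicons()
print(names[:5])                       # a few available icon names
svg = get_octicon(names[0], height="1.5em", classes=["sd-text-primary"])
print(svg[:60], "...")                 # '<svg version="1.1" width=...'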
opendistro-for-elasticsearch/sample-code
pa-to-es/main.py
MetricGatherer.get_all_metrics
python
def get_all_metrics(self): docs = [] for metric in metric_descriptions.get_working_metric_descriptions(): result = self.get_metric(metric) if result.status_code != 200: print("FAIL", metric, '\n', result.text) else: rp = ResultParser(metric, result.text, self.node_tracker) for doc in rp.records(): docs.append(doc) return docs
Loops through all the metric descriptions, sending one at a time, parsing the results, and returning a list of dicts, each one representing one future Elasticsearch document.
https://github.com/opendistro-for-elasticsearch/sample-code/blob/94abc715845d17fb4c24e70c7c328b2d4da0d581/pa-to-es/main.py#L45-L58
import argparse from datetime import datetime import json import metric_descriptions from node_tracker import NodeTracker from pytz import timezone import requests from result_parser import ResultParser class MetricGatherer(): def __init__(self, args): self.node_tracker = NodeTracker(args) self.args = args def to_url_params(self, metric_description): return "metrics={}&dim={}&agg={}&nodes=all".format( metric_description.name, ",".join(metric_description.dimensions), metric_description.agg) def get_metric(self, metric_description): BASE_URL = 'http://{}:9600/_opendistro/_performanceanalyzer/metrics?' BASE_URL = BASE_URL.format(self.args.endpoint) url = "{}{}".format(BASE_URL, self.to_url_params(metric_description)) return requests.get(url)
Apache License 2.0
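To make the URL construction in MetricGatherer.to_url_params concrete, here is a small stand-alone sketch; MetricDescription below is an illustrative namedtuple standing in for the objects returned by metric_descriptions.get_working_metric_descriptions(), and the metric name is a made-up example.

from collections import namedtuple

# Stand-in for a Performance Analyzer metric description (name, dimensions, agg).
MetricDescription = namedtuple("MetricDescription", ["name", "dimensions", "agg"])
cpu = MetricDescription(name="CPU_Utilization", dimensions=["ShardID", "IndexName"], agg="avg")

# Same formatting as to_url_params() above.
params = "metrics={}&dim={}&agg={}&nodes=all".format(
    cpu.name, ",".join(cpu.dimensions), cpu.agg)
print(params)   # metrics=CPU_Utilization&dim=ShardID,IndexName&agg=avg&nodes=all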
aliyun/aliyun-log-python-sdk
aliyun/log/listtopicsrequest.py
ListTopicsRequest.get_token
python
def get_token(self): return self.token
Get start token to list topics

:return: string, start token to list topics
https://github.com/aliyun/aliyun-log-python-sdk/blob/49b7b92798729d962268252dbbae9d7c098e60f8/aliyun/log/listtopicsrequest.py#L47-L52
from .logrequest import LogRequest class ListTopicsRequest(LogRequest): def __init__(self, project=None, logstore=None, token=None, line=None): LogRequest.__init__(self, project) self.logstore = logstore self.token = token self.line = line def get_logstore(self): return self.logstore if self.logstore else '' def set_logstore(self, logstore): self.logstore = logstore
MIT License
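A minimal usage sketch for ListTopicsRequest, assuming the aliyun-log-python-sdk package is installed; the project and logstore names are placeholders.

from aliyun.log.listtopicsrequest import ListTopicsRequest

# Build a request and read its fields back with the getters shown above.
req = ListTopicsRequest(project="my-project", logstore="my-logstore", token="", line=100)
print(req.get_logstore())   # my-logstore
print(req.get_token())      # '' -> list topics from the beginning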
ceph/ceph-medic
ceph_medic/remote/commands.py
daemon_socket_config
python
def daemon_socket_config(conn, socket): try: output, _, exit_code = check( conn, ['ceph', '--admin-daemon', socket, 'config', 'show', '--format', 'json'] ) if exit_code != 0: conn.logger.error('Non zero exit status received, unable to retrieve information') return result = dict() try: result = json.loads(output[0]) except ValueError: conn.logger.exception( "failed to fetch ceph configuration via socket, invalid json: %s" % output[0] ) return result except RuntimeError: conn.logger.exception('failed to fetch ceph configuration via socket')
Capture daemon-based config from the socket
https://github.com/ceph/ceph-medic/blob/cf48c26636091eb11c4c00e09089680ce72b26bc/ceph_medic/remote/commands.py#L81-L102
import json from remoto.process import check def ceph_version(conn): try: output, _, exit_code = check(conn, ['ceph', '--version']) if exit_code != 0: conn.logger.error('Non zero exit status received, unable to retrieve information') return return output[0] except RuntimeError: conn.logger.exception('failed to fetch ceph version') def ceph_socket_version(conn, socket): try: result = dict() output, _, exit_code = check( conn, ['ceph', '--admin-daemon', socket, '--format', 'json', 'version'] ) if exit_code != 0: conn.logger.error('Non zero exit status received, unable to retrieve information') return result try: result = json.loads(output[0]) except ValueError: conn.logger.exception( "failed to fetch ceph socket version, invalid json: %s" % output[0] ) return result except RuntimeError: conn.logger.exception('failed to fetch ceph socket version') def ceph_status(conn): try: stdout, stderr, exit_code = check(conn, ['ceph', '-s', '--format', 'json']) result = dict() try: result = json.loads(''.join(stdout)) except ValueError: conn.logger.exception("failed to fetch ceph status, invalid json: %s" % ''.join(stdout)) if exit_code == 0: return result else: return {} except RuntimeError: conn.logger.exception('failed to fetch ceph status') def ceph_osd_dump(conn): try: stdout, stderr, exit_code = check(conn, ['ceph', 'osd', 'dump', '--format', 'json']) result = dict() if exit_code != 0: conn.logger.error('could not get osd dump from ceph') if stderr: for line in stderr: conn.logger.error(line) return result try: result = json.loads(''.join(stdout)) except ValueError: conn.logger.exception("failed to fetch osd dump, invalid json: %s" % ''.join(stdout)) return result except RuntimeError: conn.logger.exception('failed to fetch ceph osd dump')
MIT License
ufora/ufora
ufora/distributed/S3/InMemoryS3Interface.py
InMemoryS3InterfaceFactory.setDelayAfterWriteInjector
python
def setDelayAfterWriteInjector(self, injector): self.state_.delayAfterWriteFunction = injector
Cause the system to call a function to determine whether to delay writes.

injector: function from (bucketname, key) -> seconds indicating whether to wait
    after posting the read. The store will be unlocked, but clients will be blocked.
https://github.com/ufora/ufora/blob/04db96ab049b8499d6d6526445f4f9857f1b6c7e/ufora/distributed/S3/InMemoryS3Interface.py#L88-L95
import ufora.distributed.S3.S3Interface as S3Interface import collections import logging import threading import time InMemoryS3Key = collections.namedtuple('InMemoryS3Key', ['value', 'mtime']) class InMemoryS3State(object): def __init__(self): self.writeFailureInjector = None self.delayAfterWriteFunction = None self.throughput = None self.clear() def clear(self): self.buckets_ = {} self.bucketOwners_ = {} self.lock = threading.RLock() self.throughput = None self.bytesLoadedPerMachine = {} def logBytesLoadedAndDelay(self, machine, data): with self.lock: if machine not in self.bytesLoadedPerMachine: self.bytesLoadedPerMachine[machine] = 0 self.bytesLoadedPerMachine[machine] += data if self.throughput is not None: time.sleep(float(data) / self.throughput) def setThroughputPerMachine(self, throughput): self.throughput = throughput def createBucket(self, bucketName, credentials): assert bucketName not in self.bucketOwners_ self.bucketOwners_[bucketName] = credentials self.buckets_[bucketName] = {} def validateAccess(self, bucketName, credentials): owner = self.bucketOwners_.get(bucketName) if owner is None: raise S3Interface.BucketNotFound(bucketName) if credentials != owner and owner != S3Interface.S3Interface.publicCredentials: logging.error("Access Denies: owner=%s, credentials=%s", owner, credentials) raise S3Interface.BucketAccessError(bucketName) def listBuckets(self, credentials): return [ name for name, owner in self.bucketOwners_.iteritems() if owner == credentials or owner == self.publicCredentials ] class InMemoryS3InterfaceFactory(S3Interface.S3InterfaceFactory): isCompatibleWithOutOfProcessDownloadPool = False def __init__(self, state=None, onMachine=None): self.state_ = state or InMemoryS3State() self.machine_ = onMachine def setWriteFailureInjector(self, injector): self.state_.writeFailureInjector = injector
Apache License 2.0
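A sketch of wiring up a delay injector on the in-memory S3 factory above, assuming the ufora package is importable; the bucket-name prefix used here is an arbitrary example. The injector follows the documented (bucketname, key) -> seconds signature.

from ufora.distributed.S3.InMemoryS3Interface import InMemoryS3InterfaceFactory

factory = InMemoryS3InterfaceFactory()

def delay_injector(bucketname, key):
    # Delay writes to "slow-" buckets by half a second; no delay otherwise.
    return 0.5 if bucketname.startswith("slow-") else 0

factory.setDelayAfterWriteInjector(delay_injector)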
swimlane/pyews
pyews/core/core.py
Core._get_recursively
python
def _get_recursively(self, search_dict, field): fields_found = [] if search_dict: for key, value in search_dict.items(): if key == field: fields_found.append(value) elif isinstance(value, dict): results = self._get_recursively(value, field) for result in results: fields_found.append(result) elif isinstance(value, list): for item in value: if isinstance(item, dict): more_results = self._get_recursively(item, field) for another_result in more_results: fields_found.append(another_result) return fields_found
Takes a dict with nested lists and dicts, and searches all dicts for a key of the field provided.
https://github.com/swimlane/pyews/blob/61cc60226b347a881ce653acc7af276c26b37de9/pyews/core/core.py#L31-L52
import xmltodict import json from ..utils.logger import LoggingBase class Core(metaclass=LoggingBase): def camel_to_snake(self, s): if s != 'UserDN': return ''.join(['_'+c.lower() if c.isupper() else c for c in s]).lstrip('_') else: return 'user_dn' def __process_keys(self, key): return_value = key.replace('t:','') if return_value.startswith('@'): return_value = return_value.lstrip('@') return self.camel_to_snake(return_value) def _process_dict(self, obj): if isinstance(obj, dict): obj = { self.__process_keys(key): self._process_dict(value) for key, value in obj.items() } return obj
MIT License
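A minimal usage sketch for Core._get_recursively, assuming the pyews package is importable; the nested sample dict is made up for demonstration.

from pyews.core.core import Core

# Pull every "email_address" value out of a nested, response-style dict.
sample = {
    "response": {
        "items": [
            {"mailbox": {"email_address": "alice@example.com"}},
            {"mailbox": {"email_address": "bob@example.com"}},
        ]
    }
}

print(Core()._get_recursively(sample, "email_address"))
# ['alice@example.com', 'bob@example.com']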
ynop/audiomate
audiomate/corpus/conversion/base.py
AudioFileConverter.convert
python
def convert(self, corpus, target_audio_path): out_corpus = audiomate.Corpus() files_to_convert = [] for utterance in logger.progress( corpus.utterances.values(), total=corpus.num_utterances, description='Find utterances to convert'): if utterance.issuer.idx not in out_corpus.issuers.keys(): out_corpus.import_issuers(utterance.issuer) if self._does_utt_need_conversion(utterance): if self.separate_file_per_utterance: filename = '{}.{}'.format(utterance.idx, self._file_extension()) path = os.path.join(target_audio_path, filename) files_to_convert.append(( utterance.track.path, utterance.start, utterance.end, path )) track = out_corpus.new_file(path, utterance.idx) start = 0 end = float('inf') else: if utterance.track.idx not in out_corpus.tracks.keys(): filename = '{}.{}'.format(utterance.track.idx, self._file_extension()) path = os.path.join(target_audio_path, filename) files_to_convert.append(( utterance.track.path, 0, float('inf'), path )) out_corpus.new_file(path, utterance.track.idx) track = utterance.track start = utterance.start end = utterance.end utt = out_corpus.new_utterance( utterance.idx, track.idx, issuer_idx=utterance.issuer.idx, start=start, end=end ) lls = copy.deepcopy(list(utterance.label_lists.values())) utt.set_label_list(lls) else: self._copy_utterance_to_corpus(utterance, out_corpus) self._copy_subviews_to_corpus(corpus, out_corpus) self._convert_files(files_to_convert) return out_corpus
Convert the given corpus.

Args:
    corpus (Corpus): The input corpus.
    target_audio_path (str): The path where the audio files of the
        converted corpus should be saved.

Returns:
    Corpus: The newly created corpus.
https://github.com/ynop/audiomate/blob/080402eadaa81f77f64c8680510a2de64bc18e74/audiomate/corpus/conversion/base.py#L37-L112
import abc import copy import os import audiomate from audiomate import tracks from audiomate import logutil logger = logutil.getLogger() class AudioFileConverter(metaclass=abc.ABCMeta): def __init__(self, sampling_rate=16000, separate_file_per_utterance=False, force_conversion=False): self.sampling_rate = sampling_rate self.separate_file_per_utterance = separate_file_per_utterance self.force_conversion = force_conversion
MIT License
awslabs/dgl-lifesci
python/dgllife/model/model_zoo/dgmg.py
MoleculeEnv.num_atoms
python
def num_atoms(self): return self.dgl_graph.num_nodes()
Get the number of atoms for the current molecule.

Returns
-------
int
https://github.com/awslabs/dgl-lifesci/blob/ef58e803d2e7d8e0772292abfd59d1a6fa03c007/python/dgllife/model/model_zoo/dgmg.py#L111-L118
import dgl import torch import torch.nn as nn import torch.nn.functional as F import torch.nn.init as init from functools import partial from rdkit import Chem from torch.distributions import Categorical __all__ = ['DGMG'] class MoleculeEnv(object): def __init__(self, atom_types, bond_types): super(MoleculeEnv, self).__init__() self.atom_types = atom_types self.bond_types = bond_types self.atom_type_to_id = dict() self.bond_type_to_id = dict() for id, a_type in enumerate(atom_types): self.atom_type_to_id[a_type] = id for id, b_type in enumerate(bond_types): self.bond_type_to_id[b_type] = id def get_decision_sequence(self, mol, atom_order): decisions = [] old2new = dict() for new_id, old_id in enumerate(atom_order): atom = mol.GetAtomWithIdx(old_id) a_type = atom.GetSymbol() decisions.append((0, self.atom_type_to_id[a_type])) for bond in atom.GetBonds(): u = bond.GetBeginAtomIdx() v = bond.GetEndAtomIdx() if v == old_id: u, v = v, u if v in old2new: decisions.append((1, self.bond_type_to_id[bond.GetBondType()])) decisions.append((2, old2new[v])) decisions.append((1, len(self.bond_types))) old2new[old_id] = new_id decisions.append((0, len(self.atom_types))) return decisions def reset(self, rdkit_mol=False): self.dgl_graph = dgl.graph(([], []), idtype=torch.int32) self.dgl_graph.set_n_initializer(dgl.frame.zero_initializer) self.dgl_graph.set_e_initializer(dgl.frame.zero_initializer) self.mol = None if rdkit_mol: self.mol = Chem.RWMol(Chem.MolFromSmiles(''))
Apache License 2.0
jarryshaw/pypcapkit
pcapkit/const/hip/notify_message.py
NotifyMessage.get
python
def get(key, default=-1): if isinstance(key, int): return NotifyMessage(key) if key not in NotifyMessage._member_map_: extend_enum(NotifyMessage, key, default) return NotifyMessage[key]
Backport support for original codes.
https://github.com/jarryshaw/pypcapkit/blob/cfa778f606a111b2dc6eb57d1af054ba2689b578/pcapkit/const/hip/notify_message.py#L119-L125
from aenum import IntEnum, extend_enum __all__ = ['NotifyMessage'] class NotifyMessage(IntEnum): Reserved = 0 UNSUPPORTED_CRITICAL_PARAMETER_TYPE = 1 INVALID_SYNTAX = 7 NO_DH_PROPOSAL_CHOSEN = 14 INVALID_DH_CHOSEN = 15 NO_HIP_PROPOSAL_CHOSEN = 16 INVALID_HIP_CIPHER_CHOSEN = 17 NO_ESP_PROPOSAL_CHOSEN = 18 INVALID_ESP_TRANSFORM_CHOSEN = 19 UNSUPPORTED_HIT_SUITE = 20 AUTHENTICATION_FAILED = 24 Unassigned_25 = 25 CHECKSUM_FAILED = 26 Unassigned_27 = 27 HIP_MAC_FAILED = 28 ENCRYPTION_FAILED = 32 INVALID_HIT = 40 Unassigned_41 = 41 BLOCKED_BY_POLICY = 42 Unassigned_43 = 43 RESPONDER_BUSY_PLEASE_RETRY = 44 Unassigned_45 = 45 LOCATOR_TYPE_UNSUPPORTED = 46 Unassigned_47 = 47 CREDENTIALS_REQUIRED = 48 Unassigned_49 = 49 INVALID_CERTIFICATE = 50 REG_REQUIRED = 51 NO_VALID_NAT_TRAVERSAL_MODE_PARAMETER = 60 CONNECTIVITY_CHECKS_FAILED = 61 MESSAGE_NOT_RELAYED = 62 OVERLAY_TTL_EXCEEDED = 70 UNKNOWN_NEXT_HOP = 90 NO_VALID_HIP_TRANSPORT_MODE = 100 I2_ACKNOWLEDGEMENT = 16384 @staticmethod
BSD 3-Clause New or Revised License
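A short sketch of NotifyMessage.get, assuming pypcapkit is installed; the unknown member name used in the fallback branch is made up.

from pcapkit.const.hip.notify_message import NotifyMessage

# Known codes resolve to existing members.
print(NotifyMessage.get(7))    # NotifyMessage.INVALID_SYNTAX
print(NotifyMessage.get(1))    # NotifyMessage.UNSUPPORTED_CRITICAL_PARAMETER_TYPE

# Unknown names are auto-registered with the supplied default value.
unknown = NotifyMessage.get('VENDOR_SPECIFIC_TEST', default=999)
print(unknown.value)           # 999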
kcyu2014/eval-nas
search_policies/cnn/search_space/nas_bench/sampler.py
run_random_search
python
def run_random_search(nasbench, max_time_budget=5e6): nasbench.reset_budget_counters() times, best_valids, best_tests = [0.0], [0.0], [0.0] while True: spec = random_spec() data = nasbench.query(spec) if data['validation_accuracy'] > best_valids[-1]: best_valids.append(data['validation_accuracy']) best_tests.append(data['test_accuracy']) else: best_valids.append(best_valids[-1]) best_tests.append(best_tests[-1]) time_spent, _ = nasbench.get_budget_counters() times.append(time_spent) if time_spent > max_time_budget: break return times, best_valids, best_tests
Run a single roll-out of random search to a fixed time budget.
https://github.com/kcyu2014/eval-nas/blob/385376a3ef96336b54ee7e696af1d02b97aa5c32/search_policies/cnn/search_space/nas_bench/sampler.py#L141-L164
import copy import json import IPython import numpy as np import matplotlib.pyplot as plt import random from nasbench import api from .nasbench_api_v2 import ModelSpec_v2 from .genotype import CONV3X3, INPUT, OUTPUT INPUT = 'input' OUTPUT = 'output' CONV3X3 = 'conv3x3-bn-relu' CONV1X1 = 'conv1x1-bn-relu' MAXPOOL3X3 = 'maxpool3x3' NUM_VERTICES = 7 MAX_EDGES = 9 EDGE_SPOTS = NUM_VERTICES * (NUM_VERTICES - 1) / 2 OP_SPOTS = NUM_VERTICES - 2 ALLOWED_OPS = [CONV3X3, CONV1X1, MAXPOOL3X3] ALLOWED_EDGES = [0, 1] def random_spec(nasbench): while True: matrix = np.random.choice(ALLOWED_EDGES, size=(NUM_VERTICES, NUM_VERTICES)) matrix = np.triu(matrix, 1) ops = np.random.choice(ALLOWED_OPS, size=(NUM_VERTICES)).tolist() ops[0] = INPUT ops[-1] = OUTPUT spec = api.ModelSpec(matrix=matrix, ops=ops) if nasbench.is_valid(spec): return spec def mutate_spec(nasbench, old_spec, mutation_rate=1.0): while True: new_matrix = copy.deepcopy(old_spec.original_matrix) new_ops = copy.deepcopy(old_spec.original_ops) edge_mutation_prob = mutation_rate / NUM_VERTICES for src in range(0, NUM_VERTICES - 1): for dst in range(src + 1, NUM_VERTICES): if random.random() < edge_mutation_prob: new_matrix[src, dst] = 1 - new_matrix[src, dst] op_mutation_prob = mutation_rate / OP_SPOTS for ind in range(1, NUM_VERTICES - 1): if random.random() < op_mutation_prob: available = [o for o in nasbench.config['available_ops'] if o != new_ops[ind]] new_ops[ind] = random.choice(available) new_spec = api.ModelSpec(new_matrix, new_ops) if nasbench.is_valid(new_spec): return new_spec def random_combination(iterable, sample_size): pool = tuple(iterable) n = len(pool) indices = sorted(random.sample(range(n), sample_size)) return tuple(pool[i] for i in indices) def run_evolution_search(nasbench, max_time_budget=5e6, population_size=50, tournament_size=10, mutation_rate=1.0): nasbench.reset_budget_counters() times, best_valids, best_tests = [0.0], [0.0], [0.0] population = [] for _ in range(population_size): spec = random_spec() data = nasbench.query(spec) time_spent, _ = nasbench.get_budget_counters() times.append(time_spent) population.append((data['validation_accuracy'], spec)) if data['validation_accuracy'] > best_valids[-1]: best_valids.append(data['validation_accuracy']) best_tests.append(data['test_accuracy']) else: best_valids.append(best_valids[-1]) best_tests.append(best_tests[-1]) if time_spent > max_time_budget: break while True: sample = random_combination(population, tournament_size) best_spec = sorted(sample, key=lambda i: i[0])[-1][1] new_spec = mutate_spec(best_spec, mutation_rate) data = nasbench.query(new_spec) time_spent, _ = nasbench.get_budget_counters() times.append(time_spent) population.append((data['validation_accuracy'], new_spec)) population.pop(0) if data['validation_accuracy'] > best_valids[-1]: best_valids.append(data['validation_accuracy']) best_tests.append(data['test_accuracy']) else: best_valids.append(best_valids[-1]) best_tests.append(best_tests[-1]) if time_spent > max_time_budget: break return times, best_valids, best_tests
MIT License
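A usage sketch for the sampler above, assuming the nasbench package and a downloaded NAS-Bench-101 tfrecord file are available; the file path and the repository being on sys.path are assumptions.

from nasbench import api
from search_policies.cnn.search_space.nas_bench.sampler import random_spec

# Draw one random, valid cell from the NAS-Bench-101 search space.
nasbench = api.NASBench('/data/nasbench_only108.tfrecord')   # path is a placeholder
spec = random_spec(nasbench)
print(spec.original_ops)      # e.g. ['input', 'conv3x3-bn-relu', ..., 'output']
print(spec.original_matrix)   # upper-triangular adjacency matrix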
caserwin/simplerestfulml
src/service/compute_service.py
ComputeService.multiply
python
def multiply(a, b): return a * b
:param a:
:param b:
:return:
https://github.com/caserwin/simplerestfulml/blob/2fc5674a579fb7dc498f5e1cc6bbb888b6506906/src/service/compute_service.py#L30-L36
class ComputeService(object): def __init__(self): pass @staticmethod def plus(a, b): return a + b @staticmethod def divide(a, b): return a / float(b) @staticmethod
Apache License 2.0
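A trivial usage sketch for the static helpers on ComputeService; the import path assumes the repository root is on sys.path.

from src.service.compute_service import ComputeService

print(ComputeService.plus(2, 3))       # 5
print(ComputeService.multiply(4, 5))   # 20
print(ComputeService.divide(1, 4))     # 0.25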
bigmlcom/bigmler
bigmler/tsevaluation.py
evaluate
python
def evaluate(time_series_set, datasets, api, args, resume, session_file=None, path=None, log=None, fields=None, dataset_fields=None): output = args.predictions evaluations, resume = evaluations_process( time_series_set, datasets, fields, dataset_fields, api, args, resume, session_file=session_file, path=path, log=log) for evaluation in evaluations: evaluation = r.get_evaluation(evaluation, api, args.verbosity, session_file) if shared_changed(args.shared, evaluation): evaluation_args = {"shared": args.shared} evaluation = r.update_evaluation(evaluation, evaluation_args, args, api=api, path=path, session_file=session_file) file_name = output r.save_evaluation(evaluation, file_name, api) return resume
Evaluates a list of time-series with the given dataset
https://github.com/bigmlcom/bigmler/blob/91973ca1e752954302bf26bb22aa6874dc34ce69/bigmler/tsevaluation.py#L28-L49
import bigmler.utils as u import bigmler.resourcesapi.evaluations as r import bigmler.checkpoint as c from bigmler.resourcesapi.common import shared_changed
Apache License 2.0
openforcefield/openff-interchange
openff/interchange/components/potentials.py
WrappedPotential.parameters
python
def parameters(self): keys = { pot for pot in self._inner_data.data.keys() for pot in pot.parameters.keys() } params = dict() for key in keys: sum_ = 0.0 for pot, coeff in self._inner_data.data.items(): sum_ += coeff * pot.parameters[key] params.update({key: sum_}) return params
Get the parameters as represented by the stored potentials and coefficients.
https://github.com/openforcefield/openff-interchange/blob/a080e348b62c36c3c6a6b04e8afde64556f3186e/openff/interchange/components/potentials.py#L64-L76
import ast from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Union from openff.toolkit.typing.engines.smirnoff.parameters import ParameterHandler from openff.utilities.utilities import has_package, requires_package from pydantic import Field, PrivateAttr, validator from openff.interchange.exceptions import MissingParametersError from openff.interchange.models import ( DefaultModel, PotentialKey, TopologyKey, VirtualSiteKey, ) from openff.interchange.types import ArrayQuantity, FloatQuantity if has_package("jax"): from jax import numpy else: import numpy if TYPE_CHECKING: from openff.interchange.components.mdtraj import _OFFBioTop class Potential(DefaultModel): parameters: Dict[str, FloatQuantity] = dict() map_key: Optional[int] = None @validator("parameters") def validate_parameters(cls, v): for key, val in v.items(): if isinstance(val, list): v[key] = ArrayQuantity.validate_type(val) else: v[key] = FloatQuantity.validate_type(val) return v def __hash__(self): return hash(tuple(self.parameters.values())) class WrappedPotential(DefaultModel): class InnerData(DefaultModel): data: Dict[Potential, float] _inner_data: InnerData = PrivateAttr() def __init__(self, data): if isinstance(data, Potential): self._inner_data = self.InnerData(data={data: 1.0}) elif isinstance(data, dict): self._inner_data = self.InnerData(data=data) @property
MIT License
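The parameters property above returns a coefficient-weighted sum over the wrapped Potentials. The sketch below re-does that combination with plain dicts and floats (no unit-tagged quantities), purely to illustrate the arithmetic; the parameter names and values are made up.

# Two "inner" potentials and their mixing coefficients.
pot_a = {"k": 100.0, "length": 1.5}
pot_b = {"k": 200.0, "length": 1.5}
coeffs = [(pot_a, 0.25), (pot_b, 0.75)]

# Same weighted sum as WrappedPotential.parameters, without unit handling.
keys = {key for pot, _ in coeffs for key in pot}
combined = {key: sum(coeff * pot[key] for pot, coeff in coeffs) for key in keys}
print(combined)   # k -> 0.25*100 + 0.75*200 = 175.0, length -> 1.5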
hyperledger/aries-cloudagent-python
aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py
set_endorser_info
python
async def set_endorser_info(request: web.BaseRequest): context: AdminRequestContext = request["context"] connection_id = request.match_info["conn_id"] endorser_did = request.query.get("endorser_did") endorser_name = request.query.get("endorser_name") session = await context.session() try: record = await ConnRecord.retrieve_by_id(session, connection_id) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err jobs = await record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if "transaction_my_job" not in jobs.keys(): raise web.HTTPForbidden( reason=( 'The "transaction_my_job" is not set in "transaction_jobs"' " in connection metadata for this connection record" ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_AUTHOR.name: raise web.HTTPForbidden( reason=( "Only a TRANSACTION_AUTHOR can add endorser_info " "to metadata of its connection record" ) ) value = await record.metadata_get(session, "endorser_info") if value: value["endorser_did"] = endorser_did value["endorser_name"] = endorser_name else: value = {"endorser_did": endorser_did, "endorser_name": endorser_name} await record.metadata_set(session, key="endorser_info", value=value) endorser_info = await record.metadata_get(session, "endorser_info") return web.json_response(endorser_info)
Request handler for assigning endorser information.

Args:
    request: aiohttp request object

Returns:
    The assigned endorser information
https://github.com/hyperledger/aries-cloudagent-python/blob/fec69f1a2301e4745fc9d40cea190050e3f595fa/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py#L599-L653
import json from typing import Optional from aiohttp import web from aiohttp_apispec import ( docs, response_schema, querystring_schema, request_schema, match_info_schema, ) from marshmallow import fields, validate from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....indy.issuer import IndyIssuerError from ....ledger.error import LedgerError from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import UUIDFour from ....storage.error import StorageError, StorageNotFoundError from ....wallet.base import BaseWallet from .manager import TransactionManager, TransactionManagerError from .models.transaction_record import TransactionRecord, TransactionRecordSchema from .transaction_jobs import TransactionJob class TransactionListSchema(OpenAPISchema): results = fields.List( fields.Nested(TransactionRecordSchema()), description="List of transaction records", ) class TransactionsListQueryStringSchema(OpenAPISchema): class TranIdMatchInfoSchema(OpenAPISchema): tran_id = fields.Str( description="Transaction identifier", required=True, example=UUIDFour.EXAMPLE ) class AssignTransactionJobsSchema(OpenAPISchema): transaction_my_job = fields.Str( description="Transaction related jobs", required=False, validate=validate.OneOf( [r.name for r in TransactionJob if isinstance(r.value[0], int)] + ["reset"] ), ) class TransactionJobsSchema(OpenAPISchema): transaction_my_job = fields.Str( description="My transaction related job", required=False, validate=validate.OneOf( [r.name for r in TransactionJob if isinstance(r.value[0], int)] + ["reset"] ), ) transaction_their_job = fields.Str( description="Their transaction related job", required=False, validate=validate.OneOf( [r.name for r in TransactionJob if isinstance(r.value[0], int)] + ["reset"] ), ) class TransactionConnIdMatchInfoSchema(OpenAPISchema): conn_id = fields.Str( description="Connection identifier", required=True, example=UUIDFour.EXAMPLE ) class DateSchema(OpenAPISchema): expires_time = fields.DateTime( description="Expiry Date", required=True, example="2021-03-29T05:22:19Z" ) class EndorserWriteLedgerTransactionSchema(OpenAPISchema): endorser_write_txn = fields.Boolean( description="Endorser will write the transaction after endorsing it", required=False, ) class EndorserInfoSchema(OpenAPISchema): endorser_did = fields.Str( description="Endorser DID", required=True, ) endorser_name = fields.Str( description="Endorser Name", required=False, ) @docs( tags=["endorse-transaction"], summary="Query transactions", ) @querystring_schema(TransactionsListQueryStringSchema()) @response_schema(TransactionListSchema(), 200) async def transactions_list(request: web.BaseRequest): context: AdminRequestContext = request["context"] tag_filter = {} post_filter = {} try: async with context.session() as session: records = await TransactionRecord.query( session, tag_filter, post_filter_positive=post_filter, alt=True ) results = [record.serialize() for record in records] except (StorageError, BaseModelError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err return web.json_response({"results": results}) @docs(tags=["endorse-transaction"], summary="Fetch a single transaction record") @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) async def transactions_retrieve(request: web.BaseRequest): context: AdminRequestContext = request["context"] transaction_id = 
request.match_info["tran_id"] try: async with context.session() as session: record = await TransactionRecord.retrieve_by_id(session, transaction_id) result = record.serialize() except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err return web.json_response(result) @docs( tags=["endorse-transaction"], summary="For author to send a transaction request", ) @querystring_schema(TranIdMatchInfoSchema()) @querystring_schema(EndorserWriteLedgerTransactionSchema()) @request_schema(DateSchema()) @response_schema(TransactionRecordSchema(), 200) async def transaction_create_request(request: web.BaseRequest): context: AdminRequestContext = request["context"] outbound_handler = request["outbound_message_router"] transaction_id = request.query.get("tran_id") endorser_write_txn = json.loads(request.query.get("endorser_write_txn", "false")) body = await request.json() expires_time = body.get("expires_time") try: async with context.session() as session: transaction_record = await TransactionRecord.retrieve_by_id( session, transaction_id ) connection_record = await ConnRecord.retrieve_by_id( session, transaction_record.connection_id ) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err session = await context.session() jobs = await connection_record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if "transaction_my_job" not in jobs.keys(): raise web.HTTPForbidden( reason=( 'The "transaction_my_job" is not set in "transaction_jobs" ' "connection metadata for this connection record" ) ) if "transaction_their_job" not in jobs.keys(): raise web.HTTPForbidden( reason=( 'Ask the other agent to set up "transaction_my_job" in ' '"transaction_jobs" in connection metadata for their connection record' ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_AUTHOR.name: raise web.HTTPForbidden(reason="Only a TRANSACTION_AUTHOR can create a request") if jobs["transaction_their_job"] != TransactionJob.TRANSACTION_ENDORSER.name: raise web.HTTPForbidden( reason="A request can only be created to a TRANSACTION_ENDORSER" ) transaction_mgr = TransactionManager(session) try: transaction_record, transaction_request = await transaction_mgr.create_request( transaction=transaction_record, expires_time=expires_time, endorser_write_txn=endorser_write_txn, ) except (StorageError, TransactionManagerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err await outbound_handler( transaction_request, connection_id=connection_record.connection_id ) return web.json_response(transaction_record.serialize()) @docs( tags=["endorse-transaction"], summary="For Endorser to endorse a particular transaction record", ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) async def endorse_transaction_response(request: web.BaseRequest): context: AdminRequestContext = request["context"] outbound_handler = request["outbound_message_router"] transaction_id = request.match_info["tran_id"] try: async with context.session() as session: transaction = await TransactionRecord.retrieve_by_id( session, transaction_id ) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) except 
StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err session = await context.session() jobs = await connection_record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_ENDORSER.name: raise web.HTTPForbidden( reason="Only a TRANSACTION_ENDORSER can endorse a transaction" ) transaction_mgr = TransactionManager(session) try: ( transaction, endorsed_transaction_response, ) = await transaction_mgr.create_endorse_response( transaction=transaction, state=TransactionRecord.STATE_TRANSACTION_ENDORSED, ) except (IndyIssuerError, LedgerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err except (StorageError, TransactionManagerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err await outbound_handler( endorsed_transaction_response, connection_id=transaction.connection_id ) return web.json_response(transaction.serialize()) @docs( tags=["endorse-transaction"], summary="For Endorser to refuse a particular transaction record", ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) async def refuse_transaction_response(request: web.BaseRequest): context: AdminRequestContext = request["context"] outbound_handler = request["outbound_message_router"] session = await context.session() wallet: Optional[BaseWallet] = session.inject_or(BaseWallet) if not wallet: raise web.HTTPForbidden(reason="No wallet available") refuser_did_info = await wallet.get_public_did() if not refuser_did_info: raise web.HTTPForbidden( reason="Transaction cannot be refused as there is no Public DID in wallet" ) refuser_did = refuser_did_info.did transaction_id = request.match_info["tran_id"] try: async with context.session() as session: transaction = await TransactionRecord.retrieve_by_id( session, transaction_id ) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err session = await context.session() jobs = await connection_record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_ENDORSER.name: raise web.HTTPForbidden( reason="Only a TRANSACTION_ENDORSER can refuse a transaction" ) try: transaction_mgr = TransactionManager(session) ( transaction, refused_transaction_response, ) = await transaction_mgr.create_refuse_response( transaction=transaction, state=TransactionRecord.STATE_TRANSACTION_REFUSED, refuser_did=refuser_did, ) except (StorageError, TransactionManagerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err await outbound_handler( refused_transaction_response, connection_id=transaction.connection_id ) return web.json_response(transaction.serialize()) @docs( tags=["endorse-transaction"], summary="For Author to cancel a particular transaction request", ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) async def cancel_transaction(request: web.BaseRequest): context: AdminRequestContext 
= request["context"] outbound_handler = request["outbound_message_router"] transaction_id = request.match_info["tran_id"] try: async with context.session() as session: transaction = await TransactionRecord.retrieve_by_id( session, transaction_id ) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err session = await context.session() jobs = await connection_record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_AUTHOR.name: raise web.HTTPForbidden( reason="Only a TRANSACTION_AUTHOR can cancel a transaction" ) transaction_mgr = TransactionManager(session) try: ( transaction, cancelled_transaction_response, ) = await transaction_mgr.cancel_transaction( transaction=transaction, state=TransactionRecord.STATE_TRANSACTION_CANCELLED ) except (StorageError, TransactionManagerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err await outbound_handler( cancelled_transaction_response, connection_id=transaction.connection_id ) return web.json_response(transaction.serialize()) @docs( tags=["endorse-transaction"], summary="For Author to resend a particular transaction request", ) @match_info_schema(TranIdMatchInfoSchema()) @response_schema(TransactionRecordSchema(), 200) async def transaction_resend(request: web.BaseRequest): context: AdminRequestContext = request["context"] outbound_handler = request["outbound_message_router"] transaction_id = request.match_info["tran_id"] try: async with context.session() as session: transaction = await TransactionRecord.retrieve_by_id( session, transaction_id ) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err session = await context.session() jobs = await connection_record.metadata_get(session, "transaction_jobs") if not jobs: raise web.HTTPForbidden( reason=( "The transaction related jobs are not set up in " "connection metadata for this connection record" ) ) if jobs["transaction_my_job"] != TransactionJob.TRANSACTION_AUTHOR.name: raise web.HTTPForbidden( reason="Only a TRANSACTION_AUTHOR can resend a transaction" ) try: transaction_mgr = TransactionManager(session) ( transaction, resend_transaction_response, ) = await transaction_mgr.transaction_resend( transaction=transaction, state=TransactionRecord.STATE_TRANSACTION_RESENT ) except (StorageError, TransactionManagerError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err await outbound_handler( resend_transaction_response, connection_id=transaction.connection_id ) return web.json_response(transaction.serialize()) @docs( tags=["endorse-transaction"], summary="Set transaction jobs", ) @querystring_schema(AssignTransactionJobsSchema()) @match_info_schema(TransactionConnIdMatchInfoSchema()) @response_schema(TransactionJobsSchema(), 200) async def set_endorser_role(request: web.BaseRequest): context: AdminRequestContext = request["context"] outbound_handler = request["outbound_message_router"] connection_id = request.match_info["conn_id"] transaction_my_job = 
request.query.get("transaction_my_job") session = await context.session() try: record = await ConnRecord.retrieve_by_id(session, connection_id) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err transaction_mgr = TransactionManager(session) tx_job_to_send = await transaction_mgr.set_transaction_my_job( record=record, transaction_my_job=transaction_my_job ) jobs = await record.metadata_get(session, "transaction_jobs") await outbound_handler(tx_job_to_send, connection_id=connection_id) return web.json_response(jobs) @docs( tags=["endorse-transaction"], summary="Set Endorser Info", ) @querystring_schema(EndorserInfoSchema()) @match_info_schema(TransactionConnIdMatchInfoSchema()) @response_schema(EndorserInfoSchema(), 200)
Apache License 2.0
redhatqe/wrapanapi
wrapanapi/entities/vm.py
Vm.start
python
def start(self):
Starts the VM/instance. Blocks until task completes.

Returns:
    True if vm action has been initiated properly
https://github.com/redhatqe/wrapanapi/blob/ba7901bf64bfded79f5e3e99acc2379770bdb425/wrapanapi/entities/vm.py#L349-L354
import time from abc import ABCMeta, abstractmethod, abstractproperty from cached_property import cached_property_with_ttl from wait_for import wait_for, TimedOutError from wrapanapi.const import CACHED_PROPERTY_TTL from wrapanapi.exceptions import MultipleItemsError, NotFoundError from wrapanapi.entities.base import Entity, EntityMixin class VmState(object): RUNNING = 'VmState.RUNNING' STOPPED = 'VmState.STOPPED' PAUSED = 'VmState.PAUSED' SUSPENDED = 'VmState.SUSPENDED' DELETED = 'VmState.DELETED' STARTING = 'VmState.STARTING' STOPPING = 'VmState.STOPPING' ERROR = 'VmState.ERROR' UNKNOWN = 'VmState.UNKNOWN' SHELVED = 'VmState.SHELVED' SHELVED_OFFLOADED = 'VmState.SHELVED_OFFLOADED' @classmethod def valid_states(cls): return [ var_val for var_val in vars(cls).values() if isinstance(var_val, str) and var_val.startswith('VmState.') ] class Vm(Entity, metaclass=ABCMeta): state_map = None def __init__(self, *args, **kwargs): state_map = getattr(self, 'state_map') if (not state_map or not isinstance(state_map, dict) or not all(value in VmState.valid_states() for value in state_map.values())): raise NotImplementedError( "property '{}' not properly implemented in class '{}'" .format('state_map', self.__class__.__name__) ) super(Vm, self).__init__(*args, **kwargs) def _api_state_to_vmstate(self, api_state): try: return self.state_map[api_state] except KeyError: self.logger.warn( "Unmapped VM state '%s' received from system, mapped to '%s'", api_state, VmState.UNKNOWN ) return VmState.UNKNOWN @property def exists(self): try: state = self._get_state() exists = True except NotFoundError: exists = False if exists: if state == VmState.DELETED: exists = False return exists @abstractmethod def _get_state(self): @cached_property_with_ttl(ttl=CACHED_PROPERTY_TTL) def state(self): return self._get_state() @property def is_running(self): return self.state == VmState.RUNNING @property def is_started(self): return self.is_running @property def is_stopped(self): return self.state == VmState.STOPPED @property def is_paused(self): return self.state == VmState.PAUSED @property def is_suspended(self): return self.state == VmState.SUSPENDED @property def is_starting(self): return self.state == VmState.STARTING @property def is_stopping(self): return self.state == VmState.STOPPING @abstractproperty def ip(self): @abstractproperty def all_ips(self): @abstractproperty def creation_time(self): def wait_for_state(self, state, timeout='6m', delay=15): valid_states = list(self.state_map.values()) if state not in valid_states: self.logger.error( "Invalid desired state. Valid states for %s: %s", self.__class__.__name__, valid_states ) raise ValueError('Invalid desired state') wait_for( lambda: self.state == state, timeout=timeout, delay=delay, message="wait for vm {} to reach state '{}'".format(self._log_id, state)) def _handle_transition(self, in_desired_state, in_state_requiring_prep, in_actionable_state, do_prep, do_action, state, timeout, delay): def _transition(): if in_desired_state(): time.sleep(CACHED_PROPERTY_TTL + 0.1) if in_desired_state(): return True else: return False elif in_state_requiring_prep(): self.logger.info( "VM %s in state requiring prep. current state: %s, ensuring state: %s)", self._log_id, self.state, state ) do_prep() return False elif in_actionable_state(): self.logger.info( "VM %s in actionable state. 
current state: %s, ensuring state: %s)", self._log_id, self.state, state ) do_action() return False return wait_for( _transition, timeout=timeout, delay=delay, message="ensure vm {} reaches state '{}'".format(self._log_id, state) ) def ensure_state(self, state, timeout='6m', delay=5): valid_states = list(self.state_map.values()) if state not in valid_states: self.logger.error( "Invalid desired state. Valid states for %s: %s", self.__class__.__name__, valid_states ) raise ValueError('Invalid desired state') if state == VmState.RUNNING: return self._handle_transition( in_desired_state=lambda: self.is_running, in_state_requiring_prep=lambda: False, in_actionable_state=lambda: self.is_stopped or self.is_suspended or self.is_paused, do_prep=lambda: None, do_action=self.start, state=state, timeout=timeout, delay=delay ) elif state == VmState.STOPPED: return self._handle_transition( in_desired_state=lambda: self.is_stopped, in_state_requiring_prep=lambda: (self.is_suspended or self.is_paused or self.is_starting), in_actionable_state=lambda: self.is_running, do_prep=self.start, do_action=self.stop, state=state, timeout=timeout, delay=delay ) elif state == VmState.SUSPENDED: if not self.system.can_suspend: raise ValueError( 'System {} is unable to suspend'.format(self.system.__class__.__name__)) return self._handle_transition( in_desired_state=lambda: self.is_suspended, in_state_requiring_prep=lambda: self.is_stopped or self.is_paused, in_actionable_state=lambda: self.is_running, do_prep=self.start, do_action=self.suspend, state=state, timeout=timeout, delay=delay ) elif state == VmState.PAUSED: if not self.system.can_pause: raise ValueError( 'System {} is unable to pause'.format(self.system.__class__.__name__)) return self._handle_transition( in_desired_state=lambda: self.is_paused, in_state_requiring_prep=lambda: self.is_stopped or self.is_suspended, in_actionable_state=lambda: self.is_running, do_prep=self.start, do_action=self.pause, state=state, timeout=timeout, delay=delay ) else: raise ValueError("Invalid desired state '{}'".format(state)) @property def in_steady_state(self): return self.state in [VmState.RUNNING, VmState.STOPPED, VmState.PAUSED, VmState.SUSPENDED] def wait_for_steady_state(self, timeout=None, delay=5): try: return wait_for( lambda: self.in_steady_state, timeout=timeout if timeout else self.system.steady_wait_time, delay=delay, message="VM/Instance '{}' in steady state".format(self._log_id) ) except TimedOutError: self.logger.exception( "VM %s stuck in '%s' while waiting for steady state.", self._log_id, self.state) raise @abstractmethod
MIT License
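The abstract Vm class in the context above refuses to construct unless the subclass supplies a valid state_map. A minimal sketch of that contract follows; the backend status strings are hypothetical, the import path is assumed to be wrapanapi.entities.vm, and a real subclass must also implement the abstract members such as _get_state, ip, all_ips and creation_time.

from wrapanapi.entities.vm import Vm, VmState  # assumed module path

class ExampleVm(Vm):
    # Hypothetical backend status strings mapped onto VmState constants;
    # Vm.__init__ validates that every value is a recognised VmState.
    state_map = {
        "poweredOn": VmState.RUNNING,
        "poweredOff": VmState.STOPPED,
        "suspended": VmState.SUSPENDED,
    }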
facebookresearch/dynabench
legacy/torchserve/tasks/nli/r3/handler.py
NliTransformerHandler.initialize
python
def initialize(self, ctx): model_dir, model_pt_path, self.device, self.setup_config = handler_initialize( ctx ) device_num = -1 self.model_name = "roberta_1" self.roberta_model_name = self.setup_config["model_name"] max_input_l = self.setup_config["max_length"] num_labels = self.setup_config["num_labels"] self.my_task_id = self.setup_config["my_task_id"] self.my_round_id = self.setup_config["my_round_id"] self.device_num = device_num logger.info("--------------- Stage 1 ------------------- ") self.cur_roberta = torch.hub.load("pytorch/fairseq", self.roberta_model_name) self.model = RoBertaSeqClassification(self.cur_roberta, num_labels=num_labels) logger.info("The Roberta classification created") logger.info("--------------- Stage 2 ------------------- ") if torch.cuda.is_available() and device_num != -1: self.model.load_state_dict(torch.load(model_pt_path)) else: self.model.load_state_dict(torch.load(model_pt_path, map_location="cpu")) logger.info("The state_dict loaded") self.model.to(self.device) logger.info("--------------- Stage 3 ------------------- ") self.cs_reader = RoBertaNLIReader( self.cur_roberta, lazy=False, example_filter=None, max_seq_l=max_input_l ) logger.info("The RoBertaNLIReader created") logger.info("--------------- Stage 4 ------------------- ") unk_token_num = {"tokens": 1} vocab = ExVocabulary(unk_token_num=unk_token_num) vocab.add_token_to_namespace("e", namespace="labels") vocab.add_token_to_namespace("n", namespace="labels") vocab.add_token_to_namespace("c", namespace="labels") vocab.add_token_to_namespace("h", namespace="labels") vocab.change_token_with_index_to_namespace("h", -2, namespace="labels") self.biterator = BasicIterator(batch_size=32) self.biterator.index_with(vocab) logger.info("--------------- Stage 5 ------------------- ") self.lig = LayerIntegratedGradients( captum_nli_forward_func, self.model.roberta.model.decoder.sentence_encoder.embed_tokens, ) self.initialized = True
Initializes the model and tokenizer during server startup.
https://github.com/facebookresearch/dynabench/blob/e534f68dd13796f39e8c9825affc36e1c959182f/legacy/torchserve/tasks/nli/r3/handler.py#L43-L102
import logging import sys import torch import torch.nn.functional as F import uuid from captum.attr import LayerIntegratedGradients from ts.torch_handler.base_handler import BaseHandler from allennlp.data.iterators import BasicIterator from data_utils.exvocab import ExVocabulary from data_utils.readers.roberta_nli_reader import RoBertaNLIReader from roberta_model.nli_training import RoBertaSeqClassification from settings import my_secret from shared import ( captum_nli_forward_func, check_fields, generate_response_signature, get_nli_word_token, handler_initialize, remove_sp_chars, summarize_attributions, ) logger = logging.getLogger(__name__) sys.path.append("/home/model-server/anli/src") class NliTransformerHandler(BaseHandler): def __init__(self): super().__init__() self.initialized = False
MIT License
jtpereyda/boofuzz-ftp
ftp.py
parse_ftp_reply
python
def parse_ftp_reply(data): reply_code_len = 3 if len(data) < reply_code_len: raise BooFtpException("Invalid FTP reply, too short; must be a 3-digit sequence followed by a space") else: try: reply = data[0:reply_code_len+1].decode('ascii') except ValueError: raise BooFtpException("Invalid FTP reply, non-ASCII characters; must be a 3-digit sequence followed by a space") if not re.match('[1-5][0-9][0-9] ', reply[0:4]): raise BooFtpException("Invalid FTP reply; must be a 3-digit sequence followed by a space") else: return reply[0:reply_code_len]
Parse an FTP reply and return the reply code. Raise BooFtpException if the reply is invalid. Note: 1. Multi-line replies are not supported yet. RFC 959 excerpt: A reply is defined to contain the 3-digit code, followed by Space <SP>, followed by one line of text (where some maximum line length has been specified), and terminated by the Telnet end-of-line code. There will be cases, however, where the text is longer than a single line... Args: data (bytes): Raw reply data
https://github.com/jtpereyda/boofuzz-ftp/blob/81ab52b2087ff1ed700458b6c1f2dbe16a5d665c/ftp.py#L42-L70
import re from boofuzz import * from boofuzz.constants import DEFAULT_PROCMON_PORT from boofuzz.utils.debugger_thread_simple import DebuggerThreadSimple from boofuzz.utils.process_monitor_local import ProcessMonitorLocal import click class BooFtpException(Exception): pass def check_reply_code(target, fuzz_data_logger, session, test_case_context, *args, **kwargs): if test_case_context.previous_message.name == "__ROOT_NODE__": return else: try: fuzz_data_logger.log_info("Parsing reply contents: {0}".format(session.last_recv)) parse_ftp_reply(session.last_recv) except BooFtpException as e: fuzz_data_logger.log_fail(str(e)) fuzz_data_logger.log_pass()
MIT License
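parse_ftp_reply only needs the first four bytes of the server's reply: a three-digit code and a space. A standalone sketch of that check, without the boofuzz dependency (the names below are illustrative, not part of the repository):

import re

def reply_code(data: bytes) -> str:
    # Mirrors the validation in parse_ftp_reply: three digits, then a space.
    reply = data[:4].decode("ascii")
    if not re.match(r"[1-5][0-9][0-9] ", reply):
        raise ValueError("Invalid FTP reply")
    return reply[:3]

print(reply_code(b"220 Service ready for new user.\r\n"))  # -> "220"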
crossbario/autobahn-python
autobahn/util.py
rid
python
def rid(): return struct.unpack("@Q", os.urandom(8))[0] & _WAMP_ID_MASK
Generate a new random integer ID from the range **[0, 2**53]**. The generated ID is uniformly distributed over the whole range, has no period (no pseudo-random generator is used) and is cryptographically strong. The upper bound **2**53** is chosen since it is the maximum integer that can be represented as an IEEE double such that all smaller integers are representable as well. Hence, IDs can be safely used with languages that use IEEE double as their main (or only) number type (JavaScript, Lua, etc.). :returns: A random integer ID. :rtype: int
https://github.com/crossbario/autobahn-python/blob/a35f22eeaafca7568f1deb35c4a1b82ae78f77d4/autobahn/util.py#L269-L285
import os import time import struct import sys import re import base64 import math import random import binascii import socket import subprocess from typing import Optional from datetime import datetime, timedelta from pprint import pformat from array import array import txaio try: _TLS = True from OpenSSL import SSL except ImportError: _TLS = False __all__ = ("public", "encode_truncate", "xor", "utcnow", "utcstr", "id", "rid", "newid", "rtime", "Stopwatch", "Tracker", "EqualityMixin", "ObservableMixin", "IdGenerator", "generate_token", "generate_activation_code", "generate_serial_number", "generate_user_password", "machine_id") def public(obj): try: obj._is_public = True except AttributeError: pass return obj @public def encode_truncate(text, limit, encoding='utf8', return_encoded=True): assert(text is None or type(text) == str) assert(type(limit) == int) assert(limit >= 0) if text is None: return s = text.encode(encoding) if len(s) > limit: s = s[:limit] text = s.decode(encoding, 'ignore') if return_encoded: s = text.encode(encoding) if return_encoded: return s else: return text @public def xor(d1: bytes, d2: bytes) -> bytes: if type(d1) != bytes: raise Exception("invalid type {} for d1 - must be binary".format(type(d1))) if type(d2) != bytes: raise Exception("invalid type {} for d2 - must be binary".format(type(d2))) if len(d1) != len(d2): raise Exception("cannot XOR binary string of differing length ({} != {})".format(len(d1), len(d2))) d1 = array('B', d1) d2 = array('B', d2) for i in range(len(d1)): d1[i] ^= d2[i] return d1.tobytes() @public def utcstr(ts=None): assert(ts is None or isinstance(ts, datetime)) if ts is None: ts = datetime.utcnow() return "{0}Z".format(ts.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3]) @public def utcnow(): return utcstr() class IdGenerator(object): def __init__(self): self._next = 0 def next(self): self._next += 1 if self._next > 9007199254740992: self._next = 1 return self._next def __next__(self): return self.next() _WAMP_ID_MASK = struct.unpack(">Q", b"\x00\x1f\xff\xff\xff\xff\xff\xff")[0]
MIT License
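The body of rid() is short enough to restate as a self-contained sketch: eight random bytes from os.urandom are masked down to 53 bits, so no pseudo-random generator state is involved.

import os
import struct

# 0x001fffffffffffff == 2**53 - 1, the same mask autobahn builds at import time.
WAMP_ID_MASK = struct.unpack(">Q", b"\x00\x1f\xff\xff\xff\xff\xff\xff")[0]

new_id = struct.unpack("@Q", os.urandom(8))[0] & WAMP_ID_MASK
assert 0 <= new_id < 2 ** 53
print(new_id)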
raphaelyancey/virtual_fm_band
src/swmixer.py
uninterleave
python
def uninterleave(data): return data.reshape(2, len(data)/2, order='FORTRAN')
Given a stereo array, return separate left and right streams. This function converts one array representing interleaved left and right audio streams into separate left and right arrays. The return value is a list of length two. The input array and output arrays are all Numpy arrays. See also: interleave()
https://github.com/raphaelyancey/virtual_fm_band/blob/e3580fdea44ae557ad27f4f1a63f736045320c20/src/swmixer.py#L230-L241
import time import wave import thread import numpy import pyaudio try: import mad except: def Read24(s): for i in range(0,len(s),3): yield ord(s[i+2])*65536 + ord(s[i+1])*256 + ord(s[i]) ginit = False gstereo = True gchunksize = 1024 gsamplerate = 44100 gchannels = 1 gsamplewidth = 2 gpyaudio = None gstream = None gmicstream = None gmic = False gmicdata = None gmixer_srcs = [] gid = 1 glock = thread.allocate_lock() ginput_device_index = None goutput_device_index = None class _SoundSourceData: def __init__(self, data, loops): self.data = data self.pos = 0 self.loops = loops self.done = False def set_position(self, pos): self.pos = pos % len(self.data) def get_samples(self, sz): z = self.data[self.pos:self.pos + sz] self.pos += sz if len(z) < sz: if self.loops != 0: self.loops -= 1 self.pos = sz - len(z) z = numpy.append(z, self.data[:sz - len(z)]) else: z = numpy.append(z, numpy.zeros(sz - len(z), numpy.int16)) self.done = True if self.pos == len(self.data): if self.loops != 0: self.loops -= 1 self.pos = 0 else: self.done = True return z class _SoundSourceStream: def __init__(self, fileobj, loops): self.fileobj = fileobj self.pos = 0 self.loops = loops self.done = False self.buf = '' def set_position(self, pos): self.pos = pos self.fileobj.seek_time(pos * 1000 / gsamplerate / 2) def get_samples(self, sz): szb = sz * 2 while len(self.buf) < szb: s = self.fileobj.read() if s is None or s == '': break self.buf += s[:] z = numpy.frombuffer(self.buf[:szb], dtype=numpy.int16) if len(z) < sz: z = numpy.append(z, numpy.zeros(sz - len(z), numpy.int16)) if self.loops != 0: self.loops -= 1 self.pos = 0 self.fileobj.seek_time(0) self.buf = '' else: self.done = True else: self.buf = self.buf[szb:] return z class Channel: def __init__(self, src, env): global gid self.id = gid gid += 1 self.src = src self.env = env self.active = True self.done = False def stop(self): glock.acquire() try: gmixer_srcs.remove(self) except ValueError: None glock.release() def pause(self): glock.acquire() self.active = False glock.release() def unpause(self): glock.acquire() self.active = True glock.release() def set_volume(self, v, fadetime=0): glock.acquire() if fadetime == 0: self.env = [[0, v]] else: curv = calc_vol(self.src.pos, self.env) self.env = [[self.src.pos, curv], [self.src.pos + fadetime, v]] glock.release() def get_volume(self): glock.acquire() v = calc_vol(self.src.pos, self.env) glock.release() return v def get_position(self): glock.acquire() p = self.src.pos glock.release() return p def set_position(self, p): glock.acquire() self.src.set_position(p) glock.release() def fadeout(self, time): glock.acquire() self.set_volume(0.0, fadetime=time) glock.release() def _get_samples(self, sz): if not self.active: return None v = calc_vol(self.src.pos, self.env) z = self.src.get_samples(sz) if self.src.done: self.done = True return z * v def resample(smp, scale=1.0): n = round(len(smp) * scale) return numpy.interp( numpy.linspace(0.0, 1.0, n, endpoint=False), numpy.linspace(0.0, 1.0, len(smp), endpoint=False), smp, ) def interleave(left, right): return numpy.ravel(numpy.vstack((left, right)), order='F')
MIT License
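uninterleave() as stored above is Python 2 era code (integer division, order='FORTRAN'). A Python 3 flavoured sketch of the same idea, splitting an interleaved buffer [L0, R0, L1, R1, ...] into left and right rows:

import numpy as np

stereo = np.array([0, 10, 1, 11, 2, 12], dtype=np.int16)  # L, R, L, R, ...
left, right = stereo.reshape(len(stereo) // 2, 2).T       # equivalent to the Fortran-order reshape
print(left)   # [0 1 2]
print(right)  # [10 11 12]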
galoisinc/faw
common/pdf-observatory/main.py
_init_check_pdfs
python
async def _init_check_pdfs(): loader_proc = _db_abort_process() db = app_mongodb_conn col = db['observatory'] await asyncio.wait([ col.create_index([('queueErr', pymongo.ASCENDING)]), col.create_index([('queueStop', pymongo.ASCENDING)]), ]) batch = set() batch_max = 100 n_inserted = 0 async def insert_or_ignore(fpath): nonlocal n_inserted try: await col.insert_one({'_id': fpath, 'queueStart': None, 'queueStop': None, 'queueErr': None}) except pymongo.errors.DuplicateKeyError: pass else: n_inserted += 1 async def kick_asets_if_inserted(): nonlocal n_inserted if n_inserted == 0: return await db['as_metadata'].update_many({}, { '$set': { 'parser_versions': [None, None], 'parser_versions_done': [{}, {}], 'status': faw_analysis_set.AsStatus.UP_TO_DATE.value, }, }) n_inserted = 0 for ff in _walk_pdf_files(): if loader_proc.aborted: return batch.add(insert_or_ignore(ff)) if len(batch) > batch_max: _, batch = await asyncio.wait(batch, return_when=asyncio.FIRST_COMPLETED) await asyncio.wait(batch) await kick_asets_if_inserted() try: import watchgod except ImportError: return async for changes in watchgod.awatch(app_pdf_dir): for ctype, cpath in changes: cpath = os.path.relpath(cpath, app_pdf_dir) if ctype == watchgod.Change.added: print(f'File created: {cpath}; reprocessing if new') await insert_or_ignore(cpath) elif ctype == watchgod.Change.modified: n_inserted += 1 print(f'File modified: {cpath}; reprocessing') await db['rawinvocations'].delete_many({ 'file': os.path.join(app_pdf_dir, cpath)}) await db['observatory'].update_one( {'_id': cpath}, {'$unset': {'idle_complete': True}}) elif ctype == watchgod.Change.deleted: print(f'File deleted: {cpath}; doing nothing') else: raise NotImplementedError(ctype) await kick_asets_if_inserted()
Check the database to see what's populated and what's not. Additionally, control the flow of the various pdf-etl tools for on-demand import of the population. Run on program init and on DB reset via the UI.
https://github.com/galoisinc/faw/blob/6181a1dddefc53ff58fd9cc6f3f1ebffd5c2bb21/common/pdf-observatory/main.py#L311-L409
import app_util import faw_analysis_set import faw_analysis_set_parse import faw_analysis_set_util import faw_pipelines_util import aiohttp.web as web import asyncio import bson import cachetools import click import collections import contextlib import functools import importlib.util import ujson as json import math import motor.motor_asyncio import os import pickle import psutil import pymongo import pympler.asizeof as asizeof import re import shlex import strictyaml import sys import tempfile import time import traceback import vuespa app_config = None app_config_loaded = None app_config_path = None app_docker = False app_hostname = None app_hostport = None app_init = None app_mongodb = None app_mongodb_conn = None app_pdf_dir = None etl_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..') @click.command() @click.argument('pdf-dir') @click.argument('mongodb') @click.option('--host', type=str, default=None) @click.option('--port', type=int, default=None) @click.option('--hostname', type=str, default=None, help="Used for teaming " "deployments; specifies the hostname passed for <apiInfo>") @click.option('--in-docker/--not-in-docker', default=False, help="Must specify if running in docker.") @click.option('--production/--no-production', default=False, help="Specify to use pre-built version of UI, rather than building " "on the fly and using Vue's hot reload functionality.") @click.option('--config', type=str, required=True, help="(Required) Path to .json defining this observatory deployment.") @click.option('--quit-after-config/--no-quit-after-config', default=False) def main(pdf_dir, mongodb, host, port, hostname, in_docker, production, config, quit_after_config): global app_config, app_config_path, app_docker, app_hostname, app_hostport, app_init, app_mongodb, app_mongodb_conn, app_pdf_dir assert in_docker, 'Config specifying parsers must be in docker' app_config_path = config app_hostname = hostname if hostname is not None else 'localhost' app_hostport = port assert hostname is None or port is not None, 'Must specify port with hostname' app_pdf_dir = os.path.abspath(pdf_dir) if '/' in mongodb: app_mongodb = mongodb else: app_mongodb = 'localhost:27017/' + mongodb _config_reload() if quit_after_config: return app_docker = in_docker mhost_port, db = app_mongodb.split('/') mhost, mport = mhost_port.split(':') app_mongodb_conn = motor.motor_asyncio.AsyncIOMotorClient(host=mhost, port=int(mport))[db] loop = asyncio.get_event_loop() async def admin_cfg(): await app_mongodb_conn.client.admin.command({ 'setFeatureCompatibilityVersion': '4.4'}) loop.run_until_complete(admin_cfg()) app_config_refresh = loop.create_task(_config_check_loop()) app_init = loop.create_task(init_check_pdfs()) loop.create_task(faw_analysis_set.main_loop(app_mongodb_conn, app_config, _get_api_info)) vuespa.VueSpa('ui', Client, host=host, port=port, development=not production, config_web_callback=functools.partial(config_web, pdf_dir=pdf_dir) ).run() def config_web(app, pdf_dir): app.router.add_routes([ web.static('/file_download', pdf_dir), web.get('/file_list', _config_web_file_list_handler), ]) async def _config_web_file_list_handler(req): file_list = list(_walk_pdf_files()) return web.Response(text='\n'.join(file_list)) async def _config_check_loop(): while True: try: ts = os.path.getmtime(app_config_path) if ts != app_config_loaded: _config_reload() except Exception: traceback.print_exc() await asyncio.sleep(0.5) def _config_reload(): global app_config, app_config_loaded, app_config_path ts_loaded = 
os.path.getmtime(app_config_path) app_config = app_util.config_load(app_config_path) faw_analysis_set.config_update(app_config) for p in psutil.process_iter(): if p.name() in ['dask-worker', 'dask-scheduler']: p.kill() app_config_loaded = ts_loaded _app_parser_sizetable = {} async def _app_parser_stats(): parser_cfg = faw_analysis_set_util.lookup_all_parsers( app_mongodb_conn.delegate, app_config) parsers = [] promises = [] for k, v in parser_cfg.items(): if v.get('disabled'): continue parser = { 'id': k, 'size_doc': None, 'pipeline': True if v.get('pipeline') else False, } parsers.append(parser) r = _app_parser_sizetable.get(k) if r is not None and r[1] > time.monotonic(): parser['size_doc'] = r[0] else: async def stat_pop(k, parser): ndocs = 5 docs = await app_mongodb_conn['invocationsparsed'].find({ 'parser': k, 'exitcode': 0}).limit(ndocs).to_list(None) size = None if len(docs) > 1: size = 0 fts_size = set([dr['k'] for dr in docs[0]['result']]) for d in docs[1:]: fts_new = set([dr['k'] for dr in d['result']]) fts_new.difference_update(fts_size) size += len(pickle.dumps(fts_new)) fts_size.update(fts_new) size /= len(docs) - 1 if len(docs) == ndocs: r = [size, time.monotonic() + 600] else: r = [size, time.monotonic() + 30] _app_parser_sizetable[k] = r parser['size_doc'] = r[0] promises.append(stat_pop(k, parser)) if promises: await asyncio.wait(promises) return parsers def _get_api_info(extra_info={}): r = { 'hostname': app_hostname, 'hostport': app_hostport, 'dask': f'{app_hostname}:8786', 'mongo': app_mongodb, 'pdfdir': app_pdf_dir, } r.update(extra_info) return r def _plugin_key_process(config_key, plugin_key): extra_api_info = {} if '!' in plugin_key: aset_id, pipeline, plugin_key = plugin_key.split('!') extra_api_info['aset'] = aset_id extra_api_info['pipeline'] = pipeline plugin_def = app_config['pipelines'][pipeline][config_key].get(plugin_key) if plugin_def is None: raise ValueError(f'{config_key}: {aset_id} / {pipeline} / {plugin_key} not found') else: plugin_def = app_config[config_key].get(plugin_key) if plugin_def is None: raise ValueError(f'{config_key}: {plugin_key} not found') return plugin_def, extra_api_info def _walk_pdf_files(): for path, subfolders, files in os.walk(app_pdf_dir): for i in range(len(subfolders)-1, -1, -1): if subfolders[i].startswith('.'): subfolders.pop(i) for f in files: if f.startswith('.'): continue ff = os.path.relpath(os.path.join(path, f), app_pdf_dir) yield ff def _db_reprocess(*args, **kwargs): global app_init loop = asyncio.get_event_loop() app_init = loop.create_task(init_check_pdfs(*args, **kwargs)) class _DbLoaderProc: def __init__(self): self.aborted = False def abort(self): self.aborted = True _db_loader_proc = _DbLoaderProc() def _db_abort_process(): global _db_loader_proc _db_loader_proc.abort() _db_loader_proc = _DbLoaderProc() return _db_loader_proc async def init_check_pdfs(): try: await _init_check_pdfs() except Exception: traceback.print_exc() sys.exit(1)
BSD 3-Clause New or Revised License
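The core of _init_check_pdfs is an "insert or ignore" step keyed on the file path. A synchronous sketch of that one step with pymongo instead of motor (assumes a local mongod is running; the database and collection names below are illustrative):

import pymongo

col = pymongo.MongoClient()["observatory_demo"]["observatory"]

def insert_or_ignore(fpath: str) -> bool:
    # New files get queued with empty bookkeeping fields; duplicates are a no-op.
    try:
        col.insert_one({"_id": fpath, "queueStart": None, "queueStop": None, "queueErr": None})
        return True
    except pymongo.errors.DuplicateKeyError:
        return False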
uqcomputingsociety/uqcsbot
test/conftest.py
MockUQCSBot.mocked_reactions_add
python
def mocked_reactions_add(self, **kwargs): name = kwargs.get('name') message = self.get_channel_message(**kwargs) if name is None or message is None: return {'ok': False, 'error': 'test'} user = kwargs.get('user', TEST_BOT_ID) if 'reactions' not in message: message['reactions'] = [] reaction_object = next((r for r in message['reactions'] if r['name'] == name), None) if reaction_object is None: reaction_object = {'name': name, 'count': 0, 'users': []} if user not in reaction_object['users']: reaction_object['count'] += 1 reaction_object['users'].append(user) message['reactions'] = [r for r in message['reactions'] if r['name'] != name] message['reactions'].append(reaction_object) return {'ok': True}
Mocks the reactions.add API call.
https://github.com/uqcomputingsociety/uqcsbot/blob/3018739c2c7b7aa8c5523ff3a45f2cfdeecc51bd/test/conftest.py#L221-L249
import time from collections import defaultdict from copy import deepcopy from functools import partial from itertools import islice from typing import Optional import pytest from slack import WebClient from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from sqlalchemy.orm.session import Session import uqcsbot as uqcsbot_module from uqcsbot.api import APIWrapper from uqcsbot.base import UQCSBot, Command from uqcsbot.models import Base TEST_CHANNEL_ID = "C1234567890" TEST_GROUP_ID = "G1234567890" TEST_DIRECT_ID = "D1234567890" TEST_USER_ID = "U1234567890" TEST_BOT_ID = "B1234567890" TEST_USERS = { TEST_BOT_ID: {'id': TEST_BOT_ID, 'name': TEST_BOT_ID, 'deleted': False, 'is_bot': True, 'profile': {'display_name': TEST_BOT_ID}}, TEST_USER_ID: {'id': TEST_USER_ID, 'name': TEST_USER_ID, 'deleted': False, 'profile': {'display_name': TEST_USER_ID}} } TEST_CHANNELS = { TEST_CHANNEL_ID: {'id': TEST_CHANNEL_ID, 'name': TEST_CHANNEL_ID, 'is_public': True, 'members': [TEST_USER_ID]}, TEST_GROUP_ID: {'id': TEST_GROUP_ID, 'name': TEST_GROUP_ID, 'is_group': True, 'is_private': True, 'members': [TEST_USER_ID]}, TEST_DIRECT_ID: {'id': TEST_DIRECT_ID, 'name': TEST_DIRECT_ID, 'is_im': True, 'is_private': True, 'is_user_deleted': False, 'user': TEST_USER_ID} } for item in ['is_im', 'is_public', 'is_private', 'is_group']: for chan in TEST_CHANNELS.values(): if item not in chan: chan[item] = False class MockUQCSBot(UQCSBot): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.test_messages = defaultdict(list) self.test_users = deepcopy(TEST_USERS) self.test_channels = deepcopy(TEST_CHANNELS) self.db_engine = create_engine("sqlite:///:memory", echo=True) Base.metadata.create_all(self.db_engine) self._mock_session_maker = sessionmaker(bind=self.db_engine) def mocked_api_call(method, *, http_verb='POST', **kwargs): mocked_method = 'mocked_' + method.replace('.', '_') if mocked_method not in dir(type(self)): raise NotImplementedError(f'{method} has not been mocked.') if http_verb == 'GET': kwargs.update(kwargs.pop('params', {})) elif http_verb == 'POST': kwargs.update(kwargs.pop('json', {})) return getattr(self, mocked_method)(**kwargs) self.mocked_client = WebClient('fake-token') self.mocked_client.api_call = mocked_api_call @property def api(self): return APIWrapper(self.mocked_client, self.mocked_client) def mocked_users_info(self, **kwargs): user_id = kwargs.get('user') user = self.test_users.get(user_id) if user is None: return {'ok': False, 'error': 'test'} return {'ok': True, 'user': user} def mocked_conversations_members(self, **kwargs): channel_id = kwargs.get('channel') cursor = kwargs.get('cursor', 0) limit = kwargs.get('limit', 100) channel = self.test_channels.get(channel_id) if channel is None: return {'ok': False, 'error': 'test'} all_members = channel.get('members', []) sliced_members = all_members[cursor: cursor + limit + 1] cursor += len(sliced_members) if cursor >= len(all_members): cursor = None return {'ok': True, 'members': sliced_members, 'cursor': cursor} def mocked_conversations_history(self, **kwargs): channel_id = kwargs.get('channel') cursor = kwargs.get('cursor', 0) limit = kwargs.get('limit', 100) if channel_id not in self.test_channels: return {'ok': False, 'error': 'test'} all_messages = self.test_messages.get(channel_id, [])[::-1] sliced_messages = list(islice(all_messages, cursor, cursor + limit + 1)) cursor += len(sliced_messages) if cursor >= len(all_messages): cursor = None return {'ok': True, 'messages': sliced_messages, 
'cursor': cursor} def mocked_groups_list(self, **kwargs): return self.mocked_channels_list(channel_type='groups', **kwargs) def mocked_im_list(self, **kwargs): return self.mocked_channels_list(channel_type='ims', **kwargs) def mocked_conversations_list(self, **kwargs): return self.mocked_channels_list(channel_type='all', **kwargs) def mocked_channels_list(self, channel_type='channels', **kwargs): cursor = kwargs.get('cursor', 0) limit = kwargs.get('limit', 100) def is_channel_type(channel, channel_type): return channel.get(channel_type, False) if channel_type == 'all': def filter_function(*args): return True channel_type = 'channels' elif channel_type == 'channels': filter_function = partial(is_channel_type, channel_type='is_public') elif channel_type == 'groups': filter_function = partial(is_channel_type, channel_type='is_group') elif channel_type == 'ims': filter_function = partial(is_channel_type, channel_type='is_im') else: return {'ok': False, 'error': 'test'} all_channels = list(filter(filter_function, self.test_channels.values())) sliced_channels = all_channels[cursor: cursor + limit + 1] cursor += len(sliced_channels) if cursor >= len(all_channels): cursor = None return {'ok': True, channel_type: sliced_channels, 'cursor': cursor} def mocked_users_list(self, **kwargs): cursor = kwargs.get('cursor', 0) limit = kwargs.get('limit', 100) all_members = list(self.test_users.values()) sliced_members = all_members[cursor: cursor + limit + 1] cursor += len(sliced_members) if cursor >= len(all_members): cursor = None return {'ok': True, 'members': sliced_members, 'cursor': cursor} def get_channel_message(self, **kwargs) -> Optional[dict]: channel_id_or_name = kwargs.get('channel') timestamp = kwargs.get('timestamp') channel = self.channels.get(channel_id_or_name) if channel is None or timestamp is None: return None channel_messages = self.test_messages.get(channel.id, []) return next((m for m in channel_messages if m['ts'] == timestamp), None)
MIT License
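The mock keeps reactions in the same shape Slack uses: one entry per emoji name with a count and a list of distinct users. A standalone sketch of that bookkeeping (no uqcsbot imports; the names are illustrative):

def add_reaction(message: dict, name: str, user: str) -> None:
    reactions = message.setdefault("reactions", [])
    entry = next((r for r in reactions if r["name"] == name), None)
    if entry is None:
        entry = {"name": name, "count": 0, "users": []}
        reactions.append(entry)
    if user not in entry["users"]:          # each user is counted once per emoji
        entry["count"] += 1
        entry["users"].append(user)

msg = {"ts": "123.456"}
add_reaction(msg, "thumbsup", "U1234567890")
print(msg["reactions"])  # [{'name': 'thumbsup', 'count': 1, 'users': ['U1234567890']}]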
sketchfab/c4d-plugin
dependencies/OSX/PIL/ImageDraw.py
ImageDraw.chord
python
def chord(self, xy, start, end, fill=None, outline=None, width=1): ink, fill = self._getink(outline, fill) if fill is not None: self.draw.draw_chord(xy, start, end, fill, 1) if ink is not None and ink != fill and width != 0: self.draw.draw_chord(xy, start, end, ink, 0, width)
Draw a chord.
https://github.com/sketchfab/c4d-plugin/blob/d06ef20acdeffa53032b521073b820ed374807ef/dependencies/OSX/PIL/ImageDraw.py#L136-L142
import math import numbers from . import Image, ImageColor class ImageDraw: def __init__(self, im, mode=None): im.load() if im.readonly: im._copy() blend = 0 if mode is None: mode = im.mode if mode != im.mode: if mode == "RGBA" and im.mode == "RGB": blend = 1 else: raise ValueError("mode mismatch") if mode == "P": self.palette = im.palette else: self.palette = None self.im = im.im self.draw = Image.core.draw(self.im, blend) self.mode = mode if mode in ("I", "F"): self.ink = self.draw.draw_ink(1) else: self.ink = self.draw.draw_ink(-1) if mode in ("1", "P", "I", "F"): self.fontmode = "1" else: self.fontmode = "L" self.fill = 0 self.font = None def getfont(self): if not self.font: from . import ImageFont self.font = ImageFont.load_default() return self.font def _getink(self, ink, fill=None): if ink is None and fill is None: if self.fill: fill = self.ink else: ink = self.ink else: if ink is not None: if isinstance(ink, str): ink = ImageColor.getcolor(ink, self.mode) if self.palette and not isinstance(ink, numbers.Number): ink = self.palette.getcolor(ink) ink = self.draw.draw_ink(ink) if fill is not None: if isinstance(fill, str): fill = ImageColor.getcolor(fill, self.mode) if self.palette and not isinstance(fill, numbers.Number): fill = self.palette.getcolor(fill) fill = self.draw.draw_ink(fill) return ink, fill def arc(self, xy, start, end, fill=None, width=1): ink, fill = self._getink(fill) if ink is not None: self.draw.draw_arc(xy, start, end, ink, width) def bitmap(self, xy, bitmap, fill=None): bitmap.load() ink, fill = self._getink(fill) if ink is None: ink = fill if ink is not None: self.draw.draw_bitmap(xy, bitmap.im, ink)
Apache License 2.0
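A minimal Pillow usage sketch for chord(): the arc from start to end is closed with a straight line between its two end points, and fill and outline go through the usual ink handling shown above (the file name and colours are arbitrary):

from PIL import Image, ImageDraw

im = Image.new("RGB", (120, 120), "white")
draw = ImageDraw.Draw(im)
# Bounding box, start/end angles in degrees, then the optional fill/outline/width.
draw.chord([10, 10, 110, 110], start=0, end=180, fill="lightblue", outline="black", width=3)
im.save("chord_demo.png")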
continualai/avalanche
avalanche/evaluation/metrics/forgetting_bwt.py
GenericExperienceForgetting.update
python
def update(self, k, v, initial=False): self.forgetting.update(k, v, initial=initial)
Update the forgetting metric. See `Forgetting` for more detailed information. :param k: key to update :param v: value associated with k :param initial: if True, update the initial value; if False, update the last value.
https://github.com/continualai/avalanche/blob/9d72ee638d10af989455df8d062e8e86a4399c1d/avalanche/evaluation/metrics/forgetting_bwt.py#L176-L186
from typing import Dict, TYPE_CHECKING, Union, List from avalanche.evaluation.metric_definitions import Metric, PluginMetric from avalanche.evaluation.metric_results import MetricValue, MetricResult from avalanche.evaluation.metrics import Accuracy, Mean from avalanche.evaluation.metric_utils import get_metric_name, phase_and_task, stream_type if TYPE_CHECKING: from avalanche.training import BaseStrategy class Forgetting(Metric[Union[float, None, Dict[int, float]]]): def __init__(self): super().__init__() self.initial: Dict[int, float] = dict() self.last: Dict[int, float] = dict() def update_initial(self, k, v): self.initial[k] = v def update_last(self, k, v): self.last[k] = v def update(self, k, v, initial=False): if initial: self.update_initial(k, v) else: self.update_last(k, v) def result(self, k=None) -> Union[float, None, Dict[int, float]]: forgetting = {} if k is not None: if k in self.initial and k in self.last: return self.initial[k] - self.last[k] else: return None ik = set(self.initial.keys()) both_keys = list(ik.intersection(set(self.last.keys()))) for k in both_keys: forgetting[k] = self.initial[k] - self.last[k] return forgetting def reset_last(self) -> None: self.last: Dict[int, float] = dict() def reset(self) -> None: self.initial: Dict[int, float] = dict() self.last: Dict[int, float] = dict() class GenericExperienceForgetting(PluginMetric[Dict[int, float]]): def __init__(self): super().__init__() self.forgetting = Forgetting() self._current_metric = None self.eval_exp_id = None self.train_exp_id = None def reset(self) -> None: self.forgetting.reset() def reset_last(self) -> None: self.forgetting.reset_last()
MIT License
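The Forgetting metric that GenericExperienceForgetting wraps can be exercised on its own: store an initial accuracy per key, later store the last accuracy, and result() reports the drop. A small sketch using the module path from this record (the accuracy numbers are made up):

from avalanche.evaluation.metrics.forgetting_bwt import Forgetting

f = Forgetting()
f.update(0, 0.92, initial=True)   # accuracy on experience 0 right after training on it
f.update(0, 0.80)                 # accuracy on experience 0 after training later experiences
print(f.result(0))                # ~0.12 -> twelve accuracy points forgotten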
fitnr/convertdate
src/convertdate/bahai.py
to_jd
python
def to_jd(year, month, day): if month <= 18: gy = year - 1 + EPOCH_GREGORIAN_YEAR n_month, n_day = gregorian_nawruz(gy) return gregorian.to_jd(gy, n_month, n_day - 1) + day + (month - 1) * 19 if month == 19: return to_jd(year, month - 1, 19) + day gy = year + EPOCH_GREGORIAN_YEAR n_month, n_day = gregorian_nawruz(gy) return gregorian.to_jd(gy, n_month, n_day) - 20 + day
Determine the Julian day from a Bahai date.
https://github.com/fitnr/convertdate/blob/7c024404aee1b0f06a34113de129dac37aa5d995/src/convertdate/bahai.py#L134-L146
from calendar import isleap from math import ceil, trunc from pymeeus.Angle import Angle from pymeeus.Epoch import Epoch from pymeeus.Sun import Sun from . import gregorian from .utils import jwday, monthcalendarhelper EPOCH = 2394646.5 EPOCH_GREGORIAN_YEAR = 1844 TEHRAN = 51.4215, 35.6944 WEEKDAYS = ("Jamál", "Kamál", "Fidál", "Idál", "Istijlál", "Istiqlál", "Jalál") MONTHS = ( "Bahá", "Jalál", "Jamál", "‘Aẓamat", "Núr", "Raḥmat", "Kalimát", "Kamál", "Asmá’", "‘Izzat", "Mashíyyat", "‘Ilm", "Qudrat", "Qawl", "Masá’il", "Sharaf", "Sulṭán", "Mulk", "Ayyám-i-Há", "‘Alá", ) ENGLISH_MONTHS = ( "Splendor", "Glory", "Beauty", "Grandeur", "Light", "Mercy", "Words", "Perfection", "Names", "Might", "Will", "Knowledge", "Power", "Speech", "Questions", "Honour", "Sovereignty", "Dominion", "Days of Há", "Loftiness", ) BAHA = 1 JALAL = 2 JAMAL = 3 AZAMAT = 4 NUR = 5 RAHMAT = 6 KALIMAT = 7 KAMAL = 8 ASMA = 9 IZZAT = 10 MASHIYYAT = 11 ILM = 12 QUDRAT = 13 QAWL = 14 MASAIL = 15 SHARAF = 16 SULTAN = 17 MULK = 18 AYYAMIHA = 19 ALA = 20 def gregorian_nawruz(year): if year == 2059: return 3, 20 equinox = Sun.get_equinox_solstice(year, "spring") x, y = Angle(TEHRAN[0]), Angle(TEHRAN[1]) days = trunc(equinox.get_date()[2]), ceil(equinox.get_date()[2]) for day in days: sunset = Epoch(year, 3, day).rise_set(y, x)[1] if sunset > equinox: return 3, day raise ValueError("Couldn't find date of Nawruz.")
MIT License
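A quick round-trip sketch for to_jd(), assuming the convertdate package is installed: Naw-Ruz (1 Baha) of Bahai year 178 is expected to land on 20 March 2021 when converted back through the Gregorian helpers.

from convertdate import bahai, gregorian

jd = bahai.to_jd(178, bahai.BAHA, 1)   # 1 Baha 178 B.E.
print(gregorian.from_jd(jd))           # expected: (2021, 3, 20)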
believefxy/lightsans
recbole/data/dataloader/neg_sample_mixin.py
NegSampleMixin.get_pos_len_list
python
def get_pos_len_list(self): raise NotImplementedError('Method [get_pos_len_list] should be implemented.')
Returns: np.ndarray or list: Number of positive items for each user in a training/evaluating epoch.
https://github.com/believefxy/lightsans/blob/94ce7e59d144dbc787153b8c486cad334790ec6e/recbole/data/dataloader/neg_sample_mixin.py#L72-L77
from recbole.data.dataloader.abstract_dataloader import AbstractDataLoader from recbole.utils import DataLoaderType, EvaluatorType, FeatureSource, FeatureType, InputType class NegSampleMixin(AbstractDataLoader): dl_type = DataLoaderType.NEGSAMPLE def __init__(self, config, dataset, sampler, neg_sample_args, batch_size=1, dl_format=InputType.POINTWISE, shuffle=False): if neg_sample_args['strategy'] not in ['by', 'full']: raise ValueError('neg_sample strategy [{}] has not been implemented'.format(neg_sample_args['strategy'])) self.sampler = sampler self.neg_sample_args = neg_sample_args super().__init__(config, dataset, batch_size=batch_size, dl_format=dl_format, shuffle=shuffle) def setup(self): self._batch_size_adaptation() def data_preprocess(self): raise NotImplementedError('Method [data_preprocess] should be implemented.') def _batch_size_adaptation(self): raise NotImplementedError('Method [batch_size_adaptation] should be implemented.') def _neg_sampling(self, inter_feat): raise NotImplementedError('Method [neg_sampling] should be implemented.')
MIT License
kane610/axis
axis/view_areas.py
ViewAreas.list
python
async def list(self) -> dict: return await self._request( "post", URL_INFO, json=attr.asdict( Body("list", API_VERSION), filter=attr.filters.exclude(attr.fields(Body).params), ), )
List the content of a view area. It is possible to list either one or multiple profiles, and if the parameter streamProfileName is the empty list [], all available stream profiles will be listed. Security level: Viewer
https://github.com/kane610/axis/blob/44e141158affba2bf329b9fa69fc2d695c3fc2c9/axis/view_areas.py#L110-L124
from typing import Optional import attr from .api import APIItem, APIItems, Body URL = "/axis-cgi/viewarea" URL_INFO = f"{URL}/info.cgi" URL_CONFIG = f"{URL}/configure.cgi" API_DISCOVERY_ID = "view-area" API_VERSION = "1.0" @attr.s class Geometry: horizontalOffset: int = attr.ib() horizontalSize: int = attr.ib() verticalOffset: int = attr.ib() verticalSize: int = attr.ib() @attr.s class Size: horizontal: int = attr.ib() vertical: int = attr.ib() class ViewArea(APIItem): @property def source(self) -> int: return self.raw["source"] @property def camera(self) -> int: return self.raw["camera"] @property def configurable(self) -> bool: return self.raw["configurable"] @property def canvas_size(self) -> Size: return Size(**self.raw["canvasSize"]) @property def rectangular_geometry(self) -> Geometry: return Geometry(**self.raw["rectangularGeometry"]) @property def min_size(self) -> Size: return Size(**self.raw["minSize"]) @property def max_size(self) -> Size: return Size(**self.raw["maxSize"]) @property def grid(self) -> Geometry: return Geometry(**self.raw["grid"]) class ViewAreas(APIItems): def __init__(self, request: object) -> None: super().__init__({}, request, URL, ViewArea) async def update(self) -> None: raw = await self.list() self.process_raw(raw) @staticmethod def pre_process_raw(raw: dict) -> dict: view_area_data = raw.get("data", {}).get("viewAreas", []) return {str(api["id"]): api for api in view_area_data}
MIT License
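A hedged usage sketch for ViewAreas.list(): the request argument below stands in for the authenticated request coroutine that the axis library normally wires in from its device object, so this only runs against a real camera.

from axis.view_areas import ViewAreas

async def dump_view_areas(request):
    view_areas = ViewAreas(request)   # "request" is assumed to be the device's request coroutine
    raw = await view_areas.list()
    for area in raw.get("data", {}).get("viewAreas", []):
        print(area["id"], area.get("configurable"))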
aoikuiyuyou/aoikhotkey
src/aoikhotkeydep/pyHook_versions/pyHook-py2.7-64bit/pyHook/HookManager.py
HookManager.SubscribeMouseAllButtonsUp
python
def SubscribeMouseAllButtonsUp(self, func): self.SubscribeMouseLeftUp(func) self.SubscribeMouseRightUp(func) self.SubscribeMouseMiddleUp(func)
Registers the given function as the callback for all mouse button up events. Use the MouseAllButtonsUp property as a shortcut. @param func: Callback function @type func: callable
https://github.com/aoikuiyuyou/aoikhotkey/blob/7a04f8fddee02df41ff436a696b5261ae588cb62/src/aoikhotkeydep/pyHook_versions/pyHook-py2.7-64bit/pyHook/HookManager.py#L534-L544
from . import cpyHook def GetKeyState(key_id): return cpyHook.cGetKeyState(key_id) class HookConstants: WH_MIN = -1 WH_MSGFILTER = -1 WH_JOURNALRECORD = 0 WH_JOURNALPLAYBACK = 1 WH_KEYBOARD = 2 WH_GETMESSAGE = 3 WH_CALLWNDPROC = 4 WH_CBT = 5 WH_SYSMSGFILTER = 6 WH_MOUSE = 7 WH_HARDWARE = 8 WH_DEBUG = 9 WH_SHELL = 10 WH_FOREGROUNDIDLE = 11 WH_CALLWNDPROCRET = 12 WH_KEYBOARD_LL = 13 WH_MOUSE_LL = 14 WH_MAX = 15 WM_MOUSEFIRST = 0x0200 WM_MOUSEMOVE = 0x0200 WM_LBUTTONDOWN = 0x0201 WM_LBUTTONUP = 0x0202 WM_LBUTTONDBLCLK = 0x0203 WM_RBUTTONDOWN =0x0204 WM_RBUTTONUP = 0x0205 WM_RBUTTONDBLCLK = 0x0206 WM_MBUTTONDOWN = 0x0207 WM_MBUTTONUP = 0x0208 WM_MBUTTONDBLCLK = 0x0209 WM_MOUSEWHEEL = 0x020A WM_MOUSELAST = 0x020A WM_KEYFIRST = 0x0100 WM_KEYDOWN = 0x0100 WM_KEYUP = 0x0101 WM_CHAR = 0x0102 WM_DEADCHAR = 0x0103 WM_SYSKEYDOWN = 0x0104 WM_SYSKEYUP = 0x0105 WM_SYSCHAR = 0x0106 WM_SYSDEADCHAR = 0x0107 WM_KEYLAST = 0x0108 vk_to_id = {'VK_LBUTTON' : 0x01, 'VK_RBUTTON' : 0x02, 'VK_CANCEL' : 0x03, 'VK_MBUTTON' : 0x04, 'VK_BACK' : 0x08, 'VK_TAB' : 0x09, 'VK_CLEAR' : 0x0C, 'VK_RETURN' : 0x0D, 'VK_SHIFT' : 0x10, 'VK_CONTROL' : 0x11, 'VK_MENU' : 0x12, 'VK_PAUSE' : 0x13, 'VK_CAPITAL' : 0x14, 'VK_KANA' : 0x15, 'VK_HANGEUL' : 0x15, 'VK_HANGUL' : 0x15, 'VK_JUNJA' : 0x17, 'VK_FINAL' : 0x18, 'VK_HANJA' : 0x19, 'VK_KANJI' : 0x19, 'VK_ESCAPE' : 0x1B, 'VK_CONVERT' : 0x1C, 'VK_NONCONVERT' : 0x1D, 'VK_ACCEPT' : 0x1E, 'VK_MODECHANGE' : 0x1F, 'VK_SPACE' : 0x20, 'VK_PRIOR' : 0x21, 'VK_NEXT' : 0x22, 'VK_END' : 0x23, 'VK_HOME' : 0x24, 'VK_LEFT' : 0x25, 'VK_UP' : 0x26, 'VK_RIGHT' : 0x27, 'VK_DOWN' : 0x28, 'VK_SELECT' : 0x29, 'VK_PRINT' : 0x2A, 'VK_EXECUTE' : 0x2B, 'VK_SNAPSHOT' : 0x2C, 'VK_INSERT' : 0x2D, 'VK_DELETE' : 0x2E, 'VK_HELP' : 0x2F, 'VK_LWIN' : 0x5B, 'VK_RWIN' : 0x5C, 'VK_APPS' : 0x5D, 'VK_NUMPAD0' : 0x60, 'VK_NUMPAD1' : 0x61, 'VK_NUMPAD2' : 0x62, 'VK_NUMPAD3' : 0x63, 'VK_NUMPAD4' : 0x64, 'VK_NUMPAD5' : 0x65, 'VK_NUMPAD6' : 0x66, 'VK_NUMPAD7' : 0x67, 'VK_NUMPAD8' : 0x68, 'VK_NUMPAD9' : 0x69, 'VK_MULTIPLY' : 0x6A, 'VK_ADD' : 0x6B, 'VK_SEPARATOR' : 0x6C, 'VK_SUBTRACT' : 0x6D, 'VK_DECIMAL' : 0x6E, 'VK_DIVIDE' : 0x6F ,'VK_F1' : 0x70, 'VK_F2' : 0x71, 'VK_F3' : 0x72, 'VK_F4' : 0x73, 'VK_F5' : 0x74, 'VK_F6' : 0x75, 'VK_F7' : 0x76, 'VK_F8' : 0x77, 'VK_F9' : 0x78, 'VK_F10' : 0x79, 'VK_F11' : 0x7A, 'VK_F12' : 0x7B, 'VK_F13' : 0x7C, 'VK_F14' : 0x7D, 'VK_F15' : 0x7E, 'VK_F16' : 0x7F, 'VK_F17' : 0x80, 'VK_F18' : 0x81, 'VK_F19' : 0x82, 'VK_F20' : 0x83, 'VK_F21' : 0x84, 'VK_F22' : 0x85, 'VK_F23' : 0x86, 'VK_F24' : 0x87, 'VK_NUMLOCK' : 0x90, 'VK_SCROLL' : 0x91, 'VK_LSHIFT' : 0xA0, 'VK_RSHIFT' : 0xA1, 'VK_LCONTROL' : 0xA2, 'VK_RCONTROL' : 0xA3, 'VK_LMENU' : 0xA4, 'VK_RMENU' : 0xA5, 'VK_PROCESSKEY' : 0xE5, 'VK_ATTN' : 0xF6, 'VK_CRSEL' : 0xF7, 'VK_EXSEL' : 0xF8, 'VK_EREOF' : 0xF9, 'VK_PLAY' : 0xFA, 'VK_ZOOM' : 0xFB, 'VK_NONAME' : 0xFC, 'VK_PA1' : 0xFD, 'VK_OEM_CLEAR' : 0xFE, 'VK_BROWSER_BACK' : 0xA6, 'VK_BROWSER_FORWARD' : 0xA7, 'VK_BROWSER_REFRESH' : 0xA8, 'VK_BROWSER_STOP' : 0xA9, 'VK_BROWSER_SEARCH' : 0xAA, 'VK_BROWSER_FAVORITES' : 0xAB, 'VK_BROWSER_HOME' : 0xAC, 'VK_VOLUME_MUTE' : 0xAD, 'VK_VOLUME_DOWN' : 0xAE, 'VK_VOLUME_UP' : 0xAF, 'VK_MEDIA_NEXT_TRACK' : 0xB0, 'VK_MEDIA_PREV_TRACK' : 0xB1, 'VK_MEDIA_STOP' : 0xB2, 'VK_MEDIA_PLAY_PAUSE' : 0xB3, 'VK_LAUNCH_MAIL' : 0xB4, 'VK_LAUNCH_MEDIA_SELECT' : 0xB5, 'VK_LAUNCH_APP1' : 0xB6, 'VK_LAUNCH_APP2' : 0xB7, 'VK_OEM_1' : 0xBA, 'VK_OEM_PLUS' : 0xBB, 'VK_OEM_COMMA' : 0xBC, 'VK_OEM_MINUS' : 0xBD, 'VK_OEM_PERIOD' : 0xBE, 'VK_OEM_2' : 0xBF, 'VK_OEM_3' : 0xC0, 'VK_OEM_4' : 0xDB, 'VK_OEM_5' : 
0xDC, 'VK_OEM_6' : 0xDD, 'VK_OEM_7' : 0xDE, 'VK_OEM_8' : 0xDF, 'VK_OEM_102' : 0xE2, 'VK_PROCESSKEY' : 0xE5, 'VK_PACKET' : 0xE7} id_to_vk = dict([(v,k) for k,v in vk_to_id.items()]) msg_to_name = {WM_MOUSEMOVE : 'mouse move', WM_LBUTTONDOWN : 'mouse left down', WM_LBUTTONUP : 'mouse left up', WM_LBUTTONDBLCLK : 'mouse left double', WM_RBUTTONDOWN : 'mouse right down', WM_RBUTTONUP : 'mouse right up', WM_RBUTTONDBLCLK : 'mouse right double', WM_MBUTTONDOWN : 'mouse middle down', WM_MBUTTONUP : 'mouse middle up', WM_MBUTTONDBLCLK : 'mouse middle double', WM_MOUSEWHEEL : 'mouse wheel', WM_KEYDOWN : 'key down', WM_KEYUP : 'key up', WM_CHAR : 'key char', WM_DEADCHAR : 'key dead char', WM_SYSKEYDOWN : 'key sys down', WM_SYSKEYUP : 'key sys up', WM_SYSCHAR : 'key sys char', WM_SYSDEADCHAR : 'key sys dead char'} def MsgToName(cls, msg): return HookConstants.msg_to_name.get(msg) def VKeyToID(cls, vkey): return HookConstants.vk_to_id.get(vkey) def IDToName(cls, code): if (code >= 0x30 and code <= 0x39) or (code >= 0x41 and code <= 0x5A): text = chr(code) else: text = HookConstants.id_to_vk.get(code) if text is not None: text = text[3:].title() return text MsgToName=classmethod(MsgToName) IDToName=classmethod(IDToName) VKeyToID=classmethod(VKeyToID) class HookEvent(object): def __init__(self, msg, time, hwnd, window_name): self.Message = msg self.Time = time self.Window = hwnd self.WindowName = window_name def GetMessageName(self): return HookConstants.MsgToName(self.Message) MessageName = property(fget=GetMessageName) class MouseEvent(HookEvent): def __init__(self, msg, x, y, data, flags, time, hwnd, window_name): HookEvent.__init__(self, msg, time, hwnd, window_name) self.Position = (x,y) if data > 0: w = 1 elif data < 0: w = -1 else: w = 0 self.Wheel = w self.Injected = flags & 0x01 class KeyboardEvent(HookEvent): def __init__(self, msg, vk_code, scan_code, ascii, flags, time, hwnd, window_name): HookEvent.__init__(self, msg, time, hwnd, window_name) self.KeyID = vk_code self.ScanCode = scan_code self.Ascii = ascii self.flags = flags def GetKey(self): return HookConstants.IDToName(self.KeyID) def IsExtended(self): return self.flags & 0x01 def IsInjected(self): return self.flags & 0x10 def IsAlt(self): return self.flags & 0x20 def IsTransition(self): return self.flags & 0x80 Key = property(fget=GetKey) Extended = property(fget=IsExtended) Injected = property(fget=IsInjected) Alt = property(fget=IsAlt) Transition = property(fget=IsTransition) class HookManager(object): def __init__(self): self.mouse_funcs = {} self.keyboard_funcs = {} self.mouse_hook = False self.key_hook = False def __del__(self): self.UnhookMouse() self.UnhookKeyboard() def HookMouse(self): cpyHook.cSetHook(HookConstants.WH_MOUSE_LL, self.MouseSwitch) self.mouse_hook = True def HookKeyboard(self): cpyHook.cSetHook(HookConstants.WH_KEYBOARD_LL, self.KeyboardSwitch) self.keyboard_hook = True def UnhookMouse(self): if self.mouse_hook: cpyHook.cUnhook(HookConstants.WH_MOUSE_LL) self.mouse_hook = False def UnhookKeyboard(self): if self.keyboard_hook: cpyHook.cUnhook(HookConstants.WH_KEYBOARD_LL) self.keyboard_hook = False def MouseSwitch(self, msg, x, y, data, flags, time, hwnd, window_name): event = MouseEvent(msg, x, y, data, flags, time, hwnd, window_name) func = self.mouse_funcs.get(msg) if func: return func(event) else: return True def KeyboardSwitch(self, msg, vk_code, scan_code, ascii, flags, time, hwnd, win_name): event = KeyboardEvent(msg, vk_code, scan_code, ascii, flags, time, hwnd, win_name) func = 
self.keyboard_funcs.get(msg) if func: return func(event) else: return True def SubscribeMouseMove(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_MOUSEMOVE) else: self.connect(self.mouse_funcs, HookConstants.WM_MOUSEMOVE, func) def SubscribeMouseLeftUp(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_LBUTTONUP) else: self.connect(self.mouse_funcs, HookConstants.WM_LBUTTONUP, func) def SubscribeMouseLeftDown(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_LBUTTONDOWN) else: self.connect(self.mouse_funcs, HookConstants.WM_LBUTTONDOWN, func) def SubscribeMouseLeftDbl(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_LBUTTONDBLCLK) else: self.connect(self.mouse_funcs, HookConstants.WM_LBUTTONDBLCLK, func) def SubscribeMouseRightUp(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_RBUTTONUP) else: self.connect(self.mouse_funcs, HookConstants.WM_RBUTTONUP, func) def SubscribeMouseRightDown(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_RBUTTONDOWN) else: self.connect(self.mouse_funcs, HookConstants.WM_RBUTTONDOWN, func) def SubscribeMouseRightDbl(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_RBUTTONDBLCLK) else: self.connect(self.mouse_funcs, HookConstants.WM_RBUTTONDBLCLK, func) def SubscribeMouseMiddleUp(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_MBUTTONUP) else: self.connect(self.mouse_funcs, HookConstants.WM_MBUTTONUP, func) def SubscribeMouseMiddleDown(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_MBUTTONDOWN) else: self.connect(self.mouse_funcs, HookConstants.WM_MBUTTONDOWN, func) def SubscribeMouseMiddleDbl(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_MBUTTONDBLCLK) else: self.connect(self.mouse_funcs, HookConstants.WM_MBUTTONDBLCLK, func) def SubscribeMouseWheel(self, func): if func is None: self.disconnect(self.mouse_funcs, HookConstants.WM_MOUSEWHEEL) else: self.connect(self.mouse_funcs, HookConstants.WM_MOUSEWHEEL, func) def SubscribeMouseAll(self, func): self.SubscribeMouseMove(func) self.SubscribeMouseWheel(func) self.SubscribeMouseAllButtons(func) def SubscribeMouseAllButtons(self, func): self.SubscribeMouseAllButtonsDown(func) self. SubscribeMouseAllButtonsUp(func) self.SubscribeMouseAllButtonsDbl(func) def SubscribeMouseAllButtonsDown(self, func): self.SubscribeMouseLeftDown(func) self.SubscribeMouseRightDown(func) self.SubscribeMouseMiddleDown(func)
MIT License
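The classic pyHook wiring for these subscription helpers, Windows only and assuming pyHook plus pywin32 are installed (pythoncom pumps the message loop so the hook callbacks fire):

import pythoncom
import pyHook

def on_button_up(event):
    print(event.MessageName, event.Position)
    return True                      # True lets the event propagate to other handlers

hm = pyHook.HookManager()
hm.SubscribeMouseAllButtonsUp(on_button_up)
hm.HookMouse()
pythoncom.PumpMessages()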
plaid/plaid-python
plaid/model/paystub_address.py
PaystubAddress.__init__
python
def __init__(self, *args, **kwargs): _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: continue setattr(self, var_name, var_value)
PaystubAddress - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) city (str, none_type): The full city name.. [optional] # noqa: E501 country (str, none_type): The ISO 3166-1 alpha-2 country code.. [optional] # noqa: E501 postal_code (str, none_type): The postal code of the address.. [optional] # noqa: E501 region (str, none_type): The region or state Example: `\"NC\"`. [optional] # noqa: E501 street (str, none_type): The full street address.. [optional] # noqa: E501 line1 (str, none_type): Street address line 1.. [optional] # noqa: E501 line2 (str, none_type): Street address line 2.. [optional] # noqa: E501 state_code (str, none_type): The region or state Example: `\"NC\"`. [optional] # noqa: E501
https://github.com/plaid/plaid-python/blob/950d04d621a5f5b92a7705cc30d14d4004db8543/plaid/model/paystub_address.py#L117-L191
import re import sys from plaid.model_utils import ( ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) class PaystubAddress(ModelNormal): allowed_values = { } validations = { } @cached_property def additional_properties_type(): return (bool, date, datetime, dict, float, int, list, str, none_type,) _nullable = False @cached_property def openapi_types(): return { 'city': (str, none_type,), 'country': (str, none_type,), 'postal_code': (str, none_type,), 'region': (str, none_type,), 'street': (str, none_type,), 'line1': (str, none_type,), 'line2': (str, none_type,), 'state_code': (str, none_type,), } @cached_property def discriminator(): return None attribute_map = { 'city': 'city', 'country': 'country', 'postal_code': 'postal_code', 'region': 'region', 'street': 'street', 'line1': 'line1', 'line2': 'line2', 'state_code': 'state_code', } _composed_schemas = {} required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) @convert_js_args_to_python_args
MIT License
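Constructing the generated PaystubAddress model is plain keyword arguments, since every field is optional; a minimal sketch with made-up values:

from plaid.model.paystub_address import PaystubAddress

addr = PaystubAddress(
    city="Durham",
    region="NC",
    postal_code="27701",
    country="US",
    street="123 Main St",
)
print(addr.to_dict())   # standard helper on the OpenAPI-generated model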
zzzeek/alembic
alembic/autogenerate/render.py
_fk_colspec
python
def _fk_colspec( fk: "ForeignKey", metadata_schema: Optional[str], namespace_metadata: "MetaData", ) -> str: colspec = fk._get_colspec() tokens = colspec.split(".") tname, colname = tokens[-2:] if metadata_schema is not None and len(tokens) == 2: table_fullname = "%s.%s" % (metadata_schema, tname) else: table_fullname = ".".join(tokens[0:-1]) if ( not fk.link_to_name and fk.parent is not None and fk.parent.table is not None ): if table_fullname in namespace_metadata.tables: col = namespace_metadata.tables[table_fullname].c.get(colname) if col is not None: colname = _ident(col.name) colspec = "%s.%s" % (table_fullname, colname) return colspec
Implement a 'safe' version of ForeignKey._get_colspec() that won't fail if the remote table can't be resolved.
https://github.com/zzzeek/alembic/blob/9b01e5fa7178333f2e78ee0fc1322112307b51dd/alembic/autogenerate/render.py#L958-L992
from collections import OrderedDict from io import StringIO import re from typing import Any from typing import cast from typing import Dict from typing import List from typing import Optional from typing import Tuple from typing import TYPE_CHECKING from typing import Union from mako.pygen import PythonPrinter from sqlalchemy import schema as sa_schema from sqlalchemy import sql from sqlalchemy import types as sqltypes from sqlalchemy.sql.elements import conv from .. import util from ..operations import ops from ..util import compat from ..util import sqla_compat from ..util.compat import string_types if TYPE_CHECKING: from typing import Literal from sqlalchemy.sql.elements import ColumnElement from sqlalchemy.sql.elements import quoted_name from sqlalchemy.sql.elements import TextClause from sqlalchemy.sql.schema import CheckConstraint from sqlalchemy.sql.schema import Column from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.schema import DefaultClause from sqlalchemy.sql.schema import FetchedValue from sqlalchemy.sql.schema import ForeignKey from sqlalchemy.sql.schema import ForeignKeyConstraint from sqlalchemy.sql.schema import Index from sqlalchemy.sql.schema import MetaData from sqlalchemy.sql.schema import PrimaryKeyConstraint from sqlalchemy.sql.schema import UniqueConstraint from sqlalchemy.sql.sqltypes import ARRAY from sqlalchemy.sql.type_api import TypeEngine from sqlalchemy.sql.type_api import Variant from alembic.autogenerate.api import AutogenContext from alembic.config import Config from alembic.operations.ops import MigrationScript from alembic.operations.ops import ModifyTableOps from alembic.util.sqla_compat import Computed from alembic.util.sqla_compat import Identity MAX_PYTHON_ARGS = 255 def _render_gen_name( autogen_context: "AutogenContext", name: Optional[Union["quoted_name", str]], ) -> Optional[Union["quoted_name", str, "_f_name"]]: if isinstance(name, conv): return _f_name(_alembic_autogenerate_prefix(autogen_context), name) else: return name def _indent(text: str) -> str: text = re.compile(r"^", re.M).sub(" ", text).strip() text = re.compile(r" +$", re.M).sub("", text) return text def _render_python_into_templatevars( autogen_context: "AutogenContext", migration_script: "MigrationScript", template_args: Dict[str, Union[str, "Config"]], ) -> None: imports = autogen_context.imports for upgrade_ops, downgrade_ops in zip( migration_script.upgrade_ops_list, migration_script.downgrade_ops_list ): template_args[upgrade_ops.upgrade_token] = _indent( _render_cmd_body(upgrade_ops, autogen_context) ) template_args[downgrade_ops.downgrade_token] = _indent( _render_cmd_body(downgrade_ops, autogen_context) ) template_args["imports"] = "\n".join(sorted(imports)) default_renderers = renderers = util.Dispatcher() def _render_cmd_body( op_container: "ops.OpContainer", autogen_context: "AutogenContext", ) -> str: buf = StringIO() printer = PythonPrinter(buf) printer.writeline( "# ### commands auto generated by Alembic - please adjust! 
###" ) has_lines = False for op in op_container.ops: lines = render_op(autogen_context, op) has_lines = has_lines or bool(lines) for line in lines: printer.writeline(line) if not has_lines: printer.writeline("pass") printer.writeline("# ### end Alembic commands ###") return buf.getvalue() def render_op( autogen_context: "AutogenContext", op: "ops.MigrateOperation" ) -> List[str]: renderer = renderers.dispatch(op) lines = util.to_list(renderer(autogen_context, op)) return lines def render_op_text( autogen_context: "AutogenContext", op: "ops.MigrateOperation" ) -> str: return "\n".join(render_op(autogen_context, op)) @renderers.dispatch_for(ops.ModifyTableOps) def _render_modify_table( autogen_context: "AutogenContext", op: "ModifyTableOps" ) -> List[str]: opts = autogen_context.opts render_as_batch = opts.get("render_as_batch", False) if op.ops: lines = [] if render_as_batch: with autogen_context._within_batch(): lines.append( "with op.batch_alter_table(%r, schema=%r) as batch_op:" % (op.table_name, op.schema) ) for t_op in op.ops: t_lines = render_op(autogen_context, t_op) lines.extend(t_lines) lines.append("") else: for t_op in op.ops: t_lines = render_op(autogen_context, t_op) lines.extend(t_lines) return lines else: return [] @renderers.dispatch_for(ops.CreateTableCommentOp) def _render_create_table_comment( autogen_context: "AutogenContext", op: "ops.CreateTableCommentOp" ) -> str: templ = ( "{prefix}create_table_comment(\n" "{indent}'{tname}',\n" "{indent}{comment},\n" "{indent}existing_comment={existing},\n" "{indent}schema={schema}\n" ")" ) return templ.format( prefix=_alembic_autogenerate_prefix(autogen_context), tname=op.table_name, comment="%r" % op.comment if op.comment is not None else None, existing="%r" % op.existing_comment if op.existing_comment is not None else None, schema="'%s'" % op.schema if op.schema is not None else None, indent=" ", ) @renderers.dispatch_for(ops.DropTableCommentOp) def _render_drop_table_comment( autogen_context: "AutogenContext", op: "ops.DropTableCommentOp" ) -> str: templ = ( "{prefix}drop_table_comment(\n" "{indent}'{tname}',\n" "{indent}existing_comment={existing},\n" "{indent}schema={schema}\n" ")" ) return templ.format( prefix=_alembic_autogenerate_prefix(autogen_context), tname=op.table_name, existing="%r" % op.existing_comment if op.existing_comment is not None else None, schema="'%s'" % op.schema if op.schema is not None else None, indent=" ", ) @renderers.dispatch_for(ops.CreateTableOp) def _add_table( autogen_context: "AutogenContext", op: "ops.CreateTableOp" ) -> str: table = op.to_table() args = [ col for col in [ _render_column(col, autogen_context) for col in table.columns ] if col ] + sorted( [ rcons for rcons in [ _render_constraint( cons, autogen_context, op._namespace_metadata ) for cons in table.constraints ] if rcons is not None ] ) if len(args) > MAX_PYTHON_ARGS: args_str = "*[" + ",\n".join(args) + "]" else: args_str = ",\n".join(args) text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { "tablename": _ident(op.table_name), "prefix": _alembic_autogenerate_prefix(autogen_context), "args": args_str, } if op.schema: text += ",\nschema=%r" % _ident(op.schema) comment = table.comment if comment: text += ",\ncomment=%r" % _ident(comment) for k in sorted(op.kw): text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k]) if table._prefixes: prefixes = ", ".join("'%s'" % p for p in table._prefixes) text += ",\nprefixes=[%s]" % prefixes text += "\n)" return text @renderers.dispatch_for(ops.DropTableOp) def _drop_table( 
autogen_context: "AutogenContext", op: "ops.DropTableOp" ) -> str: text = "%(prefix)sdrop_table(%(tname)r" % { "prefix": _alembic_autogenerate_prefix(autogen_context), "tname": _ident(op.table_name), } if op.schema: text += ", schema=%r" % _ident(op.schema) text += ")" return text @renderers.dispatch_for(ops.CreateIndexOp) def _add_index( autogen_context: "AutogenContext", op: "ops.CreateIndexOp" ) -> str: index = op.to_index() has_batch = autogen_context._has_batch if has_batch: tmpl = ( "%(prefix)screate_index(%(name)r, [%(columns)s], " "unique=%(unique)r%(kwargs)s)" ) else: tmpl = ( "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], " "unique=%(unique)r%(schema)s%(kwargs)s)" ) assert index.table is not None text = tmpl % { "prefix": _alembic_autogenerate_prefix(autogen_context), "name": _render_gen_name(autogen_context, index.name), "table": _ident(index.table.name), "columns": ", ".join( _get_index_rendered_expressions(index, autogen_context) ), "unique": index.unique or False, "schema": (", schema=%r" % _ident(index.table.schema)) if index.table.schema else "", "kwargs": ( ", " + ", ".join( [ "%s=%s" % (key, _render_potential_expr(val, autogen_context)) for key, val in index.kwargs.items() ] ) ) if len(index.kwargs) else "", } return text @renderers.dispatch_for(ops.DropIndexOp) def _drop_index( autogen_context: "AutogenContext", op: "ops.DropIndexOp" ) -> str: index = op.to_index() has_batch = autogen_context._has_batch if has_batch: tmpl = "%(prefix)sdrop_index(%(name)r%(kwargs)s)" else: tmpl = ( "%(prefix)sdrop_index(%(name)r, " "table_name=%(table_name)r%(schema)s%(kwargs)s)" ) text = tmpl % { "prefix": _alembic_autogenerate_prefix(autogen_context), "name": _render_gen_name(autogen_context, op.index_name), "table_name": _ident(op.table_name), "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""), "kwargs": ( ", " + ", ".join( [ "%s=%s" % (key, _render_potential_expr(val, autogen_context)) for key, val in index.kwargs.items() ] ) ) if len(index.kwargs) else "", } return text @renderers.dispatch_for(ops.CreateUniqueConstraintOp) def _add_unique_constraint( autogen_context: "AutogenContext", op: "ops.CreateUniqueConstraintOp" ) -> List[str]: return [_uq_constraint(op.to_constraint(), autogen_context, True)] @renderers.dispatch_for(ops.CreateForeignKeyOp) def _add_fk_constraint( autogen_context: "AutogenContext", op: "ops.CreateForeignKeyOp" ) -> str: args = [repr(_render_gen_name(autogen_context, op.constraint_name))] if not autogen_context._has_batch: args.append(repr(_ident(op.source_table))) args.extend( [ repr(_ident(op.referent_table)), repr([_ident(col) for col in op.local_cols]), repr([_ident(col) for col in op.remote_cols]), ] ) kwargs = [ "referent_schema", "onupdate", "ondelete", "initially", "deferrable", "use_alter", ] if not autogen_context._has_batch: kwargs.insert(0, "source_schema") for k in kwargs: if k in op.kw: value = op.kw[k] if value is not None: args.append("%s=%r" % (k, value)) return "%(prefix)screate_foreign_key(%(args)s)" % { "prefix": _alembic_autogenerate_prefix(autogen_context), "args": ", ".join(args), } @renderers.dispatch_for(ops.CreatePrimaryKeyOp) def _add_pk_constraint(constraint, autogen_context): raise NotImplementedError() @renderers.dispatch_for(ops.CreateCheckConstraintOp) def _add_check_constraint(constraint, autogen_context): raise NotImplementedError() @renderers.dispatch_for(ops.DropConstraintOp) def _drop_constraint( autogen_context: "AutogenContext", op: "ops.DropConstraintOp" ) -> str: if 
autogen_context._has_batch: template = "%(prefix)sdrop_constraint" "(%(name)r, type_=%(type)r)" else: template = ( "%(prefix)sdrop_constraint" "(%(name)r, '%(table_name)s'%(schema)s, type_=%(type)r)" ) text = template % { "prefix": _alembic_autogenerate_prefix(autogen_context), "name": _render_gen_name(autogen_context, op.constraint_name), "table_name": _ident(op.table_name), "type": op.constraint_type, "schema": (", schema=%r" % _ident(op.schema)) if op.schema else "", } return text @renderers.dispatch_for(ops.AddColumnOp) def _add_column( autogen_context: "AutogenContext", op: "ops.AddColumnOp" ) -> str: schema, tname, column = op.schema, op.table_name, op.column if autogen_context._has_batch: template = "%(prefix)sadd_column(%(column)s)" else: template = "%(prefix)sadd_column(%(tname)r, %(column)s" if schema: template += ", schema=%(schema)r" template += ")" text = template % { "prefix": _alembic_autogenerate_prefix(autogen_context), "tname": tname, "column": _render_column(column, autogen_context), "schema": schema, } return text @renderers.dispatch_for(ops.DropColumnOp) def _drop_column( autogen_context: "AutogenContext", op: "ops.DropColumnOp" ) -> str: schema, tname, column_name = op.schema, op.table_name, op.column_name if autogen_context._has_batch: template = "%(prefix)sdrop_column(%(cname)r)" else: template = "%(prefix)sdrop_column(%(tname)r, %(cname)r" if schema: template += ", schema=%(schema)r" template += ")" text = template % { "prefix": _alembic_autogenerate_prefix(autogen_context), "tname": _ident(tname), "cname": _ident(column_name), "schema": _ident(schema), } return text @renderers.dispatch_for(ops.AlterColumnOp) def _alter_column( autogen_context: "AutogenContext", op: "ops.AlterColumnOp" ) -> str: tname = op.table_name cname = op.column_name server_default = op.modify_server_default type_ = op.modify_type nullable = op.modify_nullable comment = op.modify_comment autoincrement = op.kw.get("autoincrement", None) existing_type = op.existing_type existing_nullable = op.existing_nullable existing_comment = op.existing_comment existing_server_default = op.existing_server_default schema = op.schema indent = " " * 11 if autogen_context._has_batch: template = "%(prefix)salter_column(%(cname)r" else: template = "%(prefix)salter_column(%(tname)r, %(cname)r" text = template % { "prefix": _alembic_autogenerate_prefix(autogen_context), "tname": tname, "cname": cname, } if existing_type is not None: text += ",\n%sexisting_type=%s" % ( indent, _repr_type(existing_type, autogen_context), ) if server_default is not False: rendered = _render_server_default(server_default, autogen_context) text += ",\n%sserver_default=%s" % (indent, rendered) if type_ is not None: text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context)) if nullable is not None: text += ",\n%snullable=%r" % (indent, nullable) if comment is not False: text += ",\n%scomment=%r" % (indent, comment) if existing_comment is not None: text += ",\n%sexisting_comment=%r" % (indent, existing_comment) if nullable is None and existing_nullable is not None: text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable) if autoincrement is not None: text += ",\n%sautoincrement=%r" % (indent, autoincrement) if server_default is False and existing_server_default: rendered = _render_server_default( existing_server_default, autogen_context ) text += ",\n%sexisting_server_default=%s" % (indent, rendered) if schema and not autogen_context._has_batch: text += ",\n%sschema=%r" % (indent, schema) text += ")" return text class 
_f_name: def __init__(self, prefix: str, name: conv) -> None: self.prefix = prefix self.name = name def __repr__(self) -> str: return "%sf(%r)" % (self.prefix, _ident(self.name)) def _ident(name: Optional[Union["quoted_name", str]]) -> Optional[str]: if name is None: return name elif isinstance(name, sql.elements.quoted_name): return compat.text_type(name) elif isinstance(name, compat.string_types): return name def _render_potential_expr( value: Any, autogen_context: "AutogenContext", wrap_in_text: bool = True, is_server_default: bool = False, ) -> str: if isinstance(value, sql.ClauseElement): if wrap_in_text: template = "%(prefix)stext(%(sql)r)" else: template = "%(sql)r" return template % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "sql": autogen_context.migration_context.impl.render_ddl_sql_expr( value, is_server_default=is_server_default ), } else: return repr(value) def _get_index_rendered_expressions( idx: "Index", autogen_context: "AutogenContext" ) -> List[str]: return [ repr(_ident(getattr(exp, "name", None))) if isinstance(exp, sa_schema.Column) else _render_potential_expr(exp, autogen_context) for exp in idx.expressions ] def _uq_constraint( constraint: "UniqueConstraint", autogen_context: "AutogenContext", alter: bool, ) -> str: opts: List[Tuple[str, Any]] = [] has_batch = autogen_context._has_batch if constraint.deferrable: opts.append(("deferrable", str(constraint.deferrable))) if constraint.initially: opts.append(("initially", str(constraint.initially))) if not has_batch and alter and constraint.table.schema: opts.append(("schema", _ident(constraint.table.schema))) if not alter and constraint.name: opts.append( ("name", _render_gen_name(autogen_context, constraint.name)) ) if alter: args = [repr(_render_gen_name(autogen_context, constraint.name))] if not has_batch: args += [repr(_ident(constraint.table.name))] args.append(repr([_ident(col.name) for col in constraint.columns])) args.extend(["%s=%r" % (k, v) for k, v in opts]) return "%(prefix)screate_unique_constraint(%(args)s)" % { "prefix": _alembic_autogenerate_prefix(autogen_context), "args": ", ".join(args), } else: args = [repr(_ident(col.name)) for col in constraint.columns] args.extend(["%s=%r" % (k, v) for k, v in opts]) return "%(prefix)sUniqueConstraint(%(args)s)" % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "args": ", ".join(args), } def _user_autogenerate_prefix(autogen_context, target): prefix = autogen_context.opts["user_module_prefix"] if prefix is None: return "%s." % target.__module__ else: return prefix def _sqlalchemy_autogenerate_prefix(autogen_context: "AutogenContext") -> str: return autogen_context.opts["sqlalchemy_module_prefix"] or "" def _alembic_autogenerate_prefix(autogen_context: "AutogenContext") -> str: if autogen_context._has_batch: return "batch_op." 
else: return autogen_context.opts["alembic_module_prefix"] or "" def _user_defined_render( type_: str, object_: Any, autogen_context: "AutogenContext" ) -> Union[str, "Literal[False]"]: if "render_item" in autogen_context.opts: render = autogen_context.opts["render_item"] if render: rendered = render(type_, object_, autogen_context) if rendered is not False: return rendered return False def _render_column(column: "Column", autogen_context: "AutogenContext") -> str: rendered = _user_defined_render("column", column, autogen_context) if rendered is not False: return rendered args: List[str] = [] opts: List[Tuple[str, Any]] = [] if column.server_default: rendered = _render_server_default( column.server_default, autogen_context ) if rendered: if _should_render_server_default_positionally( column.server_default ): args.append(rendered) else: opts.append(("server_default", rendered)) if ( column.autoincrement is not None and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT ): opts.append(("autoincrement", column.autoincrement)) if column.nullable is not None: opts.append(("nullable", column.nullable)) if column.system: opts.append(("system", column.system)) comment = column.comment if comment: opts.append(("comment", "%r" % comment)) return "%(prefix)sColumn(%(name)r, %(type)s, %(args)s%(kwargs)s)" % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "name": _ident(column.name), "type": _repr_type(column.type, autogen_context), "args": ", ".join([str(arg) for arg in args]) + ", " if args else "", "kwargs": ( ", ".join( ["%s=%s" % (kwname, val) for kwname, val in opts] + [ "%s=%s" % (key, _render_potential_expr(val, autogen_context)) for key, val in sqla_compat._column_kwargs(column).items() ] ) ), } def _should_render_server_default_positionally( server_default: Union["Computed", "DefaultClause"] ) -> bool: return sqla_compat._server_default_is_computed( server_default ) or sqla_compat._server_default_is_identity(server_default) def _render_server_default( default: Optional[ Union["FetchedValue", str, "TextClause", "ColumnElement"] ], autogen_context: "AutogenContext", repr_: bool = True, ) -> Optional[str]: rendered = _user_defined_render("server_default", default, autogen_context) if rendered is not False: return rendered if sqla_compat._server_default_is_computed(default): return _render_computed(cast("Computed", default), autogen_context) elif sqla_compat._server_default_is_identity(default): return _render_identity(cast("Identity", default), autogen_context) elif isinstance(default, sa_schema.DefaultClause): if isinstance(default.arg, compat.string_types): default = default.arg else: return _render_potential_expr( default.arg, autogen_context, is_server_default=True ) if isinstance(default, string_types) and repr_: default = repr(re.sub(r"^'|'$", "", default)) return cast(str, default) def _render_computed( computed: "Computed", autogen_context: "AutogenContext" ) -> str: text = _render_potential_expr( computed.sqltext, autogen_context, wrap_in_text=False ) kwargs = {} if computed.persisted is not None: kwargs["persisted"] = computed.persisted return "%(prefix)sComputed(%(text)s, %(kwargs)s)" % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "text": text, "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), } def _render_identity( identity: "Identity", autogen_context: "AutogenContext" ) -> str: kwargs = OrderedDict(always=identity.always) if identity.on_null is not None: kwargs["on_null"] = identity.on_null 
kwargs.update(_get_identity_options(identity)) return "%(prefix)sIdentity(%(kwargs)s)" % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), } def _get_identity_options(identity_options: "Identity") -> OrderedDict: kwargs = OrderedDict() for attr in sqla_compat._identity_options_attrs: value = getattr(identity_options, attr, None) if value is not None: kwargs[attr] = value return kwargs def _repr_type(type_: "TypeEngine", autogen_context: "AutogenContext") -> str: rendered = _user_defined_render("type", type_, autogen_context) if rendered is not False: return rendered if hasattr(autogen_context.migration_context, "impl"): impl_rt = autogen_context.migration_context.impl.render_type( type_, autogen_context ) else: impl_rt = None mod = type(type_).__module__ imports = autogen_context.imports if mod.startswith("sqlalchemy.dialects"): match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) assert match is not None dname = match.group(1) if imports is not None: imports.add("from sqlalchemy.dialects import %s" % dname) if impl_rt: return impl_rt else: return "%s.%r" % (dname, type_) elif impl_rt: return impl_rt elif mod.startswith("sqlalchemy."): if type(type_) is sqltypes.Variant: return _render_Variant_type(type_, autogen_context) if "_render_%s_type" % type_.__visit_name__ in globals(): fn = globals()["_render_%s_type" % type_.__visit_name__] return fn(type_, autogen_context) else: prefix = _sqlalchemy_autogenerate_prefix(autogen_context) return "%s%r" % (prefix, type_) else: prefix = _user_autogenerate_prefix(autogen_context, type_) return "%s%r" % (prefix, type_) def _render_ARRAY_type( type_: "ARRAY", autogen_context: "AutogenContext" ) -> str: return cast( str, _render_type_w_subtype( type_, autogen_context, "item_type", r"(.+?\()" ), ) def _render_Variant_type( type_: "Variant", autogen_context: "AutogenContext" ) -> str: base = _repr_type(type_.impl, autogen_context) assert base is not None and base is not False for dialect in sorted(type_.mapping): typ = type_.mapping[dialect] base += ".with_variant(%s, %r)" % ( _repr_type(typ, autogen_context), dialect, ) return base def _render_type_w_subtype( type_: "TypeEngine", autogen_context: "AutogenContext", attrname: str, regexp: str, prefix: Optional[str] = None, ) -> Union[Optional[str], "Literal[False]"]: outer_repr = repr(type_) inner_type = getattr(type_, attrname, None) if inner_type is None: return False inner_repr = repr(inner_type) inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr) sub_type = _repr_type(getattr(type_, attrname), autogen_context) outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr) if prefix: return "%s%s" % (prefix, outer_type) mod = type(type_).__module__ if mod.startswith("sqlalchemy.dialects"): match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) assert match is not None dname = match.group(1) return "%s.%s" % (dname, outer_type) elif mod.startswith("sqlalchemy"): prefix = _sqlalchemy_autogenerate_prefix(autogen_context) return "%s%s" % (prefix, outer_type) else: return None _constraint_renderers = util.Dispatcher() def _render_constraint( constraint: "Constraint", autogen_context: "AutogenContext", namespace_metadata: Optional["MetaData"], ) -> Optional[str]: try: renderer = _constraint_renderers.dispatch(constraint) except ValueError: util.warn("No renderer is established for object %r" % constraint) return "[Unknown Python object %r]" % constraint else: return renderer(constraint, autogen_context, 
namespace_metadata) @_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint) def _render_primary_key( constraint: "PrimaryKeyConstraint", autogen_context: "AutogenContext", namespace_metadata: Optional["MetaData"], ) -> Optional[str]: rendered = _user_defined_render("primary_key", constraint, autogen_context) if rendered is not False: return rendered if not constraint.columns: return None opts = [] if constraint.name: opts.append( ("name", repr(_render_gen_name(autogen_context, constraint.name))) ) return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), "args": ", ".join( [repr(c.name) for c in constraint.columns] + ["%s=%s" % (kwname, val) for kwname, val in opts] ), }
MIT License
dropbox/stone
stone/compiler.py
Compiler.build
python
def build(self):
    if os.path.exists(self.build_path) and not os.path.isdir(self.build_path):
        self._logger.error('Output path must be a folder if it already exists')
        return
    Compiler._mkdir(self.build_path)
    self._execute_backend_on_spec()
Creates outputs. Outputs are files made by a backend.
https://github.com/dropbox/stone/blob/68bc8756c1b1182f919cc61ef8288088ad5a1847/stone/compiler.py#L69-L75
from __future__ import absolute_import, division, print_function, unicode_literals import logging import inspect import os import shutil import traceback from stone.backend import ( Backend, remove_aliases_from_api, ) class BackendException(Exception): def __init__(self, backend_name, tb): super(BackendException, self).__init__() self.backend_name = backend_name self.traceback = tb class Compiler(object): backend_extension = '.stoneg' def __init__(self, api, backend_module, backend_args, build_path, clean_build=False): self._logger = logging.getLogger('stone.compiler') self.api = api self.backend_module = backend_module self.backend_args = backend_args self.build_path = build_path if clean_build and os.path.exists(self.build_path): logging.info('Cleaning existing build directory %s...', self.build_path) shutil.rmtree(self.build_path)
MIT License
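A minimal sketch of driving Compiler.build() from the record above. The constructor arguments mirror the __init__ signature shown in the record's context; loaded_api and my_backend_module are hypothetical placeholders for an already-parsed Stone API object and an imported backend module, which this snippet does not produce.

# Sketch only: `loaded_api` and `my_backend_module` are assumed to exist already.
from stone.compiler import Compiler

compiler = Compiler(
    api=loaded_api,                     # assumed: parsed Stone API object
    backend_module=my_backend_module,   # assumed: module exposing a Backend subclass
    backend_args=[],
    build_path='generated/',
    clean_build=True,                   # removes an existing build directory first
)
compiler.build()                        # creates build_path if needed, then runs the backend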
peterdemin/pip-compile-multi
pipcompilemulti/cli_v1.py
verify
python
def verify(ctx):
    sys.excepthook = exception_hook
    ctx.exit(0 if verify_environments() else 1)
For each environment verify hash comments and report failures. If any failure occurred, exit with code 1.
https://github.com/peterdemin/pip-compile-multi/blob/6e0f7c00445a6de11c34a3ad29feca46cea0a58f/pipcompilemulti/cli_v1.py#L32-L40
import os import sys import logging from traceback import print_exception import click from .actions import recompile from .verify import verify_environments from .features import FEATURES THIS_FILE = os.path.abspath(__file__) @click.group(invoke_without_command=True) @click.pass_context @FEATURES.bind def cli(ctx): logging.basicConfig(level=logging.DEBUG, format="%(message)s") sys.excepthook = exception_hook if ctx.invoked_subcommand is None: recompile() @cli.command() @click.pass_context @FEATURES.bind
MIT License
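A short sketch of exercising the verify subcommand above through click's test runner; it assumes pip-compile-multi is installed and relies on the exit-code contract stated in the docstring (0 on success, 1 on any failure).

# Sketch: invoke the click `cli` group's `verify` subcommand programmatically.
from click.testing import CliRunner

from pipcompilemulti.cli_v1 import cli

result = CliRunner().invoke(cli, ['verify'])
print(result.exit_code)   # 0 -> all environments verified, 1 -> at least one failure
print(result.output)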
openstack/cinder
cinder/volume/drivers/netapp/utils.py
map_aqos_spec
python
def map_aqos_spec(qos_spec, volume):
    if qos_spec is None:
        return None

    qos_spec = map_dict_to_lower(qos_spec)
    spec = dict(policy_name=get_qos_policy_group_name(volume))

    if 'expectediopspergib' in qos_spec:
        spec['expected_iops'] = (
            '%sIOPS/GB' % qos_spec['expectediopspergib'])
    if 'peakiopspergib' in qos_spec:
        spec['peak_iops'] = '%sIOPS/GB' % qos_spec['peakiopspergib']
    if 'expectediopsallocation' in qos_spec:
        spec['expected_iops_allocation'] = qos_spec['expectediopsallocation']
    if 'peakiopsallocation' in qos_spec:
        spec['peak_iops_allocation'] = qos_spec['peakiopsallocation']
    if 'absoluteminiops' in qos_spec:
        spec['absolute_min_iops'] = '%sIOPS' % qos_spec['absoluteminiops']
    if 'blocksize' in qos_spec:
        spec['block_size'] = qos_spec['blocksize']

    if 'peak_iops' not in spec or 'expected_iops' not in spec:
        msg = _('Adaptive QoS requires the expected property and '
                'the peak property set together.')
        raise exception.Invalid(msg)

    if spec['peak_iops'] < spec['expected_iops']:
        msg = _('Adaptive maximum limit should be greater than or equal to '
                'the adaptive minimum limit.')
        raise exception.Invalid(msg)

    return spec
Map Cinder QOS spec to Adaptive QoS values.
https://github.com/openstack/cinder/blob/4558e4b53a7e41dc1263417a4824f39bb6fd30e1/cinder/volume/drivers/netapp/utils.py#L350-L383
import decimal import platform import re from oslo_concurrency import processutils as putils from oslo_log import log as logging from oslo_utils import netutils from cinder import context from cinder import exception from cinder.i18n import _ from cinder import utils from cinder import version from cinder.volume import qos_specs from cinder.volume import volume_types LOG = logging.getLogger(__name__) OPENSTACK_PREFIX = 'openstack-' OBSOLETE_SSC_SPECS = {'netapp:raid_type': 'netapp_raid_type', 'netapp:disk_type': 'netapp_disk_type'} DEPRECATED_SSC_SPECS = {'netapp_unmirrored': 'netapp_mirrored', 'netapp_nodedup': 'netapp_dedup', 'netapp_nocompression': 'netapp_compression', 'netapp_thick_provisioned': 'netapp_thin_provisioned'} MIN_QOS_KEYS = frozenset([ 'minIOPS', 'minIOPSperGiB', ]) MAX_QOS_KEYS = frozenset([ 'maxIOPS', 'maxIOPSperGiB', 'maxBPS', 'maxBPSperGiB', ]) ADAPTIVE_QOS_KEYS = frozenset([ 'expectedIOPSperGiB', 'peakIOPSperGiB', 'expectedIOPSAllocation', 'peakIOPSAllocation', 'absoluteMinIOPS', 'blockSize', ]) QOS_ADAPTIVE_POLICY_GROUP_SPEC_KEYS = frozenset([ 'expected_iops', 'peak_iops', 'expected_iops_allocation', 'peak_iops_allocation', 'absolute_min_iops', 'block_size', 'policy_name', ]) BACKEND_QOS_CONSUMERS = frozenset(['back-end', 'both']) CHAP_SECRET_LENGTH = 16 DEFAULT_CHAP_USER_NAME = 'NetApp_iSCSI_CHAP_Username' API_TRACE_PATTERN = '(.*)' class NetAppDriverException(exception.VolumeDriverException): message = _("NetApp Cinder Driver exception.") class GeometryHasChangedOnDestination(NetAppDriverException): message = _("Geometry has changed on destination volume.") class NetAppDriverTimeout(NetAppDriverException): message = _("Timeout in NetApp Cinder Driver.") def validate_instantiation(**kwargs): if kwargs and kwargs.get('netapp_mode') == 'proxy': return LOG.warning("It is not the recommended way to use drivers by NetApp. " "Please use NetAppDriver to achieve the functionality.") def check_flags(required_flags, configuration): for flag in required_flags: if not getattr(configuration, flag, None): msg = _('Configuration value %s is not set.') % flag raise exception.InvalidInput(reason=msg) def to_bool(val): if val: strg = str(val).lower() if (strg == 'true' or strg == 'y' or strg == 'yes' or strg == 'enabled' or strg == '1'): return True else: return False else: return False @utils.synchronized("safe_set_attr") def set_safe_attr(instance, attr, val): if not instance or not attr: return False old_val = getattr(instance, attr, None) if val is None and old_val is None: return False elif val == old_val: return False else: setattr(instance, attr, val) return True def get_volume_extra_specs(volume): ctxt = context.get_admin_context() type_id = volume.get('volume_type_id') if type_id is None: return {} volume_type = volume_types.get_volume_type(ctxt, type_id) if volume_type is None: return {} extra_specs = volume_type.get('extra_specs', {}) log_extra_spec_warnings(extra_specs) return extra_specs def setup_api_trace_pattern(api_trace_pattern): global API_TRACE_PATTERN try: re.compile(api_trace_pattern) except (re.error, TypeError): msg = _('Cannot parse the API trace pattern. 
%s is not a ' 'valid python regular expression.') % api_trace_pattern raise exception.InvalidConfigurationValue(msg) API_TRACE_PATTERN = api_trace_pattern def trace_filter_func_api(all_args): na_element = all_args.get('na_element') if na_element is None: return True api_name = na_element.get_name() return re.match(API_TRACE_PATTERN, api_name) is not None def round_down(value, precision='0.00'): return float(decimal.Decimal(str(value)).quantize( decimal.Decimal(precision), rounding=decimal.ROUND_DOWN)) def log_extra_spec_warnings(extra_specs): for spec in (set(extra_specs.keys() if extra_specs else []) & set(OBSOLETE_SSC_SPECS.keys())): LOG.warning('Extra spec %(old)s is obsolete. Use %(new)s ' 'instead.', {'old': spec, 'new': OBSOLETE_SSC_SPECS[spec]}) for spec in (set(extra_specs.keys() if extra_specs else []) & set(DEPRECATED_SSC_SPECS.keys())): LOG.warning('Extra spec %(old)s is deprecated. Use %(new)s ' 'instead.', {'old': spec, 'new': DEPRECATED_SSC_SPECS[spec]}) def get_iscsi_connection_properties(lun_id, volume, iqns, addresses, ports): addresses = [netutils.escape_ipv6(a) if netutils.is_valid_ipv6(a) else a for a in addresses] lun_id = int(lun_id) if isinstance(iqns, str): iqns = [iqns] * len(addresses) target_portals = ['%s:%s' % (a, p) for a, p in zip(addresses, ports)] properties = {} properties['target_discovered'] = False properties['target_portal'] = target_portals[0] properties['target_iqn'] = iqns[0] properties['target_lun'] = lun_id properties['volume_id'] = volume['id'] if len(addresses) > 1: properties['target_portals'] = target_portals properties['target_iqns'] = iqns properties['target_luns'] = [lun_id] * len(addresses) auth = volume['provider_auth'] if auth: (auth_method, auth_username, auth_secret) = auth.split() properties['auth_method'] = auth_method properties['auth_username'] = auth_username properties['auth_password'] = auth_secret return { 'driver_volume_type': 'iscsi', 'data': properties, } def validate_qos_spec(qos_spec): if qos_spec is None: return normalized_min_keys = [key.lower() for key in MIN_QOS_KEYS] normalized_max_keys = [key.lower() for key in MAX_QOS_KEYS] normalized_aqos_keys = [key.lower() for key in ADAPTIVE_QOS_KEYS] unrecognized_keys = [ k for k in qos_spec.keys() if k.lower() not in normalized_max_keys + normalized_min_keys + normalized_aqos_keys] if unrecognized_keys: msg = _('Unrecognized QOS keywords: "%s"') % unrecognized_keys raise exception.Invalid(msg) min_dict = {k: v for k, v in qos_spec.items() if k.lower() in normalized_min_keys} if len(min_dict) > 1: msg = _('Only one minimum limit can be set in a QoS spec.') raise exception.Invalid(msg) max_dict = {k: v for k, v in qos_spec.items() if k.lower() in normalized_max_keys} if len(max_dict) > 1: msg = _('Only one maximum limit can be set in a QoS spec.') raise exception.Invalid(msg) aqos_dict = {k: v for k, v in qos_spec.items() if k.lower() in normalized_aqos_keys} if aqos_dict and (min_dict or max_dict): msg = _('Adaptive QoS specs and non-adaptive QoS specs ' 'cannot be used together.') raise exception.Invalid(msg) def get_volume_type_from_volume(volume): type_id = volume.get('volume_type_id') if type_id is None: return {} ctxt = context.get_admin_context() return volume_types.get_volume_type(ctxt, type_id) def _get_min_throughput_from_qos_spec(qos_spec, volume_size): if 'miniops' in qos_spec: min_throughput = '%siops' % qos_spec['miniops'] elif 'miniopspergib' in qos_spec: min_throughput = '%siops' % str( int(qos_spec['miniopspergib']) * int(volume_size)) else: min_throughput = 
None return min_throughput def _get_max_throughput_from_qos_spec(qos_spec, volume_size): if 'maxiops' in qos_spec: max_throughput = '%siops' % qos_spec['maxiops'] elif 'maxiopspergib' in qos_spec: max_throughput = '%siops' % str( int(qos_spec['maxiopspergib']) * int(volume_size)) elif 'maxbps' in qos_spec: max_throughput = '%sB/s' % qos_spec['maxbps'] elif 'maxbpspergib' in qos_spec: max_throughput = '%sB/s' % str( int(qos_spec['maxbpspergib']) * int(volume_size)) else: max_throughput = None return max_throughput def map_qos_spec(qos_spec, volume): if qos_spec is None: return None spec = map_dict_to_lower(qos_spec) min_throughput = _get_min_throughput_from_qos_spec(spec, volume['size']) max_throughput = _get_max_throughput_from_qos_spec(spec, volume['size']) if min_throughput and max_throughput and max_throughput.endswith('B/s'): msg = _('Maximum limit should be in IOPS when minimum limit is ' 'specified.') raise exception.Invalid(msg) if min_throughput and max_throughput and max_throughput < min_throughput: msg = _('Maximum limit should be greater than or equal to the ' 'minimum limit.') raise exception.Invalid(msg) policy = dict(policy_name=get_qos_policy_group_name(volume)) if min_throughput: policy['min_throughput'] = min_throughput if max_throughput: policy['max_throughput'] = max_throughput return policy
Apache License 2.0
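An illustrative input/output pairing for map_aqos_spec, derived from the mapping shown in the function body; the spec values are made up, and `volume` stands for whatever object the driver passes in (it only feeds get_qos_policy_group_name).

# Hypothetical adaptive QoS spec and the policy dict the function builds from it.
qos_spec = {
    'expectedIOPSperGiB': '128',   # -> expected_iops='128IOPS/GB'
    'peakIOPSperGiB': '512',       # -> peak_iops='512IOPS/GB'
    'absoluteMinIOPS': '75',       # -> absolute_min_iops='75IOPS'
    'blockSize': 'ANY',            # -> block_size='ANY'
}
policy = map_aqos_spec(qos_spec, volume)
# policy == {
#     'policy_name': '<per-volume QoS policy group name>',
#     'expected_iops': '128IOPS/GB',
#     'peak_iops': '512IOPS/GB',
#     'absolute_min_iops': '75IOPS',
#     'block_size': 'ANY',
# }
# Omitting either expectedIOPSperGiB or peakIOPSperGiB raises exception.Invalid.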
brython-dev/brython
www/src/Lib/queue.py
_PySimpleQueue.qsize
python
def qsize(self):
    return len(self._queue)
Return the approximate size of the queue (not reliable!).
https://github.com/brython-dev/brython/blob/33aeaab551f1b73209326c5a0aecf98642d4c126/www/src/Lib/queue.py#L318-L320
import threading import types from collections import deque from heapq import heappush, heappop from time import monotonic as time try: from _queue import SimpleQueue except ImportError: SimpleQueue = None __all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue', 'SimpleQueue'] try: from _queue import Empty except ImportError: class Empty(Exception): pass class Full(Exception): pass class Queue: def __init__(self, maxsize=0): self.maxsize = maxsize self._init(maxsize) self.mutex = threading.Lock() self.not_empty = threading.Condition(self.mutex) self.not_full = threading.Condition(self.mutex) self.all_tasks_done = threading.Condition(self.mutex) self.unfinished_tasks = 0 def task_done(self): with self.all_tasks_done: unfinished = self.unfinished_tasks - 1 if unfinished <= 0: if unfinished < 0: raise ValueError('task_done() called too many times') self.all_tasks_done.notify_all() self.unfinished_tasks = unfinished def join(self): with self.all_tasks_done: while self.unfinished_tasks: self.all_tasks_done.wait() def qsize(self): with self.mutex: return self._qsize() def empty(self): with self.mutex: return not self._qsize() def full(self): with self.mutex: return 0 < self.maxsize <= self._qsize() def put(self, item, block=True, timeout=None): with self.not_full: if self.maxsize > 0: if not block: if self._qsize() >= self.maxsize: raise Full elif timeout is None: while self._qsize() >= self.maxsize: self.not_full.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while self._qsize() >= self.maxsize: remaining = endtime - time() if remaining <= 0.0: raise Full self.not_full.wait(remaining) self._put(item) self.unfinished_tasks += 1 self.not_empty.notify() def get(self, block=True, timeout=None): with self.not_empty: if not block: if not self._qsize(): raise Empty elif timeout is None: while not self._qsize(): self.not_empty.wait() elif timeout < 0: raise ValueError("'timeout' must be a non-negative number") else: endtime = time() + timeout while not self._qsize(): remaining = endtime - time() if remaining <= 0.0: raise Empty self.not_empty.wait(remaining) item = self._get() self.not_full.notify() return item def put_nowait(self, item): return self.put(item, block=False) def get_nowait(self): return self.get(block=False) def _init(self, maxsize): self.queue = deque() def _qsize(self): return len(self.queue) def _put(self, item): self.queue.append(item) def _get(self): return self.queue.popleft() __class_getitem__ = classmethod(types.GenericAlias) class PriorityQueue(Queue): def _init(self, maxsize): self.queue = [] def _qsize(self): return len(self.queue) def _put(self, item): heappush(self.queue, item) def _get(self): return heappop(self.queue) class LifoQueue(Queue): def _init(self, maxsize): self.queue = [] def _qsize(self): return len(self.queue) def _put(self, item): self.queue.append(item) def _get(self): return self.queue.pop() class _PySimpleQueue: def __init__(self): self._queue = deque() self._count = threading.Semaphore(0) def put(self, item, block=True, timeout=None): self._queue.append(item) self._count.release() def get(self, block=True, timeout=None): if timeout is not None and timeout < 0: raise ValueError("'timeout' must be a non-negative number") if not self._count.acquire(block, timeout): raise Empty return self._queue.popleft() def put_nowait(self, item): return self.put(item, block=False) def get_nowait(self): return self.get(block=False) def empty(self): return len(self._queue) == 0
BSD 3-Clause New or Revised License
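A runnable illustration of the qsize() contract using the standard library's queue module, which this Brython file mirrors; the count is approximate because other threads may put or get between the call and your next action.

# Stdlib demonstration of SimpleQueue.qsize().
import queue

q = queue.SimpleQueue()
for item in ("a", "b", "c"):
    q.put(item)

print(q.qsize())   # 3
q.get()
print(q.qsize())   # 2
print(q.empty())   # False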
python-sifter/sifter
sifter/grammar/grammar.py
p_command
python
def p_command(p):
    tests = p[2].get('tests')
    block = None
    if p[3] != ';':
        block = p[3]
    handler = sifter.handler.get('command', p[1])
    if handler is None:
        print("No handler registered for command '%s' on line %d"
              % (p[1], p.lineno(1)))
        raise SyntaxError
    p[0] = handler(arguments=p[2]['args'], tests=tests, block=block)
command : IDENTIFIER arguments ';' | IDENTIFIER arguments block
https://github.com/python-sifter/sifter/blob/cb2656ac47125e9e06c9bdda56193da41cc340a8/sifter/grammar/grammar.py#L42-L54
import ply.yacc import sifter.grammar from sifter.grammar.lexer import tokens import sifter.handler __all__ = ('parser',) def parser(**kwargs): return ply.yacc.yacc(**kwargs) def p_commands_list(p): p[0] = p[1] if p[2].RULE_IDENTIFIER == 'REQUIRE': if any(command.RULE_IDENTIFIER != 'REQUIRE' for command in p[0].commands): print("REQUIRE command on line %d must come before any " "other non-REQUIRE commands" % p.lineno(2)) raise SyntaxError elif p[2].RULE_IDENTIFIER in ('ELSIF', 'ELSE'): if p[0].commands[-1].RULE_IDENTIFIER not in ('IF', 'ELSIF'): print("ELSIF/ELSE command on line %d must follow an IF/ELSIF " "command" % p.lineno(2)) raise SyntaxError p[0].commands.append(p[2]) def p_commands_empty(p): p[0] = sifter.grammar.CommandList()
BSD 2-Clause Simplified License
netflix/lemur
lemur/certificates/service.py
get_certificates_with_same_prefix_with_rotate_on
python
def get_certificates_with_same_prefix_with_rotate_on(prefix):
    now = arrow.now().format("YYYY-MM-DD")
    return (
        Certificate.query.filter(Certificate.name.like(prefix))
        .filter(Certificate.rotation == true())
        .filter(Certificate.not_after >= now)
        .filter(not_(Certificate.replaced.any()))
        .all()
    )
Find certificates with given prefix that are still valid, not replaced and marked for auto-rotate :param prefix: prefix to match :return:
https://github.com/netflix/lemur/blob/778c66ff6e4a82cebebffd25033b26240b7479a4/lemur/certificates/service.py#L305-L319
import re import time from collections import defaultdict from itertools import groupby import arrow from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from flask import current_app from sentry_sdk import capture_exception from sqlalchemy import and_, func, or_, not_, cast, Integer from sqlalchemy.sql import text from sqlalchemy.sql.expression import false, true from lemur import database from lemur.authorities.models import Authority from lemur.certificates.models import Certificate, CertificateAssociation from lemur.certificates.schemas import CertificateOutputSchema, CertificateInputSchema from lemur.common.utils import generate_private_key, truthiness, parse_serial, get_certificate_via_tls, windowed_query from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS from lemur.destinations.models import Destination from lemur.domains.models import Domain from lemur.endpoints import service as endpoint_service from lemur.extensions import metrics, signals from lemur.notifications.messaging import send_revocation_notification from lemur.notifications.models import Notification from lemur.pending_certificates.models import PendingCertificate from lemur.plugins.base import plugins from lemur.roles import service as role_service from lemur.roles.models import Role csr_created = signals.signal("csr_created", "CSR generated") csr_imported = signals.signal("csr_imported", "CSR imported from external source") certificate_issued = signals.signal( "certificate_issued", "Authority issued a certificate" ) certificate_imported = signals.signal( "certificate_imported", "Certificate imported from external source" ) def get(cert_id): return database.get(Certificate, cert_id) def get_by_name(name): return database.get(Certificate, name, field="name") def get_by_serial(serial): if isinstance(serial, int): serial = str(serial) return Certificate.query.filter(Certificate.serial == serial).all() def get_by_attributes(conditions): for attr in conditions.keys(): if attr not in Certificate.__table__.columns: conditions.pop(attr) query = database.session_query(Certificate) return database.find_all(query, Certificate, conditions).all() def delete(cert_id): database.delete(get(cert_id)) def get_all_certs(): return Certificate.query.all() def get_all_valid_certs(authority_plugin_name, paginate=False, page=1, count=1000): assert (page > 0) query = database.session_query(Certificate) if paginate else Certificate.query if authority_plugin_name: query = query.outerjoin(Authority, Authority.id == Certificate.authority_id).filter( Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter( Authority.plugin_name.in_(authority_plugin_name)).filter(Certificate.revoked.is_(False)) else: query = query.filter(Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter( Certificate.revoked.is_(False)) if paginate: items = database.paginate(query, page, count) return items['items'] return query.all() def get_all_pending_cleaning_expired(source): return ( Certificate.query.filter(Certificate.sources.any(id=source.id)) .filter(not_(Certificate.endpoints.any())) .filter(Certificate.expired) .all() ) def get_all_certs_attached_to_endpoint_without_autorotate(): return ( Certificate.query.filter(Certificate.endpoints.any()) .filter(Certificate.rotation == false()) .filter(Certificate.revoked == false()) .filter(Certificate.not_after >= arrow.now()) .filter(not_(Certificate.replaced.any())) .all() ) def 
get_all_certs_attached_to_destination_without_autorotate(plugin_name=None): if plugin_name: return ( Certificate.query.filter(Certificate.destinations.any(plugin_name=plugin_name)) .filter(Certificate.rotation == false()) .filter(Certificate.revoked == false()) .filter(Certificate.not_after >= arrow.now()) .filter(not_(Certificate.replaced.any())) .all() ) return ( Certificate.query.filter(Certificate.destinations.any()) .filter(Certificate.rotation == false()) .filter(Certificate.revoked == false()) .filter(Certificate.not_after >= arrow.now()) .filter(not_(Certificate.replaced.any())) .all() ) def get_all_pending_cleaning_expiring_in_days(source, days_to_expire): expiration_window = arrow.now().shift(days=+days_to_expire).format("YYYY-MM-DD") return ( Certificate.query.filter(Certificate.sources.any(id=source.id)) .filter(not_(Certificate.endpoints.any())) .filter(Certificate.not_after < expiration_window) .all() ) def get_all_pending_cleaning_issued_since_days(source, days_since_issuance): not_in_use_window = ( arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD") ) return ( Certificate.query.filter(Certificate.sources.any(id=source.id)) .filter(not_(Certificate.endpoints.any())) .filter(Certificate.date_created > not_in_use_window) .all() ) def get_all_pending_reissue(): return ( Certificate.query.filter(Certificate.rotation == true()) .filter(not_(Certificate.replaced.any())) .filter(Certificate.in_rotation_window == true()) .all() ) def find_duplicates(cert): if cert["chain"]: return Certificate.query.filter_by( body=cert["body"].strip(), chain=cert["chain"].strip() ).all() else: return Certificate.query.filter_by(body=cert["body"].strip(), chain=None).all() def list_duplicate_certs_by_authority(authority_ids, days_since_issuance): now = arrow.now().format("YYYY-MM-DD") query = database.session_query(Certificate) .filter(Certificate.authority_id.in_(authority_ids)) .filter(Certificate.not_after >= now) .filter(Certificate.rotation == true()) .filter(not_(Certificate.replaced.any())) .filter(text("name ~ '.*-[0-9]{8}-[0-9]{8}-.*'")) if days_since_issuance: issuance_window = ( arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD") ) query = query.filter(Certificate.date_created >= issuance_window) return query.all()
Apache License 2.0
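A usage sketch for the prefix query above. Because the function applies Certificate.name.like(prefix) verbatim, the caller supplies the SQL wildcard; the prefix string below is an assumption for illustration, and the call must run inside Lemur's Flask application context so the SQLAlchemy session is bound.

# Sketch only: requires an active Lemur app context.
from lemur.certificates.service import get_certificates_with_same_prefix_with_rotate_on

certs = get_certificates_with_same_prefix_with_rotate_on("wildcard.example.com-%")
for cert in certs:
    print(cert.name, cert.not_after)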
speechmatics/speechmatics-python
tests/conftest.py
server_ssl_context
python
def server_ssl_context():
    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
    ssl_context.load_cert_chain(
        path_to_test_resource("dummy_cert"),
        keyfile=path_to_test_resource("dummy_key"),
        password=lambda: "rohho3Uf",
    )
    return ssl_context
Returns an SSL context for the mock RT server to use, with a self signed certificate.
https://github.com/speechmatics/speechmatics-python/blob/71ff5beb78209dad8527d64689cf1d2591b107dc/tests/conftest.py#L11-L22
import ssl import threading import pytest from SimpleWebSocketServer import SimpleSSLWebSocketServer from .mock_rt_server import MockRealtimeLogbook, MockRealtimeServer from .utils import path_to_test_resource
MIT License
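A standalone sketch of the same pattern the fixture uses: loading a self-signed certificate/key pair into an SSL context for a test server. The file paths and passphrase here are hypothetical stand-ins for the repository's test resources.

# Sketch with assumed paths; the certificate and key files must exist on disk.
import ssl

ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS)
ssl_context.load_cert_chain(
    "tests/data/dummy_cert",            # assumed path to a PEM certificate
    keyfile="tests/data/dummy_key",     # assumed path to the matching private key
    password=lambda: "example-pass",    # callable returning the key passphrase
)
# ssl_context can now be handed to a TLS-capable mock server.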
thanethomson/statik
statik/views.py
StatikViewPath.render
python
def render(self, inst=None, context=None):
    raise NotImplementedError()
Must render this path (optionally according to the given instance). Returns: A string containing the rendered path.
https://github.com/thanethomson/statik/blob/ea422b8fccd1430f60e3d8b62d9221365ec4e31f/statik/views.py#L49-L55
from copy import deepcopy, copy from statik.common import YamlLoadable from statik.errors import * from statik.utils import * from statik.context import StatikContext import logging logger = logging.getLogger(__name__) __all__ = [ 'StatikView', 'StatikViewPath', 'StatikViewSimplePath', 'StatikViewComplexPath', 'StatikViewRenderer', 'StatikSimpleViewRenderer', 'StatikComplexViewRenderer' ] class StatikViewPath(object): def __init__( self, path, output_own_subfolder=True, output_filename='index', output_ext='.html', view_name=None, error_context=None, ): self.path = path self.output_own_subfolder = output_own_subfolder self.output_filename = output_filename self.output_ext = output_ext self.view_name = view_name self.error_context = error_context or StatikErrorContext() logger.debug( "Configured path for view \"%s\": %s", self.view_name, self )
MIT License
chaffelson/whoville
whoville/cloudbreak/models/workspace_response.py
WorkspaceResponse.description
python
def description(self, description):
    if description is not None and len(description) > 1000:
        raise ValueError("Invalid value for `description`, length must be less than or equal to `1000`")
    if description is not None and len(description) < 0:
        raise ValueError("Invalid value for `description`, length must be greater than or equal to `0`")

    self._description = description
Sets the description of this WorkspaceResponse. description of the resource :param description: The description of this WorkspaceResponse. :type: str
https://github.com/chaffelson/whoville/blob/f71fda629c9fd50d0a482120165ea5abcc754522/whoville/cloudbreak/models/workspace_response.py#L112-L125
from pprint import pformat from six import iteritems import re class WorkspaceResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'name': 'str', 'description': 'str', 'users': 'list[UserWorkspacePermissionsJson]', 'status': 'str', 'id': 'int' } attribute_map = { 'name': 'name', 'description': 'description', 'users': 'users', 'status': 'status', 'id': 'id' } def __init__(self, name=None, description=None, users=None, status=None, id=None): self._name = None self._description = None self._users = None self._status = None self._id = None if name is not None: self.name = name if description is not None: self.description = description if users is not None: self.users = users if status is not None: self.status = status if id is not None: self.id = id @property def name(self): return self._name @name.setter def name(self, name): if name is not None and len(name) > 100: raise ValueError("Invalid value for `name`, length must be less than or equal to `100`") if name is not None and len(name) < 5: raise ValueError("Invalid value for `name`, length must be greater than or equal to `5`") if name is not None and not re.search('(^[a-z][-a-z0-9]*[a-z0-9]$)', name): raise ValueError("Invalid value for `name`, must be a follow pattern or equal to `/(^[a-z][-a-z0-9]*[a-z0-9]$)/`") self._name = name @property def description(self): return self._description @description.setter
Apache License 2.0
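An illustration of the length check enforced by the setter above; the workspace name and description values are made up, while the constructor keywords come from the __init__ shown in the record's context.

# Hypothetical values; the setter rejects descriptions longer than 1000 characters.
from whoville.cloudbreak.models.workspace_response import WorkspaceResponse

ws = WorkspaceResponse(name="team-sandbox", description="Scratch workspace")
ws.description = "Updated description"    # accepted: within the 0-1000 character bound

try:
    ws.description = "x" * 1001           # rejected by the length check
except ValueError as err:
    print(err)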
bloomberg/python-comdb2
comdb2/dbapi2.py
connect
python
def connect(*args, **kwargs):
    return Connection(*args, **kwargs)
Establish a connection to a Comdb2 database. All arguments are passed directly through to the `Connection` constructor. Note: DB-API 2.0 requires the module to expose `connect`, but not `Connection`. If portability across database modules is a concern, you should always use `connect` to create your connections rather than calling the `Connection` constructor directly. Returns: Connection: A handle for the newly established connection.
https://github.com/bloomberg/python-comdb2/blob/05da300c739bcc7e63036ab79f8552165954035b/comdb2/dbapi2.py#L552-L566
from __future__ import absolute_import, unicode_literals import functools import itertools import weakref import datetime import re import six from . import cdb2 __all__ = ['apilevel', 'threadsafety', 'paramstyle', 'connect', 'Connection', 'Cursor', 'STRING', 'BINARY', 'NUMBER', 'DATETIME', 'ROWID', 'Datetime', 'DatetimeUs', 'Binary', 'Timestamp', 'TimestampUs', 'DatetimeFromTicks', 'DatetimeUsFromTicks', 'TimestampFromTicks', 'Error', 'Warning', 'InterfaceError', 'DatabaseError', 'InternalError', 'OperationalError', 'ProgrammingError', 'IntegrityError', 'DataError', 'NotSupportedError', 'UniqueKeyConstraintError', 'ForeignKeyConstraintError', 'NonNullConstraintError'] apilevel = "2.0" threadsafety = 1 paramstyle = "pyformat" _FIRST_WORD_OF_STMT = re.compile( r""" (?: # match (without capturing) \s* # optional whitespace /\*.*?\*/ # then a C-style /* ... */ comment, possibly across lines | # or \s* # optional whitespace --[^\n]*\n # then a SQL-style comment terminated by a newline )* # repeat until all comments have been matched \s* # then skip over any whitespace (\w+) # and capture the first word """, re.VERBOSE | re.DOTALL | (0 if six.PY2 else re.ASCII), ) _VALID_SP_NAME = re.compile(r'^[A-Za-z0-9_.]+$') @functools.total_ordering class _TypeObject(object): def __init__(self, *value_names): self.value_names = value_names self.values = [cdb2.TYPE[v] for v in value_names] def __eq__(self, other): return other in self.values def __lt__(self, other): return self != other and other < self.values def __repr__(self): return 'TypeObject' + str(self.value_names) def _binary(string): if isinstance(string, six.text_type): return string.encode('utf-8') return bytes(string) STRING = _TypeObject('CSTRING') BINARY = _TypeObject('BLOB') NUMBER = _TypeObject('INTEGER', 'REAL') DATETIME = _TypeObject('DATETIME', 'DATETIMEUS') ROWID = STRING Datetime = datetime.datetime DatetimeUs = cdb2.DatetimeUs Binary = _binary Timestamp = Datetime TimestampUs = DatetimeUs DatetimeFromTicks = Datetime.fromtimestamp DatetimeUsFromTicks = DatetimeUs.fromtimestamp TimestampFromTicks = Timestamp.fromtimestamp TimestampUsFromTicks = TimestampUs.fromtimestamp try: UserException = StandardError except NameError: UserException = Exception class Error(UserException): pass class Warning(UserException): pass class InterfaceError(Error): pass class DatabaseError(Error): pass class InternalError(DatabaseError): pass class OperationalError(DatabaseError): pass class ProgrammingError(DatabaseError): pass class IntegrityError(DatabaseError): pass class UniqueKeyConstraintError(IntegrityError): pass class ForeignKeyConstraintError(IntegrityError): pass class NonNullConstraintError(IntegrityError): pass class DataError(DatabaseError): pass class NotSupportedError(DatabaseError): pass _EXCEPTION_BY_RC = { cdb2.ERROR_CODE['CONNECT_ERROR'] : OperationalError, cdb2.ERROR_CODE['NOTCONNECTED'] : ProgrammingError, cdb2.ERROR_CODE['PREPARE_ERROR'] : ProgrammingError, cdb2.ERROR_CODE['IO_ERROR'] : OperationalError, cdb2.ERROR_CODE['INTERNAL'] : InternalError, cdb2.ERROR_CODE['NOSTATEMENT'] : ProgrammingError, cdb2.ERROR_CODE['BADCOLUMN'] : ProgrammingError, cdb2.ERROR_CODE['BADSTATE'] : ProgrammingError, cdb2.ERROR_CODE['ASYNCERR'] : OperationalError, cdb2.ERROR_CODE['INVALID_ID'] : InternalError, cdb2.ERROR_CODE['RECORD_OUT_OF_RANGE'] : OperationalError, cdb2.ERROR_CODE['REJECTED'] : OperationalError, cdb2.ERROR_CODE['STOPPED'] : OperationalError, cdb2.ERROR_CODE['BADREQ'] : OperationalError, cdb2.ERROR_CODE['DBCREATE_FAILED'] : 
OperationalError, cdb2.ERROR_CODE['THREADPOOL_INTERNAL'] : OperationalError, cdb2.ERROR_CODE['READONLY'] : NotSupportedError, cdb2.ERROR_CODE['NOMASTER'] : InternalError, cdb2.ERROR_CODE['UNTAGGED_DATABASE'] : NotSupportedError, cdb2.ERROR_CODE['CONSTRAINTS'] : IntegrityError, cdb2.ERROR_CODE['DEADLOCK'] : OperationalError, cdb2.ERROR_CODE['TRAN_IO_ERROR'] : OperationalError, cdb2.ERROR_CODE['ACCESS'] : OperationalError, cdb2.ERROR_CODE['TRAN_MODE_UNSUPPORTED'] : NotSupportedError, cdb2.ERROR_CODE['VERIFY_ERROR'] : OperationalError, cdb2.ERROR_CODE['FKEY_VIOLATION'] : ForeignKeyConstraintError, cdb2.ERROR_CODE['NULL_CONSTRAINT'] : NonNullConstraintError, cdb2.ERROR_CODE['CONV_FAIL'] : DataError, cdb2.ERROR_CODE['NONKLESS'] : NotSupportedError, cdb2.ERROR_CODE['MALLOC'] : OperationalError, cdb2.ERROR_CODE['NOTSUPPORTED'] : NotSupportedError, cdb2.ERROR_CODE['DUPLICATE'] : UniqueKeyConstraintError, cdb2.ERROR_CODE['TZNAME_FAIL'] : DataError, cdb2.ERROR_CODE['UNKNOWN'] : OperationalError, } def _raise_wrapped_exception(exc): code = exc.error_code msg = '%s (cdb2api rc %d)' % (exc.error_message, code) if "null constraint violation" in msg: six.raise_from(NonNullConstraintError(msg), exc) six.raise_from(_EXCEPTION_BY_RC.get(code, OperationalError)(msg), exc) def _sql_operation(sql): match = _FIRST_WORD_OF_STMT.match(sql) if match: return match.group(1).lower() return None def _operation_ends_transaction(operation): return operation == 'commit' or operation == 'rollback' def _modifies_rows(operation): return operation in ('commit', 'insert', 'update', 'delete')
Apache License 2.0
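A standard DB-API 2.0 usage sketch built on connect(); the arguments are forwarded straight to Connection, whose signature is not shown in this record, so the database name and tier below are assumptions for illustration.

# Sketch: assumed (database name, tier) arguments; cursor/execute/fetchall follow DB-API 2.0.
from comdb2 import dbapi2

conn = dbapi2.connect("testdb", "local")
cursor = conn.cursor()
cursor.execute("select 1 as answer")
print(cursor.fetchall())                  # e.g. [[1]]
conn.close()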
argoproj-labs/argo-python-dsl
argo/workflows/dsl/_workflow_template.py
WorkflowTemplate.from_dict
python
def from_dict(cls, wf: Dict[str, Any], validate: bool = True) -> "WorkflowTemplate":
    return cls.from_string(json.dumps(wf), validate=validate)
Create a WorkflowTemplate from a dict.
https://github.com/argoproj-labs/argo-python-dsl/blob/36369bef276eb0910a6f218e20e0d571fbaa9beb/argo/workflows/dsl/_workflow_template.py#L291-L293
from abc import ABCMeta import logging import inspect import json import yaml import requests from inflection import dasherize from inflection import underscore from pathlib import Path from typing import Any from typing import Dict from typing import List from typing import Optional from typing import Set from typing import Tuple from typing import Type from typing import Union from argo.workflows.client import ApiClient from argo.workflows.client.models import V1alpha1Arguments from argo.workflows.client.models import V1alpha1Artifact from argo.workflows.client.models import V1alpha1DAGTask from argo.workflows.client.models import V1alpha1DAGTemplate from argo.workflows.client.models import V1alpha1Parameter from argo.workflows.client.models import V1alpha1Template from argo.workflows.client.models import V1alpha1ClusterWorkflowTemplate from argo.workflows.client.models import V1alpha1WorkflowTemplate from argo.workflows.client.models import V1alpha1WorkflowTemplateSpec from argo.workflows.client.models import V1ObjectMeta from . import _utils __all__ = ["WorkflowTemplate"] _LOGGER = logging.getLogger(__name__) class WorkflowTemplateMeta(ABCMeta): __model__ = V1alpha1WorkflowTemplate __kind__ = "WorkflowTemplate" def __new__( cls, name: Union[str, Type["WorkflowTemplate"]], bases: Tuple[Type["WorkflowTemplate"], ...], props: Dict[str, Any], **kwargs, ): workflow_name = dasherize(underscore(name)) props["kind"] = cls.__kind__ props["api_version"] = "argoproj.io/v1alpha1" metadata_dict = {"name": workflow_name} metadata_dict.update(props.get("__metadata__", {})) props["metadata"]: V1ObjectMeta = V1ObjectMeta(**metadata_dict) props["spec"] = { k: props.pop(k) for k in V1alpha1WorkflowTemplateSpec.attribute_map if props.get(k) } bases = (*bases, cls.__model__) klass = super().__new__(cls, name, bases, props) if name == cls.__kind__: return klass cls.__compile(klass, name, bases, props) return klass @classmethod def __compile( cls, klass: "WorkflowTemplate", name: str, bases: Tuple[Type["WorkflowTemplate"], ...], props: Dict[str, Any], **kwargs, ): tasks: List[V1alpha1DAGTask] = [] templates: List[V1alpha1Template] = [] scopes: Dict[str, List[Any]] = {} for key, prop in props.items(): scope = getattr(prop, "__scope__", None) if scope is None: continue scoped_objects = [prop] scoped_objects.extend(scopes.get(scope, [])) scopes[scope] = scoped_objects for key, prop in props.items(): model = getattr(prop, "__model__", None) if model is None: continue template: Optional[V1alpha1Template] = None if issubclass(model, V1alpha1Template): template = prop if hasattr(template, "__closure__") and template.script is not None: template = cls.__compile_closure(template, scopes) templates.append(template) elif issubclass(model, V1alpha1DAGTask): task = prop tasks.append(task) if tasks: main_template = V1alpha1Template(name="main") main_template.dag = V1alpha1DAGTemplate(tasks=tasks) templates.insert(0, main_template) spec_dict: dict = klass.spec spec_dict["entrypoint"] = spec_dict.get("entrypoint", "main") spec_dict["templates"] = templates klass.spec: V1alpha1WorkflowTemplateSpec = V1alpha1WorkflowTemplateSpec( **spec_dict ) @classmethod def __compile_closure( cls, template: V1alpha1Template, scopes: Dict[str, Any] = None ) -> V1alpha1Template: scopes = scopes or {} scope: str = template.__closure__ if scope is None: return template script: List[str] = [f"class {scope}:\n"] script.append(f' """Scoped objects injected from scope \'{scope}\'."""\n\n') scoped_objects = scopes.get(scope) or [] for so in 
scoped_objects: source, _ = inspect.getsourcelines(so.__get__(cls).__code__) for co_start, line in enumerate(source): if line.strip().startswith("def"): break source = [" @staticmethod\n"] + source[co_start:] + ["\n"] script.extend(source) script = script + [ "\n", *template.script.source.splitlines(keepends=True), ] import_lines: List[str] = [] source_lines: List[str] = [] import_in_previous_line = False for line in script: if "import " in line: import_lines.append(line.strip(" ")) import_in_previous_line = True else: is_blankline = not bool(line.strip()) if import_in_previous_line and is_blankline: pass else: source_lines.append(line) import_in_previous_line = False import_lines_with_from: Set[str] = set() import_lines_without_from: Set[str] = set() for line in import_lines: if "from " in line: import_lines_with_from.add(line) else: import_lines_without_from.add(line) import_lines = [ *sorted(import_lines_without_from), "\n", *sorted(import_lines_with_from), ] template.script.source = "".join((*import_lines, "\n", *source_lines)) return template class WorkflowTemplate(metaclass=WorkflowTemplateMeta): __model__ = V1alpha1WorkflowTemplate def __init__(self, compile=True): self._compiled_model: Union[V1alpha1WorkflowTemplate, None] = None self.__validated = False if compile: self.compile() def __hash__(self) -> str: return self.to_str().__hash__() @property def model(self) -> Union[V1alpha1WorkflowTemplate, None]: return self._compiled_model @model.setter def model(self, m: V1alpha1WorkflowTemplate): if not isinstance(m, self.__model__): raise TypeError(f"Expected type {self.__model__}, got: {type(m)}") self._compiled_model = m @property def name(self) -> Union[str, None]: return self.metadata.name @name.setter def name(self, name: str): self.metadata.name = name @property def validated(self) -> bool: return self.__validated @classmethod def from_file( cls, fp: Union[str, Path], validate: bool = True ) -> "WorkflowTemplate": wf_path = Path(fp) wf: Dict[str, Any] = yaml.safe_load(wf_path.read_text()) return cls.from_dict(wf, validate=validate) @classmethod def from_url(cls, url: str, validate: bool = True) -> "WorkflowTemplate": resp = requests.get(url) resp.raise_for_status() wf: Dict[str, Any] = yaml.safe_load(resp.text) return cls.from_dict(wf, validate=validate) @classmethod
Apache License 2.0
wildmeorg/wildbook-ia
wbia/control/manual_image_funcs.py
set_image_time_posix
python
def set_image_time_posix(
    ibs, gid_list, image_time_posix_list, duplicate_behavior='error'
):
    id_iter = gid_list
    colnames = (IMAGE_TIME_POSIX,)
    ibs.db.set(
        const.IMAGE_TABLE,
        colnames,
        image_time_posix_list,
        id_iter,
        duplicate_behavior=duplicate_behavior,
    )

r""" image_time_posix_list -> image.image_time_posix[gid_list]

    SeeAlso:
        set_image_unixtime

    Args:
        gid_list
        image_time_posix_list

    TemplateInfo:
        Tsetter_native_column
        tbl = image
        col = image_time_posix

    RESTful:
        Method: PUT
        URL:    /api/image/time/posix/
https://github.com/wildmeorg/wildbook-ia/blob/017057cfd3a2a7ea22f575842c9473e121c66ea4/wbia/control/manual_image_funcs.py#L906-L936
import logging from wbia import constants as const from wbia.control import accessor_decors, controller_inject from wbia.control.controller_inject import make_ibs_register_decorator from os.path import join, exists, isabs import numpy as np import utool as ut import vtool as vt from wbia.web import routes_ajax from wbia.utils import call_houston print, rrr, profile = ut.inject2(__name__) logger = logging.getLogger('wbia') DEBUG_THUMB = False CLASS_INJECT_KEY, register_ibs_method = make_ibs_register_decorator(__name__) register_api = controller_inject.get_wbia_flask_api(__name__) IMAGE_TIME_POSIX = 'image_time_posix' IMAGE_LOCATION_CODE = 'image_location_code' IMAGE_TIMEDELTA_POSIX = 'image_timedelta_posix' PARTY_ROWID = 'party_rowid' CONTRIBUTOR_ROWID = 'contributor_rowid' ANNOT_ROWID = 'annot_rowid' ANNOT_ROWIDS = 'annot_rowids' IMAGE_ROWID = 'image_rowid' IMAGE_COLNAMES = ( 'image_uuid', 'image_uri', 'image_uri_original', 'image_original_name', 'image_ext', 'image_width', 'image_height', 'image_time_posix', 'image_gps_lat', 'image_gps_lon', 'image_orientation', 'image_note', ) @register_ibs_method @accessor_decors.ider def _get_all_gids(ibs): all_gids = ibs._get_all_image_rowids() return all_gids @register_ibs_method def _get_all_image_rowids(ibs): all_image_rowids = ibs.db.get_all_rowids(const.IMAGE_TABLE) return all_image_rowids @register_ibs_method @accessor_decors.ider @register_api('/api/image/', methods=['GET']) def get_valid_gids( ibs, imgsetid=None, imgsetid_list=(), require_unixtime=False, require_gps=None, reviewed=None, **kwargs, ): if imgsetid is None and not imgsetid_list: gid_list = ibs._get_all_gids() elif imgsetid_list: gid_list = ibs.get_imageset_gids(imgsetid_list) else: assert not ut.isiterable(imgsetid) gid_list = ibs.get_imageset_gids(imgsetid) if require_unixtime: unixtime_list = ibs.get_image_unixtime(gid_list, **kwargs) isvalid_list = [unixtime != -1 for unixtime in unixtime_list] gid_list = ut.compress(gid_list, isvalid_list) if require_gps: isvalid_gps = [ lat != -1 and lon != -1 for lat, lon in ibs.get_image_gps(gid_list) ] gid_list = ut.compress(gid_list, isvalid_gps) if reviewed is not None: reviewed_list = ibs.get_image_reviewed(gid_list) isvalid_list = [reviewed == flag for flag in reviewed_list] gid_list = ut.compress(gid_list, isvalid_list) return gid_list @register_ibs_method @register_api('/api/image/<rowid>/', methods=['GET']) def image_base64_api(rowid=None, thumbnail=False, fresh=False, **kwargs): return routes_ajax.image_src(rowid, thumbnail=thumbnail, fresh=fresh, **kwargs) @register_ibs_method @accessor_decors.getter_1to1 def get_image_gid(ibs, gid_list, eager=True, nInput=None): id_iter = gid_list colnames = (IMAGE_ROWID,) gid_list = ibs.db.get( const.IMAGE_TABLE, colnames, id_iter, id_colname='rowid', eager=eager, nInput=nInput, ) return gid_list @register_ibs_method @register_api('/api/image/dict/', methods=['GET']) def get_image_gids_with_aids(ibs, gid_list=None): if gid_list is None: gid_list = sorted(ibs.get_valid_gids()) aids_list = ibs.get_image_aids(gid_list) zipped = zip(gid_list, aids_list) combined_dict = {gid: aid_list for gid, aid_list in zipped} return combined_dict @register_ibs_method @accessor_decors.ider def get_valid_image_rowids(ibs, imgsetid=None, require_unixtime=False, reviewed=None): return get_valid_gids(ibs, imgsetid, require_unixtime, reviewed) @register_ibs_method def get_num_images(ibs, **kwargs): gid_list = ibs.get_valid_gids(**kwargs) return len(gid_list) @register_ibs_method def _compute_image_uuids(ibs, 
gpath_list, sanitize=True, ensure=True, **kwargs): from wbia.algo.preproc import preproc_image from wbia.other import ibsfuncs if sanitize: gpath_list = ibsfuncs.ensure_unix_gpaths(gpath_list) force_serial = ibs.force_serial or ibs.production params_list = list( ut.generate2( preproc_image.parse_imageinfo, list(zip(gpath_list)), nTasks=len(gpath_list), ordered=True, force_serial=force_serial, futures_threaded=True, ) ) failed_list = [ gpath for (gpath, params_) in zip(gpath_list, params_list) if not params_ ] logger.info( '\n'.join([' ! Failed reading gpath=%r' % (gpath,) for gpath in failed_list]) ) if ensure and len(failed_list) > 0: logger.info('Importing %d files failed: %r' % (len(failed_list), failed_list)) return params_list @register_ibs_method @register_api('/api/image/uuid/', methods=['POST']) def compute_image_uuids(ibs, gpath_list, **kwargs): params_list = ibs._compute_image_uuids(gpath_list, **kwargs) uuid_colx = IMAGE_COLNAMES.index('image_uuid') uuid_list = [ None if params_ is None else params_[uuid_colx] for params_ in params_list ] return uuid_list @register_ibs_method @accessor_decors.adder @accessor_decors.cache_invalidator(const.IMAGESET_TABLE, ['percent_imgs_reviewed_str']) @register_api('/api/image/', methods=['POST']) def add_images( ibs, gpath_list, params_list=None, as_annots=False, auto_localize=None, location_for_names=None, ensure_unique=False, ensure_loadable=True, ensure_exif=True, **kwargs, ): logger.info('[ibs] add_images') logger.info('[ibs] len(gpath_list) = %d' % len(gpath_list)) if auto_localize is None: auto_localize = ibs.cfg.other_cfg.auto_localize location_for_names = None if location_for_names is None: location_for_names = ibs.cfg.other_cfg.location_for_names compute_params = params_list is None if compute_params: params_list = ibs._compute_image_uuids(gpath_list, **kwargs) debug = False if debug: uuid_colx = IMAGE_COLNAMES.index('image_uuid') uuid_list = [ None if params_ is None else params_[uuid_colx] for params_ in params_list ] gid_list_ = ibs.get_image_gids_from_uuid(uuid_list) valid_gids = ibs.get_valid_gids() valid_uuids = ibs.get_image_uuids(valid_gids) logger.info('[preadd] uuid / gid_ = ' + ut.indentjoin(zip(uuid_list, gid_list_))) logger.info( '[preadd] valid uuid / gid = ' + ut.indentjoin(zip(valid_uuids, valid_gids)) ) colnames = IMAGE_COLNAMES + ('image_original_path', 'image_location_code') params_list = [ tuple(params) + (gpath, location_for_names) if params is not None else None for params, gpath in zip(params_list, gpath_list) ] all_gid_list = ibs.db.add_cleanly( const.IMAGE_TABLE, colnames, params_list, ibs.get_image_gids_from_uuid ) none_set = set([None]) all_gid_set = set(all_gid_list) all_valid_gid_set = all_gid_set - none_set all_valid_gid_list = list(all_valid_gid_set) if auto_localize: ibs.localize_images(all_valid_gid_list) has_duplicates = ut.duplicates_exist(all_gid_list) if ensure_unique and has_duplicates: debug_gpath_list = ibs.get_image_paths(all_gid_list) debug_guuid_list = ibs.get_image_uuids(all_gid_list) debug_gext_list = ibs.get_image_exts(all_gid_list) ut.debug_duplicate_items( all_gid_list, debug_gpath_list, debug_guuid_list, debug_gext_list ) if ensure_loadable or ensure_exif: valid_gpath_list = ibs.get_image_paths(all_valid_gid_list) bad_load_list, bad_exif_list = ibs.check_image_loadable(all_valid_gid_list) bad_load_set = set(bad_load_list) bad_exif_set = set(bad_exif_list) delete_gid_set = set([]) for valid_gid, valid_gpath in zip(all_valid_gid_list, valid_gpath_list): if ensure_loadable and valid_gid in 
bad_load_set: logger.info( 'Loadable Image Validation: Failed to load %r' % (valid_gpath,) ) delete_gid_set.add(valid_gid) if ensure_exif and valid_gid in bad_exif_set: logger.info( 'Loadable EXIF Validation: Failed to load %r' % (valid_gpath,) ) delete_gid_set.add(valid_gid) delete_gid_list = list(delete_gid_set) ibs.delete_images(delete_gid_list, trash_images=False) all_valid_gid_set = all_gid_set - delete_gid_set - none_set all_valid_gid_list = list(all_valid_gid_set) if not compute_params: guuid_list = ibs.get_image_uuids(all_gid_list) guuid_list_ = ibs.compute_image_uuids(gpath_list) assert guuid_list == guuid_list_ if as_annots: aid_list = ibs.use_images_as_annotations(all_valid_gid_list) logger.info('[ibs] added %d annotations' % (len(aid_list),)) assert None not in all_valid_gid_set all_gid_list = [aid if aid in all_valid_gid_set else None for aid in all_gid_list] assert len(gpath_list) == len(all_gid_list) return all_gid_list @register_ibs_method def get_image_exif_original(ibs, gid_list): import vtool.exif as vtexif from PIL import Image gpath_list = ibs.get_image_paths(gid_list) exif_dict_list = [] for gpath in gpath_list: with Image.open(gpath, 'r') as pil_img: exif_dict = vtexif.get_exif_dict(pil_img) exif_dict_list.append(exif_dict) return exif_dict_list @register_ibs_method def localize_images(ibs, gid_list_=None): import requests import urllib urlsplit = urllib.parse.urlsplit urlquote = urllib.parse.quote urlunquote = urllib.parse.unquote if gid_list_ is None: logger.info('WARNING: you are localizing all gids') gid_list_ = ibs.get_valid_gids() isvalid_list = [gid is not None for gid in gid_list_] gid_list = ut.unique(ut.compress(gid_list_, isvalid_list)) uri_list = ibs.get_image_uris(gid_list) url_protos = ['https://', 'http://'] s3_proto = ['s3://'] houston_proto = ['houston+'] valid_protos = s3_proto + url_protos + houston_proto def isproto(uri, valid_protos): return any(uri.startswith(proto) for proto in valid_protos) def islocal(uri): return not (isabs(uri) and isproto(uri, valid_protos)) guuid_list = ibs.get_image_uuids(gid_list) gext_list = ibs.get_image_exts(gid_list) guuid_strs = (str(guuid) for guuid in guuid_list) loc_gname_list = [guuid + ext for (guuid, ext) in zip(guuid_strs, gext_list)] loc_gpath_list = [join(ibs.imgdir, gname) for gname in loc_gname_list] for uri, loc_gpath in zip(uri_list, loc_gpath_list): logger.info('Localizing %r -> %r' % (uri, loc_gpath)) if isproto(uri, valid_protos): if isproto(uri, s3_proto): logger.info('\tAWS S3 Fetch') s3_dict = ut.s3_str_decode_to_dict(uri) ut.grab_s3_contents(loc_gpath, **s3_dict) elif isproto(uri, url_protos): logger.info('\tURL Download') uri_ = urlunquote(uri) uri_ = urlsplit(uri_, allow_fragments=False) uri_path = urlquote(uri_.path.encode('utf8')) uri_ = uri_._replace(path=uri_path) uri_ = uri_.geturl() try: response = requests.get(uri_, stream=True, allow_redirects=True) assert ( response.status_code == 200 ), '200 code not received on download' except Exception: parts = urlsplit(uri_, allow_fragments=False) uri_ = uri_[len('%s://' % (parts.scheme,)) :] hostname = urlquote(parts.hostname.encode('utf8')) if parts.port: hostname = f'{hostname}:{parts.port}' uri_ = '%s://%s%s' % (parts.scheme, hostname, parts.path) response = requests.get(uri_, stream=True, allow_redirects=True) assert ( response.status_code == 200 ), '200 code not received on download' with open(loc_gpath, 'wb') as temp_file_: for chunk in response.iter_content(1024): temp_file_.write(chunk) elif isproto(uri, houston_proto): response = 
call_houston(uri) assert ( response.status_code == 200 ), f'200 code not received on download: {uri}' with open(loc_gpath, 'wb') as temp_file_: for chunk in response.iter_content(1024): temp_file_.write(chunk) else: raise ValueError('Sanity check failed') else: if not exists(loc_gpath): logger.info('\tIO Copy') uri if islocal(uri) else join(ibs.imgdir, uri) ut.copy_list([uri], [loc_gpath]) else: logger.info('\tSkipping (already localized)') ibs.set_image_uris(gid_list, loc_gname_list) assert all(map(exists, loc_gpath_list)), 'not all images copied' @register_ibs_method @accessor_decors.setter @register_api('/api/image/uri/', methods=['PUT']) def set_image_uris(ibs, gid_list, new_gpath_list): id_iter = ((gid,) for gid in gid_list) val_list = ((new_gpath,) for new_gpath in new_gpath_list) ibs.db.set(const.IMAGE_TABLE, ('image_uri',), val_list, id_iter) @register_ibs_method @accessor_decors.setter @register_api('/api/image/uri/original/', methods=['PUT']) def set_image_uris_original(ibs, gid_list, new_gpath_list, overwrite=False): if overwrite: gid_list_ = gid_list new_gpath_list_ = new_gpath_list else: current_uri_original_list = ibs.get_image_uris_original(gid_list) valid_flags = [ current is None or len(current) == 0 for current in current_uri_original_list ] invalid_flags = ut.not_list(valid_flags) nInvalid = sum(invalid_flags) if nInvalid > 0: logger.info( '[ibs] WARNING: Preventing overwrite of %d original uris' % (nInvalid,) ) new_gpath_list_ = ut.compress(new_gpath_list, valid_flags) gid_list_ = ut.compress(gid_list, valid_flags) id_iter = ((gid,) for gid in gid_list_) val_list = ((new_gpath,) for new_gpath in new_gpath_list_) ibs.db.set(const.IMAGE_TABLE, ('image_uri_original',), val_list, id_iter) @register_ibs_method @accessor_decors.setter @register_api('/api/image/contributor/rowid/', methods=['PUT']) def set_image_contributor_rowid(ibs, gid_list, contributor_rowid_list, **kwargs): id_iter = ((gid,) for gid in gid_list) val_list = ((contributor_rowid,) for contributor_rowid in contributor_rowid_list) ibs.db.set(const.IMAGE_TABLE, ('contributor_rowid',), val_list, id_iter, **kwargs) @register_ibs_method @accessor_decors.setter @accessor_decors.cache_invalidator(const.IMAGESET_TABLE, ['percent_imgs_reviewed_str']) @register_api('/api/image/reviewed/', methods=['PUT']) def set_image_reviewed(ibs, gid_list, reviewed_list): id_iter = ((gid,) for gid in gid_list) val_list = ((reviewed,) for reviewed in reviewed_list) ibs.db.set(const.IMAGE_TABLE, ('image_toggle_reviewed',), val_list, id_iter) @register_ibs_method @accessor_decors.setter def set_image_enabled(ibs, gid_list, enabled_list): id_iter = ((gid,) for gid in gid_list) val_list = ((enabled,) for enabled in enabled_list) ibs.db.set(const.IMAGE_TABLE, ('image_toggle_enabled',), val_list, id_iter) @register_ibs_method @accessor_decors.setter def set_image_cameratrap(ibs, gid_list, cameratrap_list): id_iter = ((gid,) for gid in gid_list) valid_set = set([False, True, None]) valid_list = [cameratrap in valid_set for cameratrap in cameratrap_list] assert False not in valid_list val_list = ((cameratrap,) for cameratrap in cameratrap_list) ibs.db.set(const.IMAGE_TABLE, ('image_toggle_cameratrap',), val_list, id_iter) @register_ibs_method @accessor_decors.setter @register_api('/api/image/note/', methods=['PUT']) def set_image_notes(ibs, gid_list, notes_list): id_iter = ((gid,) for gid in gid_list) val_list = ((notes,) for notes in notes_list) ibs.db.set(const.IMAGE_TABLE, ('image_note',), val_list, id_iter) @register_ibs_method 
@accessor_decors.setter @register_api('/api/image/metadata/', methods=['PUT']) def set_image_metadata(ibs, gid_list, metadata_dict_list): id_iter = ((gid,) for gid in gid_list) metadata_str_list = [] for metadata_dict in metadata_dict_list: metadata_str = ut.to_json(metadata_dict) metadata_str_list.append(metadata_str) val_list = ((metadata_str,) for metadata_str in metadata_str_list) ibs.db.set(const.IMAGE_TABLE, ('image_metadata_json',), val_list, id_iter) @register_ibs_method @accessor_decors.setter @register_api('/api/image/unixtime/', methods=['PUT']) def set_image_unixtime(ibs, gid_list, unixtime_list, duplicate_behavior='error'): id_iter = ((gid,) for gid in gid_list) val_list = ((unixtime,) for unixtime in unixtime_list) ibs.db.set( const.IMAGE_TABLE, (IMAGE_TIME_POSIX,), val_list, id_iter, duplicate_behavior=duplicate_behavior, ) @register_ibs_method @register_api('/api/image/time/posix/', methods=['PUT'])
Apache License 2.0
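A minimal usage sketch for the setter above, assuming a local wbia controller can be opened; the opendb call, the 'testdb1' database name, and the example timestamps are illustrative rather than taken from the source.

import wbia

ibs = wbia.opendb('testdb1')          # assumed local test database
gid_list = ibs.get_valid_gids()[:2]   # getter shown in the context above
# Store POSIX timestamps for the selected images, then read them back.
ibs.set_image_time_posix(gid_list, [1609459200, 1609545600])
print(ibs.get_image_unixtime(gid_list))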
berdario/pew
pew/pew.py
shell_config_cmd
python
def shell_config_cmd(argv):
    shell = supported_shell()
    if shell:
        print(pew_site / 'shell_config' / ('init.' + shell))
    else:
        err('Completions and prompts are unavailable for %s' %
            repr(os.environ.get('SHELL', '')))
Prints the path for the current $SHELL helper file
https://github.com/berdario/pew/blob/24a4b7d6fa760d34a7348686c1ce1fd5e72847ba/pew/pew.py#L85-L92
from __future__ import print_function, absolute_import, unicode_literals import os import sys import argparse import shutil import random import textwrap from functools import partial from subprocess import CalledProcessError from pathlib import Path try: from shutil import get_terminal_size except ImportError: from backports.shutil_get_terminal_size import get_terminal_size windows = sys.platform == 'win32' from clonevirtualenv import clone_virtualenv if not windows: try: from pythonz.commands.install import InstallCommand from pythonz.commands.uninstall import UninstallCommand from pythonz.installer.pythoninstaller import PythonInstaller, AlreadyInstalledError from pythonz.commands.list import ListCommand from pythonz.define import PATH_PYTHONS from pythonz.commands.locate import LocateCommand as LocatePython def ListPythons(): try: Path(PATH_PYTHONS).mkdir(parents=True) except OSError: pass return ListCommand() except: InstallCommand = ListPythons = LocatePython = UninstallCommand = lambda : sys.exit('You need to install the pythonz extra. pip install pew[pythonz]') else: InstallCommand = ListPythons = LocatePython = UninstallCommand = lambda : sys.exit('Command not supported on this platform') import shellingham from pew._utils import (check_call, invoke, expandpath, own, env_bin_dir, check_path, temp_environ, NamedTemporaryFile, to_unicode) from pew._print_utils import print_virtualenvs if sys.version_info[0] == 2: input = raw_input err = partial(print, file=sys.stderr) if windows: default_home = '~/.virtualenvs' else: default_home = os.path.join( os.environ.get('XDG_DATA_HOME', '~/.local/share'), 'virtualenvs') workon_home = expandpath( os.environ.get('WORKON_HOME', default_home)) def makedirs_and_symlink_if_needed(workon_home): if not workon_home.exists() and own(workon_home): workon_home.mkdir(parents=True) link = expandpath('~/.virtualenvs') if os.name == 'posix' and 'WORKON_HOME' not in os.environ and 'XDG_DATA_HOME' not in os.environ and not link.exists(): link.symlink_to(str(workon_home)) return True else: return False pew_site = Path(__file__).parent def supported_shell(): shell = Path(os.environ.get('SHELL', '')).stem if shell in ('bash', 'zsh', 'fish'): return shell
MIT License
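A small sketch of how the command above resolves the init script path, assuming pew is installed and importable; the '/bin/bash' value is only illustrative.

import os
from pew.pew import shell_config_cmd

os.environ['SHELL'] = '/bin/bash'   # supported_shell() reads this variable
shell_config_cmd([])                # argv is accepted but unused by this command
# Expected to print something like <pew package dir>/shell_config/init.bash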
ultrabug/uhashring
uhashring/ring_ketama.py
KetamaRing._remove_node
python
def _remove_node(self, node_name):
    try:
        self._nodes.pop(node_name)
    except Exception:
        raise KeyError(
            "node '{}' not found, available nodes: {}".format(
                node_name, self._nodes.keys()
            )
        )
    else:
        self._create_ring(self._nodes)

Remove the given node from the continuum/ring.

    :param node_name: the node name.
https://github.com/ultrabug/uhashring/blob/c6b6888a21141874a287f3ed5e0df7c518962858/uhashring/ring_ketama.py#L66-L80
from bisect import insort from collections import Counter from hashlib import md5 class KetamaRing: def __init__(self, replicas=4): self._distribution = Counter() self._keys = [] self._nodes = {} self._replicas = replicas self._ring = {} self._listbytes = lambda x: x def hashi(self, key, replica=0): dh = self._listbytes(md5(str(key).encode("utf-8")).digest()) rd = replica * 4 return (dh[3 + rd] << 24) | (dh[2 + rd] << 16) | (dh[1 + rd] << 8) | dh[0 + rd] def _hashi_weight_generator(self, node_name, node_conf): ks = ( node_conf["vnodes"] * len(self._nodes) * node_conf["weight"] ) // self._weight_sum for w in range(0, ks): w_node_name = f"{node_name}-{w}" for i in range(0, self._replicas): yield self.hashi(w_node_name, replica=i) @staticmethod def _listbytes(data): return map(ord, data) def _create_ring(self, nodes): _weight_sum = 0 for node_conf in self._nodes.values(): _weight_sum += node_conf["weight"] self._weight_sum = _weight_sum _distribution = Counter() _keys = [] _ring = {} for node_name, node_conf in self._nodes.items(): for h in self._hashi_weight_generator(node_name, node_conf): _ring[h] = node_name insort(_keys, h) _distribution[node_name] += 1 self._distribution = _distribution self._keys = _keys self._ring = _ring
BSD 3-Clause New or Revised License
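A sketch that drives the private ring methods directly to illustrate _remove_node; in normal use one would go through uhashring's public HashRing wrapper instead. The node names and configs are made up, and each config carries the 'vnodes' and 'weight' keys the ring code expects.

from uhashring.ring_ketama import KetamaRing

ring = KetamaRing()
ring._nodes = {
    'cache1': {'vnodes': 40, 'weight': 1},
    'cache2': {'vnodes': 40, 'weight': 1},
}
ring._create_ring(ring._nodes)

ring._remove_node('cache2')      # rebuilds the continuum without cache2
try:
    ring._remove_node('missing')
except KeyError as exc:
    print(exc)                   # lists the nodes still available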
zachchristensen28/ta-opnsense
bin/ta_opnsense/aob_py3/future/backports/urllib/request.py
HTTPRedirectHandler.redirect_request
python
def redirect_request(self, req, fp, code, msg, headers, newurl):
    m = req.get_method()
    if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
             or code in (301, 302, 303) and m == "POST")):
        raise HTTPError(req.full_url, code, msg, headers, fp)

    newurl = newurl.replace(' ', '%20')
    CONTENT_HEADERS = ("content-length", "content-type")
    newheaders = dict((k, v) for k, v in req.headers.items()
                      if k.lower() not in CONTENT_HEADERS)
    return Request(newurl,
                   headers=newheaders,
                   origin_req_host=req.origin_req_host,
                   unverifiable=True)

Return a Request or None in response to a redirect.

    This is called by the http_error_30x methods when a redirection
    response is received.  If a redirection should take place, return a
    new Request to allow http_error_30x to perform the redirect.
    Otherwise, raise HTTPError if no-one else should try to handle this
    url.  Return None if you can't but another Handler might.
https://github.com/zachchristensen28/ta-opnsense/blob/fc736f4c6f0fa7866b4f6d2dcf9761b6b693d6cf/bin/ta_opnsense/aob_py3/future/backports/urllib/request.py#L636-L664
from __future__ import absolute_import, division, print_function, unicode_literals from future.builtins import bytes, dict, filter, input, int, map, open, str from future.utils import PY2, PY3, raise_with_traceback import base64 import bisect import hashlib import array from future.backports import email from future.backports.http import client as http_client from .error import URLError, HTTPError, ContentTooShortError from .parse import ( urlparse, urlsplit, urljoin, unwrap, quote, unquote, splittype, splithost, splitport, splituser, splitpasswd, splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse) from .response import addinfourl, addclosehook import io import os import posixpath import re import socket import sys import time import tempfile import contextlib import warnings from future.utils import PY2 if PY2: from collections import Iterable else: from collections.abc import Iterable try: import ssl from ssl import SSLContext except ImportError: _have_ssl = False else: _have_ssl = True __all__ = [ 'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler', 'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler', 'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm', 'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler', 'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler', 'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler', 'UnknownHandler', 'HTTPErrorProcessor', 'urlopen', 'install_opener', 'build_opener', 'pathname2url', 'url2pathname', 'getproxies', 'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener', ] __version__ = sys.version[:3] _opener = None def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs): if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault'] else: cadefault = False if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath'] else: capath = None if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile'] else: cafile = None global _opener if cafile or capath or cadefault: if not _have_ssl: raise ValueError('SSL support not available') context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) context.options |= ssl.OP_NO_SSLv2 context.verify_mode = ssl.CERT_REQUIRED if cafile or capath: context.load_verify_locations(cafile, capath) else: context.set_default_verify_paths() https_handler = HTTPSHandler(context=context, check_hostname=True) opener = build_opener(https_handler) elif _opener is None: _opener = opener = build_opener() else: opener = _opener return opener.open(url, data, timeout) def install_opener(opener): global _opener _opener = opener _url_tempfiles = [] def urlretrieve(url, filename=None, reporthook=None, data=None): url_type, path = splittype(url) with contextlib.closing(urlopen(url, data)) as fp: headers = fp.info() if url_type == "file" and not filename: return os.path.normpath(path), headers if filename: tfp = open(filename, 'wb') else: tfp = tempfile.NamedTemporaryFile(delete=False) filename = tfp.name _url_tempfiles.append(filename) with tfp: result = filename, headers bs = 1024*8 size = -1 read = 0 blocknum = 0 if "content-length" in headers: size = int(headers["Content-Length"]) if reporthook: reporthook(blocknum, bs, size) while True: block = fp.read(bs) if not block: break read += len(block) tfp.write(block) blocknum += 1 if reporthook: reporthook(blocknum, bs, size) if size >= 0 and read < size: raise ContentTooShortError( "retrieval 
incomplete: got only %i out of %i bytes" % (read, size), result) return result def urlcleanup(): for temp_file in _url_tempfiles: try: os.unlink(temp_file) except EnvironmentError: pass del _url_tempfiles[:] global _opener if _opener: _opener = None if PY3: _cut_port_re = re.compile(r":\d+$", re.ASCII) else: _cut_port_re = re.compile(r":\d+$") def request_host(request): url = request.full_url host = urlparse(url)[1] if host == "": host = request.get_header("Host", "") host = _cut_port_re.sub("", host, 1) return host.lower() class Request(object): def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=False, method=None): self.full_url = unwrap(url) self.full_url, self.fragment = splittag(self.full_url) self.data = data self.headers = {} self._tunnel_host = None for key, value in headers.items(): self.add_header(key, value) self.unredirected_hdrs = {} if origin_req_host is None: origin_req_host = request_host(self) self.origin_req_host = origin_req_host self.unverifiable = unverifiable self.method = method self._parse() def _parse(self): self.type, rest = splittype(self.full_url) if self.type is None: raise ValueError("unknown url type: %r" % self.full_url) self.host, self.selector = splithost(rest) if self.host: self.host = unquote(self.host) def get_method(self): if self.method is not None: return self.method elif self.data is not None: return "POST" else: return "GET" def get_full_url(self): if self.fragment: return '%s#%s' % (self.full_url, self.fragment) else: return self.full_url def add_data(self, data): msg = "Request.add_data method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) self.data = data def has_data(self): msg = "Request.has_data method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.data is not None def get_data(self): msg = "Request.get_data method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.data def get_type(self): msg = "Request.get_type method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.type def get_host(self): msg = "Request.get_host method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.host def get_selector(self): msg = "Request.get_selector method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.selector def is_unverifiable(self): msg = "Request.is_unverifiable method is deprecated." warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.unverifiable def get_origin_req_host(self): msg = "Request.get_origin_req_host method is deprecated." 
warnings.warn(msg, DeprecationWarning, stacklevel=1) return self.origin_req_host def set_proxy(self, host, type): if self.type == 'https' and not self._tunnel_host: self._tunnel_host = self.host else: self.type= type self.selector = self.full_url self.host = host def has_proxy(self): return self.selector == self.full_url def add_header(self, key, val): self.headers[key.capitalize()] = val def add_unredirected_header(self, key, val): self.unredirected_hdrs[key.capitalize()] = val def has_header(self, header_name): return (header_name in self.headers or header_name in self.unredirected_hdrs) def get_header(self, header_name, default=None): return self.headers.get( header_name, self.unredirected_hdrs.get(header_name, default)) def header_items(self): hdrs = self.unredirected_hdrs.copy() hdrs.update(self.headers) return list(hdrs.items()) class OpenerDirector(object): def __init__(self): client_version = "Python-urllib/%s" % __version__ self.addheaders = [('User-agent', client_version)] self.handlers = [] self.handle_open = {} self.handle_error = {} self.process_response = {} self.process_request = {} def add_handler(self, handler): if not hasattr(handler, "add_parent"): raise TypeError("expected BaseHandler instance, got %r" % type(handler)) added = False for meth in dir(handler): if meth in ["redirect_request", "do_open", "proxy_open"]: continue i = meth.find("_") protocol = meth[:i] condition = meth[i+1:] if condition.startswith("error"): j = condition.find("_") + i + 1 kind = meth[j+1:] try: kind = int(kind) except ValueError: pass lookup = self.handle_error.get(protocol, {}) self.handle_error[protocol] = lookup elif condition == "open": kind = protocol lookup = self.handle_open elif condition == "response": kind = protocol lookup = self.process_response elif condition == "request": kind = protocol lookup = self.process_request else: continue handlers = lookup.setdefault(kind, []) if handlers: bisect.insort(handlers, handler) else: handlers.append(handler) added = True if added: bisect.insort(self.handlers, handler) handler.add_parent(self) def close(self): pass def _call_chain(self, chain, kind, meth_name, *args): handlers = chain.get(kind, ()) for handler in handlers: func = getattr(handler, meth_name) result = func(*args) if result is not None: return result def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT): if isinstance(fullurl, bytes): fullurl = fullurl.decode() if isinstance(fullurl, str): req = Request(fullurl, data) else: req = fullurl if data is not None: req.data = data req.timeout = timeout protocol = req.type meth_name = protocol+"_request" for processor in self.process_request.get(protocol, []): meth = getattr(processor, meth_name) req = meth(req) response = self._open(req, data) meth_name = protocol+"_response" for processor in self.process_response.get(protocol, []): meth = getattr(processor, meth_name) response = meth(req, response) return response def _open(self, req, data=None): result = self._call_chain(self.handle_open, 'default', 'default_open', req) if result: return result protocol = req.type result = self._call_chain(self.handle_open, protocol, protocol + '_open', req) if result: return result return self._call_chain(self.handle_open, 'unknown', 'unknown_open', req) def error(self, proto, *args): if proto in ('http', 'https'): dict = self.handle_error['http'] proto = args[2] meth_name = 'http_error_%s' % proto http_err = 1 orig_args = args else: dict = self.handle_error meth_name = proto + '_error' http_err = 0 args = (dict, proto, 
meth_name) + args result = self._call_chain(*args) if result: return result if http_err: args = (dict, 'default', 'http_error_default') + orig_args return self._call_chain(*args) def build_opener(*handlers): def isclass(obj): return isinstance(obj, type) or hasattr(obj, "__bases__") opener = OpenerDirector() default_classes = [ProxyHandler, UnknownHandler, HTTPHandler, HTTPDefaultErrorHandler, HTTPRedirectHandler, FTPHandler, FileHandler, HTTPErrorProcessor] if hasattr(http_client, "HTTPSConnection"): default_classes.append(HTTPSHandler) skip = set() for klass in default_classes: for check in handlers: if isclass(check): if issubclass(check, klass): skip.add(klass) elif isinstance(check, klass): skip.add(klass) for klass in skip: default_classes.remove(klass) for klass in default_classes: opener.add_handler(klass()) for h in handlers: if isclass(h): h = h() opener.add_handler(h) return opener class BaseHandler(object): handler_order = 500 def add_parent(self, parent): self.parent = parent def close(self): pass def __lt__(self, other): if not hasattr(other, "handler_order"): return True return self.handler_order < other.handler_order class HTTPErrorProcessor(BaseHandler): handler_order = 1000 def http_response(self, request, response): code, msg, hdrs = response.code, response.msg, response.info() if not (200 <= code < 300): response = self.parent.error( 'http', request, response, code, msg, hdrs) return response https_response = http_response class HTTPDefaultErrorHandler(BaseHandler): def http_error_default(self, req, fp, code, msg, hdrs): raise HTTPError(req.full_url, code, msg, hdrs, fp) class HTTPRedirectHandler(BaseHandler): max_repeats = 4 max_redirections = 10
MIT License
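A hedged sketch of the redirect handling above, assuming the upstream future package (of which this file is a vendored copy) is installed; the URLs and the Accept header are made up.

from future.backports.urllib.request import HTTPRedirectHandler, Request

handler = HTTPRedirectHandler()
req = Request('http://example.com/old', headers={'Accept': 'text/html'})
# A 302 answer to a GET is redirectable; fp is only used when an HTTPError is raised.
new_req = handler.redirect_request(
    req, fp=None, code=302, msg='Found', headers={},
    newurl='http://example.com/new path')
print(new_req.full_url)   # spaces are escaped: http://example.com/new%20path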
googleclouddataproc/jupyterhub-dataprocspawner
dataprocspawner/spawner.py
DataprocSpawner.get_cluster_definition
python
def get_cluster_definition(self, file_path):
  config_string = self.read_gcs_file(file_path)
  config_dict = yaml.load(config_string, Loader=yaml.FullLoader)
  config_dict.setdefault('config', {})

  skip_properties = {}
  skip_metadata = {}
  if 'properties' in config_dict['config'].setdefault('softwareConfig', {}):
    skip_properties = config_dict['config']['softwareConfig']['properties']
    del config_dict['config']['softwareConfig']['properties']
  if 'metadata' in config_dict['config'].setdefault('gceClusterConfig', {}):
    skip_metadata = config_dict['config']['gceClusterConfig']['metadata']
    del config_dict['config']['gceClusterConfig']['metadata']

  config_string = yaml.dump(config_dict)
  config_string = self.camelcase_to_snakecase(config_string)
  config_dict = yaml.load(config_string, Loader=yaml.FullLoader)

  if skip_properties:
    config_dict['config']['software_config']['properties'] = skip_properties
  if skip_metadata:
    config_dict['config']['gce_cluster_config']['metadata'] = skip_metadata

  self.log.debug(f'config_dict is {config_dict}')
  return config_dict

Returns the cluster definition read from a YAML file on GCS.

  Usage: file_path('mybucket/subfolder/filename.yaml'). Make sure that there
  are no comments in the yaml file.

  Find Dataproc properties here:
  https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/ClusterConfig
  https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/cluster-properties

  Args:
    String file_path: path to the file to read. Includes the bucket and any
      folders in the bucket.

  Returns:
    dict: Content of the file parsed into a snake_case cluster definition dict.
https://github.com/googleclouddataproc/jupyterhub-dataprocspawner/blob/8859c088088cf7e15f0b395aaa4b1334a1d1c894/dataprocspawner/spawner.py#L783-L825
import asyncio import json import math import os import random import re import string from datetime import datetime as dt from types import SimpleNamespace import proto import requests import yaml from async_generator import aclosing, async_generator, yield_ from dataprocspawner.customize_cluster import ( get_base_cluster_html_form, get_custom_cluster_html_form, ) from dataprocspawner.spawnable import DataprocHubServer from google.api_core import exceptions from google.cloud import logging_v2, storage from google.cloud.dataproc_v1beta2 import ( Cluster, ClusterControllerClient, ClusterStatus, ) from google.cloud.dataproc_v1beta2.services.cluster_controller.transports import ( ClusterControllerGrpcTransport, ) from google.cloud.dataproc_v1beta2.types.shared import Component from google.protobuf.json_format import MessageToDict from googleapiclient import discovery from jupyterhub import orm from jupyterhub.spawner import Spawner from tornado import web from traitlets import Bool, Dict, List, Unicode def url_path_join(*pieces): initial = pieces[0].startswith('/') final = pieces[-1].endswith('/') stripped = [s.strip('/') for s in pieces] result = '/'.join(s for s in stripped if s) if initial: result = '/' + result if final: result = result + '/' if result == '//': result = '/' return result def _validate_proto(data, proto_cls): if not isinstance(proto_cls, proto.message.MessageMeta): return if not data or not isinstance(data, dict): return meta = proto_cls._meta warnings = [] for field in data.copy(): if field in meta.fields: field_valid, new_warnings = _validate_proto_field(data[field], meta.fields[field]) warnings.extend(new_warnings) if not field_valid: warnings.append(f'Removing unknown/bad value {data[field]} for field {field}.') del data[field] else: warnings.append(f'Removing unknown field {field} for class {proto_cls}') del data[field] return warnings def _validate_proto_field(data, field_descriptor): if not data: return True, [] fd = field_descriptor warnings = [] if fd.message and isinstance(fd.message, proto.message.MessageMeta): if fd.message._meta.options and fd.message._meta.options.map_entry: return True, warnings to_validate = [data] if not fd.repeated else data for entry in to_validate: warnings.extend(_validate_proto(entry, fd.message)) return True, warnings elif fd.enum: if fd.repeated: to_del = [] for i, val in enumerate(data): if val not in fd.enum.__members__: warnings.append(f'Removing unknown/bad value {val} for field {fd.name}.') to_del.append(i) to_del.reverse() for i in to_del: del data[i] return True, warnings else: return (data in fd.enum.__members__, warnings) return True, warnings class DataprocSpawner(Spawner): poll_interval = 5 http_timeout = 1800 project = Unicode( config=True, help=""" The project on Google Cloud Platform that the Dataproc clusters should be created under. This must be configured. """,) region = Unicode( 'us-central1', config=True, help=""" The region in which to run the Dataproc cluster. Defaults to us-central1. Currently does not support using 'global' because the initialization for the cluster gRPC transport would be different. """,) zone = Unicode( 'us-central1-a', config=True, help=""" The zone in which to run the Dataproc cluster.""",) cluster_data = Dict( config=True, help=""" Admin provided dict for setting up Dataproc cluster. If this field is not provided, the cluster configuration is set using YAML files on GCE. """,) gcs_notebooks = Unicode( config=True, help=""" GCS location to save Notebooks for a stateful experience. 
This must be configured. """,) gcs_user_folder = Unicode( config=True, help=""" GCS location to save the user's Notebooks. """,) dataproc_configs = Unicode( config=True, help=""" Comma separated list of the dataproc configurations available in the user spawning form. Each path can be a bucket, subfolder or file and can include the prefix gs:// or not and the suffix / or not. Example: 'bucket/configs/,gs://bucket/configs/file.yaml,gs://bucket' """,) dataproc_default_subnet = Unicode( config=True, help=""" GCP subnet where to deploy the spawned Cloud Dataproc cluster. If not provided in the config yaml, defaults to the same as JupyterHub. """,) dataproc_service_account = Unicode( config=True, help=""" This solution uses a default service account for all spawned cluster if not provided by the administrator. """,) dataproc_locations_list = Unicode( '', config=True, help=""" Comma separated list of the zone letters where to spawn Cloud Dataproc in the JupyterHub region. Example: 'a,b' This must be configured. """,) idle_checker = Dict( {'idle_job_path': '', 'idle_path': '', 'timeout': '60m'}, config=True, help=""" Set up shutdown of a cluster after some idle time. Base on https://github.com/blakedubois/dataproc-idle-check idle_job - gcs path to https://github.com/blakedubois/dataproc-idle-check/blob/master/isIdleJob.sh idle_path - gcs path to https://github.com/blakedubois/dataproc-idle-check/blob/master/isIdle.sh timeout - idle time after which cluster will be shutdown Check official documentation: https://github.com/blakedubois/dataproc-idle-check """,) allow_custom_clusters = Bool( False, config=True, help=""" Allow users to customize their cluster. """,) allow_random_cluster_names = Bool( False, config=True, help=""" Allow users to randomize their cluster names. """,) default_notebooks_gcs_path = Unicode( '', config=True, help=""" The gcs path where default notebooks stored. Don't load default notebooks if variable is empty. """,) default_notebooks_folder = Unicode( 'examples/', config=True, help='The name of folder into which service will copy default notebooks',) machine_types_list = Unicode( '', config=True, help='Allowed machine types',) env_keep = List( ['PATH', 'LANG'], config=True, help=""" Whitelist of environment variables for the single-user server to inherit from the JupyterHub process. This whitelist ensures that sensitive information in the JupyterHub process's environment (such as `CONFIGPROXY_AUTH_TOKEN`) is not passed to the single-user server's process. """,) spawner_host_type = Unicode( '', config=True, help='Host type on which the Spawner is running (e.g. gce, ain)',) force_add_jupyter_component = Bool( True, config=True, help=""" Whether to always enable the JUPYTER and ANACONDA optional components even if not explicitly specified in the cluster config. It is recommended to set this to True, as clusters without these components will *not* function correctly when spawned. """,) cluster_name_pattern = Unicode( 'dataprochub-{}', config=True, help=""" Format string for name of the Dataproc cluster spawned for each user. The cluster name will be generated by calling cluster_name_pattern.format(username). """) show_spawned_clusters_in_notebooks_list = Bool( False, config=True, help=""" Whether to show spawned single-user clusters in the list of Dataproc Notebooks. """) force_single_user = Bool( False, config=True, help="""Whether a notebook on a cluster can only be accessed by the user who spawned it. 
""") def __init__(self, *args, **kwargs): mock = kwargs.pop('_mock', False) super().__init__(*args, **kwargs) self.operation = None self.component_gateway_url = None self.progressor = {} metadata_instance = 'http://metadata.google.internal/computeMetadata/v1/instance' if not self.region: try: r = requests.get( f'{metadata_instance}/zone', headers={'Metadata-Flavor': 'Google'}) r.raise_for_status() self.region = '-'.join(r.text.split('/')[-1].split('-')[:-1]) self.log.info(f'# Using region {self.region}') except Exception as e: self.log.info( 'Fetching Hub region failed. Probably not running on GCE. ' f'Consider setting JUPYTERHUB_REGION as a container env: {e}.') self.hub_host = '' if 'hub_host' in kwargs: self.hub_host = kwargs.get('hub_host') else: try: r = requests.get( f'{metadata_instance}/attributes/proxy-url', headers={'Metadata-Flavor': 'Google'}) r.raise_for_status() self.hub_host = f'https://{r.text}/' self.log.info(f'Got proxy url {r.text} from metadata') except Exception as e: self.log.info(f'Failed to get proxy url from metadata: {e}') if mock: self.dataproc_client = kwargs.get('dataproc') self.gcs_client = kwargs.get('gcs') self.logging_client = kwargs.get('logging') self.compute_client = kwargs.get('compute') else: self.client_transport = ( ClusterControllerGrpcTransport( host=f'{self.region}-dataproc.googleapis.com:443')) self.dataproc_client = ClusterControllerClient( client_options={'api_endpoint': f'{self.region}-dataproc.googleapis.com:443'}) self.gcs_client = storage.Client(project=self.project) self.logging_client = logging_v2.LoggingServiceV2Client() self.compute_client = discovery.build('compute', 'v1', cache_discovery=False) if self.gcs_notebooks: if self.gcs_notebooks.startswith('gs://'): self.gcs_notebooks = self.gcs_notebooks[5:] self.gcs_user_folder = f'gs://{self.gcs_notebooks}/{self.get_username()}' if self.allow_random_cluster_names: self.rand_str = '-' + self._get_rand_string(4) else: self.rand_str = '' self.dataproc_zones = self._validate_zones(self.region, self.dataproc_locations_list) self.progressor[self.clustername()] = SimpleNamespace( bar=0, logging=set(), start='', yields=[]) async def start(self): if not self.project: self._raise_exception('You need to set a project') if (await self.get_cluster_status(self.clustername()) == ClusterStatus.State.DELETING): self._raise_exception(f'Cluster {self.clustername()} is pending deletion.') if not await self.exists(self.clustername()): self.create_example_notebooks() self.operation = await self.create_cluster() self.component_gateway_url = await self.get_cluster_notebook_endpoint( self.clustername()) start_notebook_cmd = self.cmd + self.get_args() start_notebook_cmd = ' '.join(start_notebook_cmd) self.log.info(f'start_notebook_cmd is: {start_notebook_cmd}') orm_server = orm.Server( proto='https', ip=self.component_gateway_url, port=443, base_url=self.user.base_url, cookie_name='cookie') cluster_data = { 'cluster_name': self.clustername(), 'cluster_project': self.project, 'cluster_region': self.region, } self._server = DataprocHubServer( dataproc_client=self.dataproc_client, cluster_data=cluster_data, connect_url=self.component_gateway_url, orm_server=orm_server) return self.component_gateway_url async def stop(self): self.log.info(f'Stopping cluster with name {self.clustername()}') if await self.exists(self.clustername()): result = self.dataproc_client.delete_cluster( project_id=self.project, region=self.region, cluster_name=self.clustername()) return result self.log.info(f'No cluster with name 
{self.clustername()}') return None async def poll(self): status = await self.get_cluster_status(self.clustername()) if status is None or status in (ClusterStatus.State.DELETING, ClusterStatus.State.UNKNOWN): return 1 elif status == ClusterStatus.State.ERROR: return 1 elif status == ClusterStatus.State.CREATING: self.log.info(f'{self.clustername()} is creating.') return None elif status in (ClusterStatus.State.RUNNING, ClusterStatus.State.UPDATING): self.log.info(f'{self.clustername()} is up and running.') return None @async_generator async def _generate_progress(self): if not self._spawn_pending or not self.operation: self.log.warning( "Spawn not pending, can't generate progress for %s", self._log_name) return operation_id = self.operation.operation.name.split('/')[-1] cluster_uuid = self.operation.metadata.cluster_uuid message_uuid = f'Operation {operation_id} for cluster uuid {cluster_uuid}' spawner_progressor = self.progressor[self.clustername()] self.log.debug(f'# Running _generate_progress with {spawner_progressor}.') if len(spawner_progressor.yields) == 0: yields_start = ( {'progress': 0, 'message': 'Server requested.'}, {'progress': 5, 'message': message_uuid}, ) spawner_progressor.bar = yields_start[-1]['progress'] spawner_progressor.logging.add('yields_start') spawner_progressor.yields += list(yields_start) spawner_progressor.start = dt.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') for y in spawner_progressor.yields: await yield_(y) async with aclosing(self.progress()) as progress: async for event in progress: await yield_(event) @async_generator async def progress(self): spawner_progressor = self.progressor[self.clustername()] if not self.operation: msg_existing = ( 'Trying to load progress but no cluster being created. One reason ' f'might be that a cluster named {self.clustername()} already exists ' 'and it was not spawned from this Dataproc Hub instance.') await yield_({'progress': 100, 'failed': True, 'message': msg_existing}) self._raise_exception(msg_existing) resources = [f'projects/{self.project}'] log_methods = {'doStart', 'instantiateMe', 'getOrCreateAgent', 'run', 'runBuiltinInitializationActions', 'awaitNameNodeSafeModeExit', 'runCustomInitializationActions'} filters_methods = ' OR '.join(f'"{method}"' for method in log_methods) filters_base = ( f'resource.type=cloud_dataproc_cluster AND ' f'resource.labels.cluster_name="{self.clustername()}" AND ' f'resource.labels.cluster_uuid="{self.operation.metadata.cluster_uuid}" AND ' f'log_name="projects/{self.project}/logs/google.dataproc.agent" AND ' f'labels."compute.googleapis.com/resource_name"="{self.clustername()}-m"' f' AND jsonPayload.method=({filters_methods})' ) filters = f'{filters_base} AND timestamp>="{spawner_progressor.start}"' self.log.debug(f'Filters are: {filters}') operation_done = False while not operation_done: try: operation_done = self.operation.done() except exceptions.GoogleAPICallError as e: self.log.warning(f'Error operation.done(): {e.message}') if not operation_done: await asyncio.sleep(10) try: self.log.info( f'# {self.clustername()}: {spawner_progressor.bar}%, fetching logs.') for entry in self.logging_client.list_log_entries(resources, filter_=filters): if entry.insert_id not in spawner_progressor.logging: payload = MessageToDict(entry.json_payload) message = f'{payload.get("method")}: {payload.get("message")}' spawner_progressor.bar += math.ceil((90 - spawner_progressor.bar) / 4) spawner_progressor.logging.add(entry.insert_id) tmp_yield = {'progress': spawner_progressor.bar, 'message': message} 
spawner_progressor.yields.append(tmp_yield) await yield_(tmp_yield) except (exceptions.GoogleAPICallError, exceptions.RetryError) as e: await yield_({'progress': spawner_progressor.bar, 'message': e.message}) continue except ValueError: await yield_({'progress': spawner_progressor.bar, 'message': 'ValueError'}) continue if self.operation.metadata.status.inner_state == 'FAILED': await yield_({ 'progress': 100, 'failed': True, 'message': f'FAILED: {self.operation.operation.error.message}'}) if self.operation.metadata.status.inner_state == 'DONE': status = await self.get_cluster_status(self.clustername()) if status == ClusterStatus.State.RUNNING: await yield_({ 'progress': 95, 'message': 'Cluster successfully created'}) def _options_form_default(self): base_html = get_base_cluster_html_form( self._list_gcs_files(self.dataproc_configs), self.dataproc_zones, self.region ) html_customize_cluster = '' if self.allow_custom_clusters: html_customize_cluster = get_custom_cluster_html_form( self._get_autoscaling_policy(), self.machine_types_list.split(',') ) return '\n'.join([ base_html, html_customize_cluster ]) def _list_gcs_files(self, gcs_paths, sep=',', sort=True): config_paths = [] if gcs_paths: for path in gcs_paths.split(sep): path = self._clean_gcs_path(path, return_gs=False) gcs_bucket = path.split('/')[0] gcs_prefix = '/'.join(path.split('/')[1:]) try: config_paths += [ f'{gcs_bucket}/{b.name}' for b in self.gcs_client.list_blobs(gcs_bucket, prefix=gcs_prefix)] except exceptions.NotFound: pass config_paths = list(set(config_paths)) if sort: config_paths = sorted(config_paths) return config_paths if config_paths else '' async def get_options_form(self): return self._options_form_default() def options_from_form(self, formdata): self.log.info(f'formdata is {formdata}') options = {} for key, value in formdata.items(): if value and isinstance(value, list): value = value[0] else: value = None if key == 'cluster_zone': self.zone = value or self.zone options[key] = value return options def get_env(self): env = super().get_env() remove_env = [ 'JUPYTERHUB_API_TOKEN', 'JPY_API_TOKEN', 'JUPYTERHUB_CLIENT_ID', 'JUPYTERHUB_OAUTH_CALLBACK_URL', 'JUPYTERHUB_API_URL', ] for e in remove_env: env[e] = '' self.log.debug(f'env is {env}') return env def get_args(self): args = [] if self.debug: args.append('--debug') args.append('--SingleUserNotebookApp.hub_activity_interval=0') args.append('--SingleUserNotebookApp.hub_host={}'.format(self.hub_host)) args.extend(self.args) return args def _raise_exception(self, msg): e = web.HTTPError(500, msg) e.jupyterhub_message = msg raise e def get_dataproc_master_fqdn(self): if ':' in self.project: domain_name, domain_project = self.project.split(':') return f'{self.clustername()}-m.{self.zone}.c.{domain_project}.{domain_name}.internal' else: return f'{self.clustername()}-m.{self.zone}.c.{self.project}.internal' def camelcase_to_snakecase(self, cc): sc = re.sub('(^[_a-z \t-]*)([a-z])([A-Z])', r'\1\2_\3', cc) sc = re.sub('([a-z])([A-Z])(?=.+:)', r'\1_\2', sc) sc = re.sub('([a-zA-Z0-9_]+):', lambda m: m.group(0).lower(), sc) return sc def read_gcs_file(self, file_path) -> dict: if file_path: file_path = file_path.replace('gs://', '').replace('//', '/').split('/') bn = file_path[0] fp = '/'.join(file_path[1:]) working_bucket = self.gcs_client.get_bucket(bn) config_blob = working_bucket.get_blob(fp) config_string = config_blob.download_as_string() config_string = config_string.decode('utf-8') return config_string
Apache License 2.0
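A standalone sketch of the key transformation get_cluster_definition performs: converting camelCase YAML keys to snake_case while holding the user-defined properties and metadata maps aside so their keys are not rewritten. The helper mirrors camelcase_to_snakecase from the context above, and the sample config values are invented.

import re
import yaml

def camelcase_to_snakecase(cc):
    # Same regex cascade as DataprocSpawner.camelcase_to_snakecase.
    sc = re.sub('(^[_a-z \t-]*)([a-z])([A-Z])', r'\1\2_\3', cc)
    sc = re.sub('([a-z])([A-Z])(?=.+:)', r'\1_\2', sc)
    sc = re.sub('([a-zA-Z0-9_]+):', lambda m: m.group(0).lower(), sc)
    return sc

config_dict = {
    'config': {
        'masterConfig': {'machineTypeUri': 'n1-standard-4'},
        'softwareConfig': {'properties': {'spark:spark.executor.memory': '4g'}},
        'gceClusterConfig': {'metadata': {'someUserKey': 'value'}},
    }
}
# Hold the user-defined maps aside so their keys keep their original case.
skip_properties = config_dict['config']['softwareConfig'].pop('properties')
skip_metadata = config_dict['config']['gceClusterConfig'].pop('metadata')

converted = yaml.safe_load(camelcase_to_snakecase(yaml.dump(config_dict)))
converted['config']['software_config']['properties'] = skip_properties
converted['config']['gce_cluster_config']['metadata'] = skip_metadata
print(converted['config']['master_config'])   # {'machine_type_uri': 'n1-standard-4'}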
guilgautier/dppy
dppy/exact_sampling.py
proj_dpp_sampler_eig_GS
python
def proj_dpp_sampler_eig_GS(eig_vecs, size=None, random_state=None):
    rng = check_random_state(random_state)

    V = eig_vecs
    N, rank = V.shape
    if size is None:
        size = rank

    ground_set = np.arange(N)
    sampl = np.zeros(size, dtype=int)
    avail = np.ones(N, dtype=bool)

    c = np.zeros((N, size))
    norms_2 = inner1d(V, axis=1)

    for it in range(size):
        j = rng.choice(ground_set[avail],
                       p=np.abs(norms_2[avail]) / (rank - it))
        sampl[it] = j
        if it == size - 1:
            break

        avail[j] = False
        c[avail, it] = (V[avail, :].dot(V[j, :])
                        - c[avail, :it].dot(c[j, :it])) / np.sqrt(norms_2[j])
        norms_2[avail] -= c[avail, it]**2

    return sampl.tolist()

Sample from projection :math:`\\operatorname{DPP}(K)` using the
    eigendecomposition of the projection kernel :math:`K=VV^{\\top}` where
    :math:`V^{\\top}V = I_r` and :math:`r=\\operatorname{rank}(\\mathbf{K})`.
    It performs a sequential update of the Cholesky decomposition, which is
    equivalent to Gram-Schmidt orthogonalization of the rows of the
    eigenvectors.

    :param eig_vecs:
        Eigenvectors used to form projection kernel :math:`K=VV^{\\top}`.
    :type eig_vecs:
        array_like

    :return:
        A sample from projection :math:`\\operatorname{DPP}(K)`.
    :rtype:
        list, array_like

    .. seealso::

        - :cite:`TrBaAm18` Algorithm 3, :cite:`Gil14` Algorithm 2
        - :func:`proj_dpp_sampler_eig_GS_bis <proj_dpp_sampler_eig_GS_bis>`
        - :func:`proj_dpp_sampler_eig_KuTa12 <proj_dpp_sampler_eig_KuTa12>`
https://github.com/guilgautier/dppy/blob/cb4577f75ca998481ca6f248af10b19f986eca1c/dppy/exact_sampling.py#L444-L504
import numpy as np import scipy.linalg as la from dppy.utils import inner1d, check_random_state, get_progress_bar from dppy.intermediate_sampling import (vfx_sampling_precompute_constants, vfx_sampling_do_sampling_loop, alpha_dpp_sampling_precompute_constants, alpha_dpp_sampling_do_sampling_loop) def proj_dpp_sampler_kernel(kernel, mode='GS', size=None, random_state=None): rng = check_random_state(random_state) if size: rank = np.rint(np.trace(kernel)).astype(int) if size > rank: raise ValueError('size k={} > rank={}'. format(size, rank)) if mode == 'GS': sampl = proj_dpp_sampler_kernel_GS(kernel, size, rng) elif mode == 'Chol': sampl = proj_dpp_sampler_kernel_Chol(kernel, size, rng)[0] elif mode == 'Schur': sampl = proj_dpp_sampler_kernel_Schur(kernel, size, rng) else: str_list = ['Invalid sampling mode, choose among:', '- "GS (default)', '- "Chol"', '- "Schur"', 'Given "{}"'.format(mode)] raise ValueError('\n'.join(str_list)) return sampl def proj_dpp_sampler_kernel_Chol(K, size=None, random_state=None): rng = check_random_state(random_state) hermitian = True if K.dtype.kind == 'c' else False N, rank = len(K), np.rint(np.trace(K)).astype(int) if size is None: size = rank A = K.copy() d = np.diagonal(A).astype(float) orig_indices = np.arange(N) for j in range(size): t = rng.choice(range(j, N), p=np.abs(d[j:]) / (rank - j)) A[t + 1:, [j, t]] = A[t + 1:, [t, j]] tmp = A[j + 1:t, j].copy() np.conj(A[t, j + 1:t], out=A[j + 1:t, j]) np.conj(tmp, out=A[t, j + 1:t]) A[t, j] = A[t, j].conj() A[[j, t], [j, t]] = A[[t, j], [t, j]].real A[[j, t], :j] = A[[t, j], :j] orig_indices[[j, t]] = orig_indices[[t, j]] d[[j, t]] = d[[t, j]] A[j, j] = np.sqrt(d[j]) if j == size - 1: break A[j + 1:, j] -= A[j + 1:, :j].dot(A[j, :j].conj()) A[j + 1:, j] /= A[j, j] if hermitian: d[j + 1:] -= A[j + 1:, j].real**2 + A[j + 1:, j].imag**2 else: d[j + 1:] -= A[j + 1:, j]**2 return orig_indices[:size].tolist(), A[:size, :size] def proj_dpp_sampler_kernel_GS(K, size=None, random_state=None): rng = check_random_state(random_state) N, rank = len(K), np.rint(np.trace(K)).astype(int) if size is None: size = rank ground_set = np.arange(N) sampl = np.zeros(size, dtype=int) avail = np.ones(N, dtype=bool) c = np.zeros((N, size)) norm_2 = K.diagonal().copy() for it in range(size): j = rng.choice(ground_set[avail], p=np.abs(norm_2[avail]) / (rank - it)) sampl[it] = j if it == size - 1: break avail[j] = False c[avail, it] = (K[avail, j] - c[avail, :it].dot(c[j, :it])) / np.sqrt(norm_2[j]) norm_2[avail] -= c[avail, it]**2 return sampl.tolist() def proj_dpp_sampler_kernel_Schur(K, size=None, random_state=None): rng = check_random_state(random_state) N, rank = len(K), np.rint(np.trace(K)).astype(int) if size is None: size = rank ground_set = np.arange(N) sampl = np.zeros(size, dtype=int) avail = np.ones(N, dtype=bool) schur_comp = K.diagonal().copy() K_inv = np.zeros((size, size)) for it in range(size): j = rng.choice(ground_set[avail], p=np.abs(schur_comp[avail]) / (rank - it)) sampl[it], avail[j] = j, False if it == 0: K_inv[0, 0] = 1.0 / K[j, j] elif it == 1: i = sampl[0] K_inv[:2, :2] = np.array([[K[j, j], -K[j, i]], [-K[j, i], K[i, i]]]) / (K[i, i] * K[j, j] - K[j, i]**2) elif it < size - 1: temp = K_inv[:it, :it].dot(K[sampl[:it], j]) schur_j = K[j, j] - K[j, sampl[:it]].dot(temp) K_inv[:it, :it] += np.outer(temp, temp / schur_j) K_inv[:it, it] = - temp / schur_j K_inv[it, :it] = K_inv[:it, it] K_inv[it, it] = 1.0 / schur_j else: break K_iY = K[np.ix_(avail, sampl[:it + 1])] schur_comp[avail] = K[avail, avail] - 
inner1d(K_iY.dot(K_inv[:it+1, :it+1]), K_iY, axis=1) return sampl.tolist() def dpp_sampler_generic_kernel(K, random_state=None): rng = check_random_state(random_state) A = K.copy() sample = [] for j in range(len(A)): if rng.rand() < A[j, j]: sample.append(j) else: A[j, j] -= 1 A[j + 1:, j] /= A[j, j] A[j + 1:, j + 1:] -= np.outer(A[j + 1:, j], A[j, j + 1:]) return sample, A def dpp_eig_vecs_selector(ber_params, eig_vecs, random_state=None): rng = check_random_state(random_state) ind_sel = rng.rand(ber_params.size) < ber_params return eig_vecs[:, ind_sel] def proj_dpp_sampler_eig(eig_vecs, mode='GS', size=None, random_state=None): rng = check_random_state(random_state) if eig_vecs.shape[1]: if mode == 'GS': sampl = proj_dpp_sampler_eig_GS(eig_vecs, size, rng) elif mode == 'GS_bis': sampl = proj_dpp_sampler_eig_GS_bis(eig_vecs, size, rng) elif mode == 'KuTa12': sampl = proj_dpp_sampler_eig_KuTa12(eig_vecs, size, rng) else: str_list = ['Invalid sampling mode, choose among:', '- "GS" (default)', '- "GS_bis"', '- "KuTa12"', 'Given "{}"'.format(mode)] raise ValueError('\n'.join(str_list)) else: sampl = [] return sampl
MIT License
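Usage sketch for proj_dpp_sampler_kernel from the context above; hedged: the import path and the QR-built orthogonal-projection kernel are assumptions for illustration, not part of the record.

import numpy as np
from dppy.exact_sampling import proj_dpp_sampler_kernel  # assumed import path

rng = np.random.RandomState(0)
N, rank = 10, 4
eig_vecs, _ = np.linalg.qr(rng.randn(N, rank))  # orthonormal columns
K = eig_vecs.dot(eig_vecs.T)                    # rank-4 orthogonal projection kernel

sample = proj_dpp_sampler_kernel(K, mode='GS', random_state=rng)
print(sample)  # list of at most `rank` distinct indices in 0..N-1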
whtsky/catsup
catsup/cli.py
install
python
def install(name):
    from catsup.themes.install import install_theme
    install_theme(name=name)
Usage:
    catsup install <name>

Options:
    -h --help               Show this screen and exit.
https://github.com/whtsky/catsup/blob/e7db1ce7d84cef7efc9923b9bd9047319fc9822e/catsup/cli.py#L225-L235
import os from catsup.options import g from catsup.logger import logger, enable_pretty_logging enable_pretty_logging() import catsup doc = ( """Catsup v%s Usage: catsup init [<path>] catsup build [-s <file>|--settings=<file>] catsup deploy [-s <file>|--settings=<file>] catsup git [-s <file>|--settings=<file>] catsup rsync [-s <file>|--settings=<file>] catsup server [-s <file>|--settings=<file>] [-p <port>|--port=<port>] catsup webhook [-s <file>|--settings=<file>] [-p <port>|--port=<port>] catsup watch [-s <file>|--settings=<file>] catsup clean [-s <file>|--settings=<file>] catsup themes catsup install <theme> catsup -h | --help catsup --version Options: -h --help Show this screen and exit. -s --settings=<file> specify a config file. [default: config.json] -f --file=<file> specify a wordpress output file. -o --output=<dir> specify a output folder. [default: .] -p --port=<port> specify the server port. [default: 8888] -g --global install theme to global theme folder. """ % catsup.__version__ ) from parguments import Parguments parguments = Parguments(doc, version=catsup.__version__) @parguments.command def init(path): from catsup.parser.utils import create_config_file create_config_file(path) @parguments.command def build(settings): from catsup.generator import Generator generator = Generator(settings) generator.generate() @parguments.command def deploy(settings): import catsup.parser import catsup.deploy config = catsup.parser.config(settings) if config.deploy.default == "git": catsup.deploy.git(config) elif config.deploy.default == "rsync": catsup.deploy.rsync(config) else: logger.error("Unknown deploy: %s" % config.deploy.default) @parguments.command def git(settings): import catsup.parser.config import catsup.deploy config = catsup.parser.config(settings) catsup.deploy.git(config) @parguments.command def rsync(settings): import catsup.parser.config import catsup.deploy config = catsup.parser.config(settings) catsup.deploy.rsync(config) @parguments.command def server(settings, port): import catsup.server preview_server = catsup.server.PreviewServer(settings, port) preview_server.run() @parguments.command def webhook(settings, port): import catsup.server server = catsup.server.WebhookServer(settings, port) server.run() @parguments.command def watch(settings): from catsup.generator import Generator from catsup.server import CatsupEventHandler from watchdog.observers import Observer generator = Generator(settings) generator.generate() event_handler = CatsupEventHandler(generator) observer = Observer() for path in [generator.config.config.source, g.theme.path]: path = os.path.abspath(path) observer.schedule(event_handler, path=path, recursive=True) observer.start() while True: pass @parguments.command def clean(settings: str): import shutil import catsup.parser.config config = catsup.parser.config(settings) for path in [config.config.static_output, config.config.output]: if os.path.exists(path): shutil.rmtree(path) @parguments.command def themes(): from catsup.parser.themes import list_themes list_themes() @parguments.command
MIT License
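Usage sketch for the install command above; hedged: it assumes catsup and its theme helpers are importable, and the theme name below is a hypothetical placeholder rather than a value from the record.

from catsup.themes.install import install_theme

install_theme(name="clean")  # equivalent to `catsup install clean`; "clean" is a placeholder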
peoplepower/botlab
com.ppc.Bot/utilities/utilities.py
relative_f_to_c_degree
python
def relative_f_to_c_degree(fahrenheit_degree):
    return float(fahrenheit_degree) * 0.555556
Convert relative degrees Fahrenheit to Celsius.

For example, if I want to increase the thermostat 3 degrees F, then I need to adjust it 1.66668 degrees C.

:param fahrenheit_degree: relative number of degrees F
:return: relative number of degrees C
https://github.com/peoplepower/botlab/blob/21cc90c558a17b7ef4a42bca247b437d2f968dc0/com.ppc.Bot/utilities/utilities.py#L183-L190
ONE_SECOND_MS = 1000 ONE_MINUTE_MS = 60 * ONE_SECOND_MS ONE_HOUR_MS = ONE_MINUTE_MS * 60 ONE_DAY_MS = ONE_HOUR_MS * 24 ONE_WEEK_MS = ONE_DAY_MS * 7 ONE_MONTH_MS = ONE_DAY_MS * 30 ONE_YEAR_MS = ONE_DAY_MS * 365 ICON_FONT_FONTAWESOME_REGULAR = "far" ICON_FONT_FONTAWESOME_BOLD = "fab" ICON_FONT_FONTAWESOME_LIGHT = "fal" ICON_FONT_FONTAWESOME_SOLID = "fas" ICON_FONT_PEOPLEPOWER_REGULAR = "iotr" ICON_FONT_PEOPLEPOWER_LIGHT = "iotl" ICON_FONT_WEATHER_REGULAR = "wir" ICON_FONT_WEATHER_LIGHT = "wil" MODE_HOME = "HOME" MODE_AWAY = "AWAY" MODE_STAY = "STAY" MODE_TEST = "TEST" MODE_ATTRIBUTE_SILENT = "SILENT" MODE_ATTRIBUTE_DURESS = "DURESS" OCCUPANCY_STATUS_PRESENT = "PRESENT" OCCUPANCY_STATUS_ABSENT = "ABSENT" OCCUPANCY_STATUS_H2S = "H2S" OCCUPANCY_STATUS_SLEEP = "SLEEP" OCCUPANCY_STATUS_S2H = "S2H" OCCUPANCY_STATUS_H2A = "H2A" OCCUPANCY_STATUS_A2H = "A2H" OCCUPANCY_STATUS_VACATION = "VACATION" ALARM_CODE_GENERAL_BURGLARY = "E130" ALARM_CODE_PERIMETER_WINDOW_BURGLARY = "E131" ALARM_CODE_PERIMETER_DOOR_BURGLARY = "E134" ALARM_CODE_BURGLARY_NO_DISPATCH = "E136" ALARM_CODE_LEAK = "E154" ALARM_CODE_RECENT_CLOSING = "E459" ALARM_CODE_DURESS = "E122" ALARM_CODE_HIGH_TEMPERATURE = "E158" ALARM_CODE_LOW_TEMPERATURE = "E159" ALARM_CODE_CARBON_MONOXIDE = "E162" ALARM_CODE_MEDICATION_DISPENCER = "E330" ALARM_CODE_SMOKE_DETECTOR = "E111" ALARM_CODE_MEDICAL_ALARM = "E100" ALARM_CODE_GENERAL_MEDICAL_ALARM = "E102" ALARM_CODE_WELLNESS_NO_DISPATCH = "E103" ALARM_CODE_WELLNESS_DISPATCH = "E106" ALARM_CODE_COMMS_FAILURE = "E354" PROFESSIONAL_MONITORING_NEVER_PURCHASED = 0 PROFESSIONAL_MONITORING_PURCHASED_BUT_NOT_ENOUGH_INFO = 1 PROFESSIONAL_MONITORING_REGISTRATION_PENDING = 2 PROFESSIONAL_MONITORING_REGISTERED = 3 PROFESSIONAL_MONITORING_CANCELLATION_PENDING = 4 PROFESSIONAL_MONITORING_CANCELLED = 5 PROFESSIONAL_MONITORING_ALERT_STATUS_QUIET = 0 PROFESSIONAL_MONITORING_ALERT_STATUS_RAISED = 1 PROFESSIONAL_MONITORING_ALERT_STATUS_CANCELLED = 2 PROFESSIONAL_MONITORING_ALERT_STATUS_REPORTED = 3 PUSH_SOUND_ALARM = "alarm.wav" PUSH_SOUND_BEEP = "beep.wav" PUSH_SOUND_BELL = "bell.wav" PUSH_SOUND_BIRD = "bird.wav" PUSH_SOUND_BLING = "bling.wav" PUSH_SOUND_CAMERA = "camera_shutter.wav" PUSH_SOUND_CLICK = "click.wav" PUSH_SOUND_DOG = "dog.wav" PUSH_SOUND_DROID = "droid.wav" PUSH_SOUND_ENTRY_DELAY = "entry_delay.wav" PUSH_SOUND_FULLY_ARMED = "fullyarmed.wav" PUSH_SOUND_GUN_COCK = "guncock.wav" PUSH_SOUND_GUN_SHOT = "gunshot.wav" PUSH_SOUND_LOCK = "lock.wav" PUSH_SOUND_PHASER = "phaser.wav" PUSH_SOUND_PONG = "pong.wav" PUSH_SOUND_SILENCE = "silence.wav" PUSH_SOUND_TOGGLE = "toggle.wav" PUSH_SOUND_TRUMPET = "trumpet.wav" PUSH_SOUND_WARNING = "warning.wav" PUSH_SOUND_WHISTLE = "whistle.wav" PUSH_SOUND_WHOOPS = "whoops.wav" def can_contact_customer_support(botengine): import importlib try: properties = importlib.import_module('properties') except ImportError: return False if properties.get_property(botengine, "CS_SCHEDULE_URL") is not None: if len(properties.get_property(botengine, "CS_SCHEDULE_URL")) > 0: return True if properties.get_property(botengine, "CS_EMAIL_ADDRESS") is not None: if len(properties.get_property(botengine, "CS_EMAIL_ADDRESS")) > 0: return True if properties.get_property(botengine, "CS_PHONE_NUMBER") is not None: if len(properties.get_property(botengine, "CS_PHONE_NUMBER")) > 0: return True def alarm_code_to_description(code): if ALARM_CODE_GENERAL_BURGLARY in code: return _("has a burglar alarm") elif ALARM_CODE_PERIMETER_WINDOW_BURGLARY in code: return _("has a burglar alarm because a window opened") elif 
ALARM_CODE_PERIMETER_DOOR_BURGLARY in code: return _("has a burglar alarm because a door opened") elif ALARM_CODE_LEAK in code: return _("is experiencing a water leak") elif ALARM_CODE_RECENT_CLOSING in code: return _("has a burglar alarm, shortly after arming") elif ALARM_CODE_DURESS in code: return _("entered a duress code, indicating a potential threat.") else: return _("may need some assistance") def celsius_to_fahrenheit(celsius): return float(celsius) * 9.0/5.0 + 32 def fahrenheit_to_celsius(fahrenheit): return (float(fahrenheit) - 32) * 5.0/9.0
Apache License 2.0
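A standalone sketch of the conversion above, reproduced verbatim so it runs without the botlab package; the printed value matches the 3 F -> ~1.67 C example in the docstring.

def relative_f_to_c_degree(fahrenheit_degree):
    return float(fahrenheit_degree) * 0.555556

print(relative_f_to_c_degree(3))  # 1.666668, i.e. a 3 degree F bump is ~1.67 degrees C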
shotgunsoftware/sg-jira-bridge
tests/python/test_sync_base.py
TestSyncBase._get_syncer
python
def _get_syncer(self, mocked_sg, name="task_issue"):
    mocked_sg.return_value = self._get_mocked_sg_handle()
    bridge = sg_jira.Bridge.get_bridge(
        os.path.join(self._fixtures_path, "settings.py")
    )
    syncer = bridge.get_syncer(name)
    return syncer, bridge
Helper to get a syncer and a bridge with a mocked ShotGrid.

:param mocked_sg: Mocked shotgun_api3.Shotgun.
:param str name: A syncer name.
https://github.com/shotgunsoftware/sg-jira-bridge/blob/d0602bcbb120b01853bb3bb24c496d356ee53908/tests/python/test_sync_base.py#L46-L58
import os

from shotgun_api3.lib import mockgun

import sg_jira
from test_base import TestBase


class ExtMockgun(mockgun.Shotgun):

    def add_user_agent(*args, **kwargs):
        pass

    def set_session_uuid(*args, **kwargs):
        pass


class TestSyncBase(TestBase):

    def _get_mocked_sg_handle(self):
        return ExtMockgun("https://mocked.my.com", "Ford Prefect", "xxxxxxxxxx",)
MIT License
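A hedged test sketch showing how _get_syncer is typically called: the mock.patch target (shotgun_api3.Shotgun, per the docstring) and the assertion are assumptions for illustration; only TestSyncBase, the helper itself and the "task_issue" syncer name come from the record.

from unittest import mock

class TaskIssueSyncerTest(TestSyncBase):

    @mock.patch("shotgun_api3.Shotgun")  # patch target assumed from the docstring
    def test_get_task_issue_syncer(self, mocked_sg):
        syncer, bridge = self._get_syncer(mocked_sg, name="task_issue")
        self.assertIsNotNone(syncer)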
hachmannlab/chemml
chemml/chem/magpie_python/attributes/generators/composition/GCLPAttributeGenerator.py
GCLPAttributeGenerator.set_phases
python
def set_phases(self, phases, energies):
    self.GCLPCalculator = GCLPCalculator()
    self.GCLPCalculator.add_phases(phases, energies)
Function to define phases used when computing ground states.

Parameters
----------
phases : array-like
    Compositions to consider. A list of CompositionEntry's.
energies : array-like
    Corresponding energies. A list of float values.
https://github.com/hachmannlab/chemml/blob/74a0c6de0229a1c94c5419f16cb9ba24f32bd38d/chemml/chem/magpie_python/attributes/generators/composition/GCLPAttributeGenerator.py#L40-L53
import types
from math import sqrt, log
import numpy as np
import pandas as pd
from ....data.materials.CompositionEntry import CompositionEntry
from ....data.materials.util.GCLPCalculator import GCLPCalculator


class GCLPAttributeGenerator:
    GCLPCalculator = None
    count_phases = True
BSD 3-Clause New or Revised License
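A hedged usage sketch for GCLPAttributeGenerator.set_phases: the absolute import paths, the CompositionEntry constructor call and the energy values are illustrative assumptions; only the generator class and method come from the record.

from chemml.chem.magpie_python.data.materials.CompositionEntry import CompositionEntry  # assumed path
from chemml.chem.magpie_python.attributes.generators.composition.GCLPAttributeGenerator import GCLPAttributeGenerator  # assumed path

phases = [CompositionEntry(composition="NaCl"),
          CompositionEntry(composition="Na"),
          CompositionEntry(composition="Cl")]
energies = [-3.5, 0.0, 0.0]  # made-up formation energies

gen = GCLPAttributeGenerator()
gen.set_phases(phases, energies)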
pykaldi/pykaldi
kaldi/matrix/_matrix.py
_VectorBase.range
python
def range(self, start, length):
    return SubVector(self, start, length)
Returns the given range of elements as a new vector view.

Args:
    start (int): The start index.
    length (int): The length.

Returns:
    SubVector: A vector view representing the given range.
https://github.com/pykaldi/pykaldi/blob/b4e7a15a31286e57c01259edfda54d113b5ceb0e/kaldi/matrix/_matrix.py#L103-L113
import sys import numpy from . import _compressed_matrix from . import _kaldi_matrix from . import _kaldi_matrix_ext from . import _kaldi_vector from . import _kaldi_vector_ext from . import _matrix_ext import _matrix_common from . import _packed_matrix from . import _sp_matrix from . import _tp_matrix from . import _str class _VectorBase(object): def copy_(self, src): if self.dim != src.dim: raise ValueError("Vector of size {} cannot be copied into vector " "of size {}.".format(src.dim, self.dim)) if isinstance(src, _kaldi_vector.VectorBase): return self._copy_from_vec_(src) elif isinstance(src, _kaldi_vector.DoubleVectorBase): _kaldi_vector_ext._copy_from_double_vec(self, src) return self else: raise TypeError("input vector type is not supported.") def clone(self): return Vector(self) def size(self): return (self.dim,) @property def shape(self): return self.size() def approx_equal(self, other, tol=0.01): if not isinstance(other, _kaldi_vector.VectorBase): return False if self.dim != other.dim: return False return self._approx_equal(other, tol) def __eq__(self, other): return self.approx_equal(other, 1e-16) def numpy(self): return _matrix_ext.vector_to_numpy(self) @property def data(self): return self.numpy().data
Apache License 2.0
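A hedged usage sketch for _VectorBase.range: it assumes pykaldi is installed and that kaldi.matrix.Vector accepts a Python list in its constructor; only the range call and the SubVector return type come from the record.

from kaldi.matrix import Vector  # assumed public entry point

v = Vector([1.0, 2.0, 3.0, 4.0, 5.0])  # list-based constructor assumed
sub = v.range(1, 3)                    # view over elements at indices 1, 2, 3
print(sub.numpy())                     # expected: [2. 3. 4.]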
ibm/matrix-capsules-with-em-routing
train_val.py
find_checkpoint
python
def find_checkpoint(load_dir, seen_step):
    ckpt = tf.train.get_checkpoint_state(load_dir)
    if ckpt and ckpt.model_checkpoint_path:
        global_step = extract_step(ckpt.model_checkpoint_path)
        if int(global_step) != seen_step:
            return int(global_step), ckpt.model_checkpoint_path
    return -1, None
Finds the global step for the latest written checkpoint to the load_dir.

Credit: Sara Sabour
https://github.com/Sarasra/models/blob/master/research/capsules/experiment.py

Args:
    load_dir: The directory address to look for the training checkpoints.
    seen_step: Latest step which evaluation has been done on it.

Returns:
    The latest new step in the load_dir and the file path of the latest model
    in load_dir. If no new file is found returns -1 and None.
https://github.com/ibm/matrix-capsules-with-em-routing/blob/2da9cdf9e1787f0b0984f7673f644d47b08f220c/train_val.py#L643-L662
import tensorflow as tf import tensorflow.contrib.slim as slim from tensorflow.python import debug as tf_debug import numpy as np import time import sys import os import re from config import FLAGS import config as conf import models as mod import metrics as met import utils as utl import daiquiri logger = daiquiri.getLogger(__name__) def main(args): tf.set_random_seed(1234) train_dir, train_summary_dir = conf.setup_train_directories() conf.setup_logger(logger_dir=train_dir, name="logger_train.txt") conf.load_or_save_hyperparams(train_dir) logger.info('Using dataset: {}'.format(FLAGS.dataset)) dataset_size_train = conf.get_dataset_size_train(FLAGS.dataset) dataset_size_val = conf.get_dataset_size_validate(FLAGS.dataset) build_arch = conf.get_dataset_architecture(FLAGS.dataset) num_classes = conf.get_num_classes(FLAGS.dataset) create_inputs_train = conf.get_create_inputs(FLAGS.dataset, mode="train") create_inputs_val = conf.get_create_inputs(FLAGS.dataset, mode="validate") logger.info('BUILD TRAIN GRAPH') g_train = tf.Graph() with g_train.as_default(), tf.device('/cpu:0'): global_step = tf.train.get_or_create_global_step() num_batches_per_epoch = int(dataset_size_train / FLAGS.batch_size) lrn_rate = tf.train.exponential_decay(learning_rate = FLAGS.lrn_rate, global_step = global_step, decay_steps = 20000, decay_rate = 0.96) tf.summary.scalar('learning_rate', lrn_rate) opt = tf.train.AdamOptimizer(learning_rate=lrn_rate) input_dict = create_inputs_train() batch_x = input_dict['image'] batch_labels = input_dict['label'] splits_x = tf.split( axis=0, num_or_size_splits=FLAGS.num_gpus, value=batch_x) splits_labels = tf.split( axis=0, num_or_size_splits=FLAGS.num_gpus, value=batch_labels) tower_grads = [] tower_losses = [] tower_logits = [] reuse_variables = None for i in range(FLAGS.num_gpus): with tf.device('/gpu:%d' % i): with tf.name_scope('tower_%d' % i) as scope: logger.info('TOWER %d' % i) with slim.arg_scope([slim.variable], device='/cpu:0'): loss, logits = tower_fn( build_arch, splits_x[i], splits_labels[i], scope, num_classes, reuse_variables=reuse_variables, is_train=True) reuse_variables = True grads = opt.compute_gradients(loss) tower_grads.append(grads) tower_logits.append(logits) tower_losses.append(loss) tf.summary.scalar("loss", loss) grad = average_gradients(tower_grads) grad_check = ([tf.check_numerics(g, message='Gradient NaN Found!') for g, _ in grad if g is not None] + [tf.check_numerics(loss, message='Loss NaN Found')]) with tf.control_dependencies(grad_check): update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) with tf.control_dependencies(update_ops): train_op = opt.apply_gradients(grad, global_step=global_step) loss = tf.reduce_mean(tower_losses) logits = tf.concat(tower_logits, axis=0) acc = met.accuracy(logits, batch_labels) probs = tf.nn.softmax(logits=logits) labels_oh = tf.one_hot(batch_labels, num_classes) trn_metrics = {'loss' : loss, 'labels' : batch_labels, 'labels_oh' : labels_oh, 'logits' : logits, 'probs' : probs, 'acc' : acc, } trn_reset = {} trn_read = {} tf.summary.scalar('trn_loss', loss) tf.summary.scalar('trn_acc', acc) saver = tf.train.Saver(tf.global_variables(), max_to_keep=None) train_params = np.sum([np.prod(v.get_shape().as_list()) for v in tf.trainable_variables()]).astype(np.int32) logger.info('Trainable Parameters: {}'.format(train_params)) trn_summary = tf.summary.merge_all() logger.info('BUILD VALIDATION GRAPH') g_val = tf.Graph() with g_val.as_default(): global_step = tf.train.get_or_create_global_step() num_batches_val = 
int(dataset_size_val / FLAGS.batch_size * FLAGS.val_prop) input_dict = create_inputs_val() batch_x = input_dict['image'] batch_labels = input_dict['label'] splits_x = tf.split( axis=0, num_or_size_splits=FLAGS.num_gpus, value=batch_x) splits_labels = tf.split( axis=0, num_or_size_splits=FLAGS.num_gpus, value=batch_labels) tower_logits = [] reuse_variables = None for i in range(FLAGS.num_gpus): with tf.device('/gpu:%d' % i): with tf.name_scope('tower_%d' % i) as scope: with slim.arg_scope([slim.variable], device='/cpu:0'): loss, logits = tower_fn( build_arch, splits_x[i], splits_labels[i], scope, num_classes, reuse_variables=reuse_variables, is_train=False) reuse_variables = True tower_logits.append(logits) tf.summary.histogram("val_logits", logits) logits = tf.concat(tower_logits, axis=0) val_loss = mod.spread_loss(logits, batch_labels) val_acc = met.accuracy(logits, batch_labels) val_probs = tf.nn.softmax(logits=logits) val_labels_oh = tf.one_hot(batch_labels, num_classes) val_metrics = {'loss' : val_loss, 'labels' : batch_labels, 'labels_oh' : val_labels_oh, 'logits' : logits, 'probs' : val_probs, 'acc' : val_acc, } val_reset = {} val_read = {} tf.summary.scalar("val_loss", val_loss) tf.summary.scalar("val_acc", val_acc) saver = tf.train.Saver(max_to_keep=None) val_summary = tf.summary.merge_all() sess_train = tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False), graph=g_train) if FLAGS.debugger is not None: sess_train = tf_debug.TensorBoardDebugWrapperSession(sess_train, FLAGS.debugger) with g_train.as_default(): sess_train.run([tf.global_variables_initializer(), tf.local_variables_initializer()]) if FLAGS.load_dir is not None: load_dir_checkpoint = os.path.join(FLAGS.load_dir, "train", "checkpoint") prev_step = load_training(saver, sess_train, load_dir_checkpoint) else: prev_step = 0 summary_writer = tf.summary.FileWriter(train_summary_dir, graph=sess_train.graph) sess_val = tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False), graph=g_val) with g_val.as_default(): sess_val.run([tf.local_variables_initializer(), tf.global_variables_initializer()]) SUMMARY_FREQ = 100 SAVE_MODEL_FREQ = num_batches_per_epoch VAL_FREQ = num_batches_per_epoch PROFILE_FREQ = 5 for step in range(prev_step, FLAGS.epoch * num_batches_per_epoch + 1): epoch_decimal = step/num_batches_per_epoch epoch = int(np.floor(epoch_decimal)) try: with g_train.as_default(): if (FLAGS.profile is True) and ((step % PROFILE_FREQ) == 0): logger.info("Train with Profiling") run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE) run_metadata = tf.RunMetadata() else: run_options = None run_metadata = None if step % (num_batches_per_epoch/4) == 1: logger.info("Reset streaming metrics") sess_train.run([trn_reset]) tic = time.time() train_op_v, trn_metrics_v, trn_summary_v = sess_train.run( [train_op, trn_metrics, trn_summary], options=run_options, run_metadata=run_metadata) toc = time.time() trn_read_v = sess_train.run(trn_read) if run_options is not None: summary_writer.add_run_metadata( run_metadata, 'step{:d}'.format(step)) logger.info('TRN' + ' e-{:d}'.format(epoch) + ' stp-{:d}'.format(step) + ' {:.2f}s'.format(toc - tic) + ' loss: {:.4f}'.format(trn_metrics_v['loss']) + ' acc: {:.2f}%'.format(trn_metrics_v['acc']*100) ) except KeyboardInterrupt: sess_train.close() sess_val.close() sys.exit() except tf.errors.InvalidArgumentError as e: logger.warning('%d iteration contains NaN gradients. Discard.' 
% step) logger.error(str(e)) continue else: if (step % SUMMARY_FREQ) == 0: logger.info("Write Train Summary") with g_train.as_default(): summary_writer.add_summary(trn_summary_v, step) if (step % SAVE_MODEL_FREQ) == 100: logger.info("Save Model") with g_train.as_default(): train_checkpoint_dir = train_dir + '/checkpoint' if not os.path.exists(train_checkpoint_dir): os.makedirs(train_checkpoint_dir) ckpt_path = os.path.join(train_checkpoint_dir, 'model.ckpt') saver.save(sess_train, ckpt_path, global_step=step) if (step % VAL_FREQ) == 100: with g_val.as_default(): logger.info("Start Validation") latest_ckpt = tf.train.latest_checkpoint(train_checkpoint_dir) saver.restore(sess_val, latest_ckpt) accuracy_sum = 0 loss_sum = 0 sess_val.run(val_reset) for i in range(num_batches_val): val_metrics_v, val_summary_str_v = sess_val.run( [val_metrics, val_summary]) accuracy_sum += val_metrics_v['acc'] loss_sum += val_metrics_v['loss'] val_read_v = sess_val.run(val_read) ckpt_num = re.split('-', latest_ckpt)[-1] logger.info('VAL ckpt-{}'.format(ckpt_num) + ' bch-{:d}'.format(i) + ' cum_acc: {:.2f}%'.format(accuracy_sum/(i+1)*100) + ' cum_loss: {:.4f}'.format(loss_sum/(i+1)) ) ave_acc = accuracy_sum / num_batches_val ave_loss = loss_sum / num_batches_val logger.info('VAL ckpt-{}'.format(ckpt_num) + ' avg_acc: {:.2f}%'.format(ave_acc*100) + ' avg_loss: {:.4f}'.format(ave_loss) ) logger.info("Write Val Summary") summary_val = tf.Summary() summary_val.value.add(tag="val_acc", simple_value=ave_acc) summary_val.value.add(tag="val_loss", simple_value=ave_loss) summary_writer.add_summary(summary_val, step) sess_train.close() sess_val.close() sys.exit() def tower_fn(build_arch, x, y, scope, num_classes, is_train=True, reuse_variables=None): with tf.variable_scope(tf.get_variable_scope(), reuse=reuse_variables): output = build_arch(x, is_train, num_classes=num_classes) scores = output['scores'] loss = mod.total_loss(scores, y) return loss, scores def average_gradients(tower_grads): average_grads = [] for grad_and_vars in zip(*tower_grads): grads = [] for g, _ in grad_and_vars: expanded_g = tf.expand_dims(g, 0) grads.append(expanded_g) grad = tf.concat(axis=0, values=grads) grad = tf.reduce_mean(grad, 0) v = grad_and_vars[0][1] grad_and_var = (grad, v) average_grads.append(grad_and_var) return average_grads def extract_step(path): file_name = os.path.basename(path) return int(file_name.split('-')[-1]) def load_training(saver, session, load_dir): if tf.gfile.Exists(load_dir): ckpt = tf.train.get_checkpoint_state(load_dir) if ckpt and ckpt.model_checkpoint_path: saver.restore(session, ckpt.model_checkpoint_path) prev_step = extract_step(ckpt.model_checkpoint_path) logger.info("Restored checkpoint") else: raise IOError("""AG: load_ckpt directory exists but cannot find a valid checkpoint to resore, consider using the reset flag""") else: raise IOError("AG: load_ckpt directory does not exist") return prev_step
Apache License 2.0
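Usage sketch for find_checkpoint above; hedged: the checkpoint directory is a hypothetical placeholder, and the function expects a TF 1.x training directory written by the train loop shown in the context.

load_dir = "./logdir/train/checkpoint"  # hypothetical path
seen_step = -1                          # nothing evaluated yet

step, ckpt_path = find_checkpoint(load_dir, seen_step)
if step >= 0:
    print("evaluate checkpoint at step", step, ":", ckpt_path)
else:
    print("no new checkpoint yet")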
cohesity/management-sdk-python
cohesity_management_sdk/models/user_ssh_key.py
UserSshKey.from_dictionary
python
def from_dictionary(cls, dictionary):
    if dictionary is None:
        return None

    ssh_key = dictionary.get('sshKey')
    username = dictionary.get('username')

    return cls(ssh_key, username)
Creates an instance of this model from a dictionary

Args:
    dictionary (dictionary): A dictionary representation of the object as
        obtained from the deserialization of the server's response. The keys
        MUST match property names in the API description.

Returns:
    object: An instance of this structure class.
https://github.com/cohesity/management-sdk-python/blob/1c085d5a10f5f1a87b700e7ad1fc1dcabda41ae5/cohesity_management_sdk/models/user_ssh_key.py#L35-L57
class UserSshKey(object):

    _names = {
        "ssh_key": 'sshKey',
        "username": 'username'
    }

    def __init__(self, ssh_key=None, username=None):
        self.ssh_key = ssh_key
        self.username = username

    @classmethod
Apache License 2.0
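Usage sketch for UserSshKey.from_dictionary above; hedged: the payload values are placeholders and the import path is assumed from the record's file layout, while the dictionary keys mirror the serialized names shown in the class ('sshKey', 'username').

from cohesity_management_sdk.models.user_ssh_key import UserSshKey  # assumed import path

payload = {"sshKey": "ssh-rsa AAAAB3... user@host", "username": "admin"}
key = UserSshKey.from_dictionary(payload)
print(key.username, key.ssh_key)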
quay/quay
auth/auth_context_type.py
SignedAuthContext.is_anonymous
python
def is_anonymous(self):
    return self.kind == ContextEntityKind.anonymous
Returns true if this is an anonymous context.
https://github.com/quay/quay/blob/f50f37a393fa2273234f8ac0aa9f34a03a77a731/auth/auth_context_type.py#L463-L467
import logging from abc import ABCMeta, abstractmethod from cachetools.func import lru_cache from six import add_metaclass from app import app from data import model from flask_principal import Identity, identity_changed from auth.auth_context import set_authenticated_context from auth.context_entity import ContextEntityKind, CONTEXT_ENTITY_HANDLERS from auth.permissions import QuayDeferredPermissionUser from auth.scopes import scopes_from_scope_string logger = logging.getLogger(__name__) @add_metaclass(ABCMeta) class AuthContext(object): @property @abstractmethod def entity_kind(self): pass @property @abstractmethod def is_anonymous(self): pass @property @abstractmethod def authed_oauth_token(self): pass @property @abstractmethod def authed_user(self): pass @property @abstractmethod def has_nonrobot_user(self): pass @property @abstractmethod def identity(self): pass @property @abstractmethod def description(self): pass @property @abstractmethod def credential_username(self): pass @abstractmethod def analytics_id_and_public_metadata(self): pass @abstractmethod def apply_to_request_context(self): pass @abstractmethod def to_signed_dict(self): pass @property @abstractmethod def unique_key(self): pass class ValidatedAuthContext(AuthContext): def __init__( self, user=None, token=None, oauthtoken=None, robot=None, appspecifictoken=None, signed_data=None, ): self.user = user self.robot = robot self.token = token self.oauthtoken = oauthtoken self.appspecifictoken = appspecifictoken self.signed_data = signed_data def tuple(self): return list(vars(self).values()) def __eq__(self, other): return self.tuple() == other.tuple() @property def entity_kind(self): for kind in ContextEntityKind: if hasattr(self, kind.value) and getattr(self, kind.value): return kind return ContextEntityKind.anonymous @property def authed_user(self): authed_user = self._authed_user() if authed_user is not None and not authed_user.enabled: logger.warning("Attempt to reference a disabled user/robot: %s", authed_user.username) return None return authed_user @property def authed_oauth_token(self): return self.oauthtoken def _authed_user(self): if self.oauthtoken: return self.oauthtoken.authorized_user if self.appspecifictoken: return self.appspecifictoken.user if self.signed_data: return model.user.get_user(self.signed_data["user_context"]) return self.user if self.user else self.robot @property def is_anonymous(self): return not self.authed_user and not self.token and not self.signed_data @property def has_nonrobot_user(self): return bool(self.authed_user and not self.robot) @property def identity(self): if self.oauthtoken: scope_set = scopes_from_scope_string(self.oauthtoken.scope) return QuayDeferredPermissionUser.for_user(self.oauthtoken.authorized_user, scope_set) if self.authed_user: return QuayDeferredPermissionUser.for_user(self.authed_user) if self.token: return Identity(self.token.get_code(), "token") if self.signed_data: identity = Identity(None, "signed_grant") identity.provides.update(self.signed_data["grants"]) return identity return None @property def entity_reference(self): if self.entity_kind == ContextEntityKind.anonymous: return None return getattr(self, self.entity_kind.value) @property def description(self): handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]() return handler.description(self.entity_reference) @property def credential_username(self): handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]() return handler.credential_username(self.entity_reference) def analytics_id_and_public_metadata(self): 
handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]() return handler.analytics_id_and_public_metadata(self.entity_reference) def apply_to_request_context(self): set_authenticated_context(self) if self.identity: identity_changed.send(app, identity=self.identity) @property def unique_key(self): signed_dict = self.to_signed_dict() return "%s-%s" % (signed_dict["entity_kind"], signed_dict.get("entity_reference", "(anon)")) def to_signed_dict(self): dict_data = { "version": 2, "entity_kind": self.entity_kind.value, } if self.entity_kind != ContextEntityKind.anonymous: handler = CONTEXT_ENTITY_HANDLERS[self.entity_kind]() dict_data.update( { "entity_reference": handler.get_serialized_entity_reference( self.entity_reference ), } ) if self.token: dict_data.update( { "kind": "token", "token": self.token.get_code(), } ) if self.oauthtoken: dict_data.update( { "kind": "oauth", "oauth": self.oauthtoken.uuid, "user": self.authed_user.username, } ) if self.user or self.robot: dict_data.update( { "kind": "user", "user": self.authed_user.username, } ) if self.appspecifictoken: dict_data.update( { "kind": "user", "user": self.authed_user.username, } ) if self.is_anonymous: dict_data.update( { "kind": "anonymous", } ) return dict_data class SignedAuthContext(AuthContext): def __init__(self, kind, signed_data, v1_dict_format): self.kind = kind self.signed_data = signed_data self.v1_dict_format = v1_dict_format @property def unique_key(self): if self.v1_dict_format: return self._get_validated().unique_key signed_dict = self.signed_data return "%s-%s" % (signed_dict["entity_kind"], signed_dict.get("entity_reference", "(anon)")) @classmethod def build_from_signed_dict(cls, dict_data, v1_dict_format=False): if not v1_dict_format: entity_kind = ContextEntityKind(dict_data.get("entity_kind", "anonymous")) return SignedAuthContext(entity_kind, dict_data, v1_dict_format) kind_string = dict_data.get("kind", "anonymous") if kind_string == "oauth": kind_string = "oauthtoken" kind = ContextEntityKind(kind_string) return SignedAuthContext(kind, dict_data, v1_dict_format) @lru_cache(maxsize=1) def _get_validated(self): if not self.v1_dict_format: if self.kind == ContextEntityKind.anonymous: return ValidatedAuthContext() serialized_entity_reference = self.signed_data["entity_reference"] handler = CONTEXT_ENTITY_HANDLERS[self.kind]() entity_reference = handler.deserialize_entity_reference(serialized_entity_reference) if entity_reference is None: logger.debug( "Could not deserialize entity reference `%s` under kind `%s`", serialized_entity_reference, self.kind, ) return ValidatedAuthContext() return ValidatedAuthContext(**{self.kind.value: entity_reference}) kind_string = self.signed_data.get("kind", "anonymous") if kind_string == "oauth": kind_string = "oauthtoken" kind = ContextEntityKind(kind_string) if kind == ContextEntityKind.anonymous: return ValidatedAuthContext() if kind == ContextEntityKind.user or kind == ContextEntityKind.robot: user = model.user.get_user(self.signed_data.get("user", "")) if not user: return None return ( ValidatedAuthContext(robot=user) if user.robot else ValidatedAuthContext(user=user) ) if kind == ContextEntityKind.token: token = model.token.load_token_data(self.signed_data.get("token")) if not token: return None return ValidatedAuthContext(token=token) if kind == ContextEntityKind.oauthtoken: user = model.user.get_user(self.signed_data.get("user", "")) if not user: return None token_uuid = self.signed_data.get("oauth", "") oauthtoken = model.oauth.lookup_access_token_for_user(user, 
token_uuid) if not oauthtoken: return None return ValidatedAuthContext(oauthtoken=oauthtoken) raise Exception( "Unknown auth context kind `%s` when deserializing %s" % (kind, self.signed_data) ) @property def entity_kind(self): return self.kind @property
Apache License 2.0
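Usage sketch for SignedAuthContext.is_anonymous above; hedged: it assumes is_anonymous is exposed as a property (as the abstract AuthContext base in the context declares) and that the surrounding quay application modules are importable.

from auth.auth_context_type import SignedAuthContext  # assumed import path

ctx = SignedAuthContext.build_from_signed_dict({"version": 2, "entity_kind": "anonymous"})
print(ctx.is_anonymous)  # True: the decoded kind is ContextEntityKind.anonymous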
pkgcore/pkgcore
src/pkgcore/resolver/pigeonholes.py
PigeonHoledSlots.check_limiters
python
def check_limiters(self, obj):
    key = obj.key
    return [x for x in self.limiters.get(key, ()) if x.match(obj)]
Return any limiters conflicting with the passed-in obj.
https://github.com/pkgcore/pkgcore/blob/6c57606c15101590f4eed81636ae583f3f900d6a/src/pkgcore/resolver/pigeonholes.py#L57-L60
__all__ = ("PigeonHoledSlots",) from ..restrictions import restriction class PigeonHoledSlots: def __init__(self): self.slot_dict = {} self.limiters = {} def fill_slotting(self, obj, force=False): l = self.check_limiters(obj) key = obj.key dslot = obj.slot l.extend(x for x in self.slot_dict.get(key, ()) if x.slot == dslot) if not l or force: self.slot_dict.setdefault(key, []).append(obj) return l def get_conflicting_slot(self, pkg): for x in self.slot_dict.get(pkg.key, ()): if pkg.slot == x.slot: return x return None def find_atom_matches(self, atom, key=None): if key is None: key = atom.key return list(filter(atom.match, self.slot_dict.get(key, ()))) def add_limiter(self, atom, key=None): if not isinstance(atom, restriction.base): raise TypeError( f"atom must be a restriction.base derivative: got {atom!r}, key={key!r}") if key is None: key = atom.key self.limiters.setdefault(key, []).append(atom) return self.find_atom_matches(atom, key=key)
BSD 3-Clause New or Revised License
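Usage sketch for PigeonHoledSlots above; hedged: FakePkg is an illustrative stand-in for a pkgcore package object (anything exposing `key` and `slot`), since building a real package requires a configured repository.

from pkgcore.resolver.pigeonholes import PigeonHoledSlots

class FakePkg(object):
    # Hypothetical stand-in, not a pkgcore type.
    def __init__(self, key, slot):
        self.key, self.slot = key, slot

slots = PigeonHoledSlots()
pkg_a = FakePkg("dev-lang/python", "3.10")
pkg_b = FakePkg("dev-lang/python", "3.10")

print(slots.check_limiters(pkg_a))  # [] -- no limiters registered yet
print(slots.fill_slotting(pkg_a))   # [] -- slot was free, pkg_a now occupies it
print(slots.fill_slotting(pkg_b))   # [pkg_a] -- same key/slot, conflict reported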
openstack-archive/congress
congress/datasources/datasource_driver.py
DataSourceDriver.state_set_diff
python
def state_set_diff(self, state1, state2, table=None):
    if table is None:
        diff = {}
        for tablename in state1:
            if tablename not in state2:
                diff[tablename] = set(state1[tablename])
            else:
                diff[tablename] = state1[tablename] - state2[tablename]
        return diff
    else:
        if table not in state1:
            return set()
        if table not in state2:
            return set(state1[table])
        else:
            return state1[table] - state2[table]
Return STATE1 - STATE2.

Given 2 tuplesets STATE1 and STATE2, return the set difference STATE1 - STATE2.
Each tupleset is represented as a dictionary from tablename to set of tuples.
Return value is a tupleset, also represented as a dictionary from tablename to
set of tuples.
https://github.com/openstack-archive/congress/blob/85243abf63dfc7c086e28e9bdb3fb0b7c9d2ad94/congress/datasources/datasource_driver.py#L711-L735
from __future__ import print_function from __future__ import division from __future__ import absolute_import import collections import copy import datetime from functools import cmp_to_key from functools import reduce import hashlib import inspect import json import time import eventlet from oslo_log import log as logging from oslo_utils import strutils import six import yaml from congress.datasources import datasource_utils as ds_utils from congress.db import db_ds_table_data from congress.dse2 import data_service from congress import exception from congress import utils LOG = logging.getLogger(__name__) class DataSourceDriver(data_service.DataService): HDICT = 'HDICT' VDICT = 'VDICT' LIST = 'LIST' VALUE = 'VALUE' DICT_SELECTOR = 'DICT_SELECTOR' DOT_SELECTOR = 'DOT_SELECTOR' TRANSLATION_TYPE = 'translation-type' TABLE_NAME = 'table-name' PARENT_KEY = 'parent-key' ID_COL = 'id-col' ID_COL_NAME = 'id-col' SELECTOR_TYPE = 'selector-type' FIELD_TRANSLATORS = 'field-translators' FIELDNAME = 'fieldname' TRANSLATOR = 'translator' COL = 'col' KEY_COL = 'key-col' VAL_COL = 'val-col' VAL_COL_DESC = 'val-col-desc' EXTRACT_FN = 'extract-fn' IN_LIST = 'in-list' OBJECTS_EXTRACT_FN = 'objects-extract-fn' DESCRIPTION = 'desc' DATA_TYPE = 'data-type' NULLABLE = 'nullable' PARENT_KEY_COL_NAME = 'parent_key' PARENT_COL_NAME = 'parent-col-name' PARENT_KEY_DESC = 'parent-key-desc' HDICT_PARAMS = (TRANSLATION_TYPE, TABLE_NAME, PARENT_KEY, ID_COL, SELECTOR_TYPE, FIELD_TRANSLATORS, IN_LIST, PARENT_COL_NAME, OBJECTS_EXTRACT_FN, PARENT_KEY_DESC) FIELD_TRANSLATOR_PARAMS = (FIELDNAME, COL, DESCRIPTION, TRANSLATOR) VDICT_PARAMS = (TRANSLATION_TYPE, TABLE_NAME, PARENT_KEY, ID_COL, KEY_COL, VAL_COL, TRANSLATOR, PARENT_COL_NAME, OBJECTS_EXTRACT_FN) LIST_PARAMS = (TRANSLATION_TYPE, TABLE_NAME, PARENT_KEY, ID_COL, VAL_COL, TRANSLATOR, PARENT_COL_NAME, OBJECTS_EXTRACT_FN, PARENT_KEY_DESC, VAL_COL_DESC) VALUE_PARAMS = (TRANSLATION_TYPE, EXTRACT_FN, DATA_TYPE, NULLABLE) TRANSLATION_TYPE_PARAMS = (TRANSLATION_TYPE,) VALID_TRANSLATION_TYPES = (HDICT, VDICT, LIST, VALUE) TRANSLATORS = [] def __init__(self, name='', args=None): self.name = name self.type = 'datasource_driver' self.initialized = False self.last_updated_time = None self.last_error = None self.number_of_updates = 0 self.ds_id = args.get('ds_id') if args is not None else None self.prior_state = dict() self.state = dict() self.raw_state = dict() self._translators = [] self._schema = {} self._table_deps = {} self.initialize_translators() super(DataSourceDriver, self).__init__(name) if hasattr(self, 'add_rpc_endpoint'): self.add_rpc_endpoint(DataSourceDriverEndpoints(self)) def get_snapshot(self, table_name): LOG.debug("datasource_driver get_snapshot(%s); %s", table_name, self.state) return self.state.get(table_name, set()) def _make_tmp_state(self, root_table_name, row_data): tmp_state = {} for table in self._table_deps[root_table_name]: tmp_state.setdefault(table, set()) for table, row in row_data: if table in tmp_state: tmp_state[table].add(row) else: LOG.warning('table %s is undefined in translators', table) return tmp_state def _update_state(self, root_table_name, row_data): tmp_state = self._make_tmp_state(root_table_name, row_data) for table in tmp_state: self.state[table] = tmp_state[table] def _get_translator_params(self, translator_type): if translator_type is self.HDICT: return self.HDICT_PARAMS elif translator_type is self.VDICT: return self.VDICT_PARAMS elif translator_type is self.LIST: return self.LIST_PARAMS elif translator_type is self.VALUE: return 
self.VALUE_PARAMS else: raise TypeError("Invalid translator_type") def _validate_non_value_type_properties(self, translator): parent_key = translator.get(self.PARENT_KEY) id_col = translator.get(self.ID_COL) if parent_key and id_col: raise exception.InvalidParamException( 'Specify at most one of %s or %s' % (self.PARENT_KEY, self.ID_COL)) def _validate_hdict_type(self, translator, related_tables): field_translators = translator[self.FIELD_TRANSLATORS] for field_translator in field_translators: self.check_params(field_translator.keys(), self.FIELD_TRANSLATOR_PARAMS) subtranslator = field_translator[self.TRANSLATOR] self._validate_translator(subtranslator, related_tables) def _validate_list_type(self, translator, related_tables): if self.VAL_COL not in translator: raise exception.InvalidParamException( "Param (%s) must be in translator" % self.VAL_COL) subtranslator = translator[self.TRANSLATOR] self._validate_translator(subtranslator, related_tables) def _validate_vdict_type(self, translator, related_tables): if self.KEY_COL not in translator: raise exception.InvalidParamException( "Param (%s) must be in translator" % self.KEY_COL) if self.VAL_COL not in translator: raise exception.InvalidParamException( "Param (%s) must be in translator" % self.VAL_COL) subtranslator = translator[self.TRANSLATOR] self._validate_translator(subtranslator, related_tables) def _validate_by_translation_type(self, translator, related_tables): translation_type = translator[self.TRANSLATION_TYPE] self.check_params(translator.keys(), self._get_translator_params(translation_type)) if translation_type is not self.VALUE: self._validate_non_value_type_properties(translator) table_name = translator[self.TABLE_NAME] if table_name in self.state: raise exception.DuplicateTableName( 'table (%s) used twice' % table_name) self.state[table_name] = set() related_tables.append(table_name) if translation_type is self.HDICT: self._validate_hdict_type(translator, related_tables) elif translation_type is self.LIST: self._validate_list_type(translator, related_tables) elif translation_type is self.VDICT: self._validate_vdict_type(translator, related_tables) def _validate_translator(self, translator, related_tables): translation_type = translator.get(self.TRANSLATION_TYPE) if self.TRANSLATION_TYPE not in translator: raise exception.InvalidParamException( "Param (%s) must be in translator" % self.TRANSLATION_TYPE) if translation_type not in self.VALID_TRANSLATION_TYPES: msg = ("Translation Type %s not a valid translation-type %s" % ( translation_type, self.VALID_TRANSLATION_TYPES)) raise exception.InvalidTranslationType(msg) self._validate_by_translation_type(translator, related_tables) def initialize_translators(self): for translator in self.TRANSLATORS: self.register_translator(translator) def register_translator(self, translator): related_tables = [] if self.TABLE_NAME in translator: self._table_deps[translator[self.TABLE_NAME]] = related_tables self._validate_translator(translator, related_tables) self._translators.append(translator) self._schema.update(self._get_schema(translator, {}).schema) def get_translator(self, translator_name): translator = [t for t in self.get_translators() if t['table-name'] == translator_name] if len(translator) > 0: return translator[0] else: msg = ('translator: %s is not in the datasource' ' driver' % translator_name) raise exception.BadRequest(msg) def get_translators(self): return self._translators SCHEMA_RETURN_TUPLE = collections.namedtuple('SchemaReturnTuple', 'schema id_type') @classmethod def 
_get_schema_hdict(cls, translator, schema, parent_key_type=None): tablename = translator[cls.TABLE_NAME] parent_key = translator.get(cls.PARENT_KEY, None) id_col = translator.get(cls.ID_COL, None) field_translators = translator[cls.FIELD_TRANSLATORS] parent_col_name = None columns = [] if id_col is not None: columns.append(ds_utils.add_column(cls._id_col_name(id_col))) elif parent_key is not None: parent_col_name = translator.get(cls.PARENT_COL_NAME, cls.PARENT_KEY_COL_NAME) desc = translator.get(cls.PARENT_KEY_DESC) columns.append(ds_utils.add_column( parent_col_name, desc, type=parent_key_type)) field_translators_with_order = [ (index, trans) for index, trans in enumerate(field_translators)] field_translators_sorted = sorted( field_translators_with_order, key=cmp_to_key( cls._compare_tuple_by_subtranslator)) columns_indexed = {} def get_current_table_col_type(name): if parent_col_name and parent_col_name == name: return parent_key_type elif name == cls._id_col_name(id_col): return None else: [type] = [column_schema.get('type') for column_schema in columns_indexed.values() if column_schema.get('name') == name] return type for (index, field_translator) in field_translators_sorted: col = field_translator.get( cls.COL, field_translator[cls.FIELDNAME]) desc = field_translator.get(cls.DESCRIPTION) subtranslator = field_translator[cls.TRANSLATOR] if cls.PARENT_KEY in subtranslator: cls._get_schema(subtranslator, schema, parent_key_type=get_current_table_col_type( subtranslator[cls.PARENT_KEY])) else: field_type = subtranslator.get(cls.DATA_TYPE) nullable = subtranslator.get(cls.NULLABLE, True) columns_indexed[index] = ds_utils.add_column( col, desc, field_type, nullable) cls._get_schema(subtranslator, schema) for index in range(0, len(field_translators)): if index in columns_indexed: columns.append(columns_indexed[index]) if tablename in schema: raise exception.InvalidParamException( "table %s already in schema" % tablename) schema[tablename] = tuple(columns) return cls.SCHEMA_RETURN_TUPLE(schema, None) @classmethod def _get_schema_vdict(cls, translator, schema, parent_key_type=None): tablename = translator[cls.TABLE_NAME] parent_key = translator.get(cls.PARENT_KEY, None) id_col = translator.get(cls.ID_COL, None) key_col = translator[cls.KEY_COL] value_col = translator[cls.VAL_COL] subtrans = translator[cls.TRANSLATOR] cls._get_schema(subtrans, schema) if tablename in schema: raise exception.InvalidParamException( "table %s already in schema" % tablename) new_schema = (key_col,) if id_col: new_schema = (cls._id_col_name(id_col),) + new_schema elif parent_key: parent_col_name = translator.get(cls.PARENT_COL_NAME, cls.PARENT_KEY_COL_NAME) new_schema = (parent_col_name,) + new_schema if cls.PARENT_KEY not in subtrans: new_schema = new_schema + (value_col,) schema[tablename] = new_schema return cls.SCHEMA_RETURN_TUPLE(schema, None) @classmethod def _get_schema_list(cls, translator, schema, parent_key_type=None): tablename = translator[cls.TABLE_NAME] parent_key = translator.get(cls.PARENT_KEY, None) id_col = translator.get(cls.ID_COL, None) value_col = translator[cls.VAL_COL] val_desc = translator.get(cls.VAL_COL_DESC) trans = translator[cls.TRANSLATOR] cls._get_schema(trans, schema) if tablename in schema: raise exception.InvalidParamException( "table %s already in schema" % tablename) if id_col: schema[tablename] = (ds_utils.add_column(cls._id_col_name(id_col)), ds_utils.add_column(value_col)) elif parent_key: parent_col_name = translator.get(cls.PARENT_COL_NAME, cls.PARENT_KEY_COL_NAME) desc = 
translator.get(cls.PARENT_KEY_DESC) schema[tablename] = (ds_utils.add_column(parent_col_name, desc), ds_utils.add_column(value_col, val_desc)) else: schema[tablename] = (ds_utils.add_column(value_col, val_desc), ) return cls.SCHEMA_RETURN_TUPLE(schema, None) @classmethod def _get_schema(cls, translator, schema, parent_key_type=None): cls.check_translation_type(translator.keys()) translation_type = translator[cls.TRANSLATION_TYPE] if translation_type == cls.HDICT: return cls._get_schema_hdict(translator, schema, parent_key_type) elif translation_type == cls.VDICT: return cls._get_schema_vdict(translator, schema, parent_key_type) elif translation_type == cls.LIST: return cls._get_schema_list(translator, schema, parent_key_type) elif translation_type == cls.VALUE: return cls.SCHEMA_RETURN_TUPLE(schema, None) else: raise AssertionError('Unexpected translator type %s' % translation_type) @classmethod def get_schema(cls): all_schemas = {} for trans in cls.TRANSLATORS: cls._get_schema(trans, all_schemas) return all_schemas @classmethod def get_tablename(cls, table_id): return table_id if table_id in cls.get_tablenames() else None @classmethod def get_tablenames(cls): return set(cls.get_schema().keys()) def get_row_data(self, table_id, *args, **kwargs): results = [] try: table_state = self.state[table_id] except KeyError: m = ("tablename '%s' does not exist'" % (table_id)) LOG.debug(m) raise exception.NotFound(m) for tup in table_state: d = {} d['data'] = utils.tuple_to_congress(tup) results.append(d) return results def get_column_map(self, tablename): schema = self.get_schema() if tablename not in schema: return col_map = {} for index, name in enumerate(schema[tablename]): if isinstance(name, dict): col_map[name['name']] = index else: col_map[name] = index return col_map
Apache License 2.0
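Usage sketch for state_set_diff above; a minimal hedged illustration using plain dict-of-set "tuplesets". Calling the method unbound with None for self works because self is unused, assuming Python 3; the table names and tuples are placeholders.

from congress.datasources.datasource_driver import DataSourceDriver  # assumed import path

state1 = {"ports": {("p1",), ("p2",)}, "vms": {("vm1",)}}
state2 = {"ports": {("p2",)}}

print(DataSourceDriver.state_set_diff(None, state1, state2))
# {'ports': {('p1',)}, 'vms': {('vm1',)}}
print(DataSourceDriver.state_set_diff(None, state1, state2, table="vms"))
# {('vm1',)}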
ahwillia/pyneuron-toolbox
PyNeuronToolbox/neuromorpho.py
metadata
python
def metadata(neuron_name):
    html = urllib2.urlopen('http://neuromorpho.org/neuron_info.jsp?neuron_name=%s' % neuron_name).read()
    html = html.replace('&nbsp;', ' ')
    html = html.replace('&#956;m<sup>2</sup>', ' ')
    html = html.replace('&#956;m', ' ')
    html = html.replace('&deg;', ' ')
    html = html.replace('<b>x</b>', ' ')
    html = html.replace('<sup>3</sup>', '')
    html2 = html.replace('\n', '')
    keys = [i[1][:-3].strip() for i in re.findall('<td align="right" width="50%"(.*?)>(.*?)</td>', html2)]
    values = [i[1].strip() for i in re.findall('<td align="left"(.*?)>(.*?)</td>', html2)[2:]]
    return dict(zip(keys, values))
Return a dict of the metadata for the specified neuron.

Example:
    metadata('mb100318-a')
https://github.com/ahwillia/pyneuron-toolbox/blob/fedb9a23553b8375b02768d6406376558b0eaeff/PyNeuronToolbox/neuromorpho.py#L89-L110
import urllib2 import re import json import base64 _cache = {} def _read_neuromorpho_table(bywhat): html = urllib2.urlopen('http://neuromorpho.org/by%s.jsp' % bywhat).read() result = [m.strip() for m in re.findall("maketable\('(.*?)'\)", html)] _cache[bywhat] = set(result) return result def cell_types(): return _read_neuromorpho_table('cell') def species(): return _read_neuromorpho_table('species') def regions(): return _read_neuromorpho_table('region') def cell_names(category): for bywhat, items in _cache.iteritems(): if category in items: return _get_data_for_by(bywhat, category) for bywhat in ['cell', 'region', 'species']: result = _get_data_for_by(bywhat, category) if result: return result return [] def _get_data_for_by(bywhat, category): query_code = bywhat if bywhat != 'cell' else 'class' html = urllib2.urlopen('http://neuromorpho.org/getdataforby%s.jsp?%s=%s' % (bywhat, query_code, category.replace(' ', '%20'))).read() return [m for m in re.findall("neuron_name=(.*?)'", html)]
MIT License
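Usage sketch for metadata() above; hedged: it needs Python 2 (urllib2) and network access to neuromorpho.org, and it reuses the neuron name from the docstring example; the import path is assumed from the record's file layout.

from PyNeuronToolbox.neuromorpho import metadata  # assumed import path

info = metadata('mb100318-a')
for key in sorted(info):
    print('%s: %s' % (key, info[key]))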
urinx/weixinbot
wxbot_project_py2.7/db/mysql_db.py
MysqlDB.create_table
python
def create_table(self, table, cols):
    if table not in self.table_cols:
        sql = 'CREATE TABLE IF NOT EXISTS %s(id int primary key auto_increment, %s) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci' % (table, cols)
        Log.debug('DB -> %s' % sql)
        self.execute(sql)
        self.table_cols[table] = ['id'] + [c.strip().split(' ')[0] for c in cols.split(',')]
@brief Creates a table in database
@param table String
@param cols String, the cols in table
https://github.com/urinx/weixinbot/blob/d9edcd2c9203fe7dd203b22b71bbc48a31e9492b/wxbot_project_py2.7/db/mysql_db.py#L81-L91
from config import Log import pymysql import threading import traceback def array_join(arr, c): t = '' for a in arr: t += "'%s'" % str(a).replace("'","\\\'") + c return t[:-len(c)] class MysqlDB(object): def __init__(self, conf): self.conf = conf config = { 'host': conf['host'], 'port': conf['port'], 'user': conf['user'], 'passwd': conf['passwd'], 'charset':'utf8mb4', 'cursorclass': pymysql.cursors.DictCursor } self.conn = pymysql.connect(**config) self.conn.autocommit(1) self.lock = threading.Lock() self.create_db(conf['database']) self.conn.select_db(conf['database']) self.table_cols = {} for t in self.show_tables(): self.table_cols[t] = self.get_table_column_name(t) def show_database(self): c = self.conn.cursor() sql = 'SHOW DATABASES' Log.debug('DB -> %s' % sql) c.execute(sql) return [r['Database'] for r in c.fetchall()] def show_tables(self): c = self.conn.cursor() sql = 'SHOW TABLES' Log.debug('DB -> %s' % sql) c.execute(sql) return [r['Tables_in_'+self.conf['database']] for r in c.fetchall()] def create_db(self, db_name): if self.conf['database'] not in self.show_database(): sql = 'CREATE DATABASE IF NOT EXISTS %s CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci' % db_name Log.debug('DB -> %s' % sql) self.execute(sql)
Apache License 2.0
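Usage sketch for MysqlDB.create_table above; hedged: the connection settings and table definition are hypothetical placeholders, a reachable MySQL server and the pymysql dependency from the context are required, and the import path is assumed from the record's file layout.

from db.mysql_db import MysqlDB  # assumed import path

conf = {
    'host': '127.0.0.1',   # placeholder connection settings
    'port': 3306,
    'user': 'wxbot',
    'passwd': 'secret',
    'database': 'wxbot',
}
db = MysqlDB(conf)
db.create_table('messages', 'msg_id varchar(64), content text, create_time int')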
barronalex/dynamic-memory-networks-in-tensorflow
dmn_plus.py
_add_gradient_noise
python
def _add_gradient_noise(t, stddev=1e-3, name=None):
    with tf.variable_scope('gradient_noise'):
        gn = tf.random_normal(tf.shape(t), stddev=stddev)
        return tf.add(t, gn)
Adds gradient noise as described in http://arxiv.org/abs/1511.06807
The input Tensor `t` should be a gradient.
The output will be `t` + gaussian noise.
0.001 was said to be a good fixed value for memory networks.
https://github.com/barronalex/dynamic-memory-networks-in-tensorflow/blob/6b35d5b397f70656d243855fcb24fd8dd1779e70/dmn_plus.py#L55-L62
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import

import sys
import time

import numpy as np
from copy import deepcopy

import tensorflow as tf
from attention_gru_cell import AttentionGRUCell
from tensorflow.contrib.cudnn_rnn.python.ops import cudnn_rnn_ops

import babi_input


class Config(object):
    batch_size = 100
    embed_size = 80
    hidden_size = 80

    max_epochs = 256
    early_stopping = 20

    dropout = 0.9
    lr = 0.001
    l2 = 0.001

    cap_grads = False
    max_grad_val = 10
    noisy_grads = False

    word2vec_init = False
    embedding_init = np.sqrt(3)

    anneal_threshold = 1000
    anneal_by = 1.5

    num_hops = 3
    num_attention_features = 4

    max_allowed_inputs = 130
    num_train = 9000

    floatX = np.float32

    babi_id = "1"
    babi_test_id = ""

    train_mode = True
MIT License
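Usage sketch for _add_gradient_noise above; hedged: the variable, loss and optimizer are illustrative TF 1.x boilerplate, and only the noise call itself comes from the record (matching the noisy_grads option in the config).

import tensorflow as tf  # TF 1.x API, as used throughout the record

w = tf.Variable([1.0, 2.0, 3.0])
loss = tf.reduce_sum(tf.square(w))
opt = tf.train.AdamOptimizer(0.001)

grads_and_vars = opt.compute_gradients(loss)
noisy = [(_add_gradient_noise(g, stddev=1e-3), v)
         for g, v in grads_and_vars if g is not None]
train_op = opt.apply_gradients(noisy)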