Dataset schema:

repository_name      stringlengths 7-107
function_path        stringlengths 4-190
function_identifier  stringlengths 1-236
language             stringclasses 1 value
function             stringlengths 9-647k
docstring            stringlengths 5-488k
function_url         stringlengths 71-285
context              stringlengths 0-2.51M
license              stringclasses 5 values
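A hedged sketch of reading one record with the Hugging Face datasets library (the dataset path is a placeholder; the column names follow the schema above):

from datasets import load_dataset

ds = load_dataset("path/to/this-dataset", split="train")  # placeholder dataset path
row = ds[0]
print(row["repository_name"], row["function_identifier"], row["license"])
print(row["function"][:200])  # source of the focal function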
eric612/mobilenet-yolo-pytorch
utils/image_augmentation.py
Image_Augmentation.transform_od
python
def transform_od(self, image, boxes, labels, difficulties, mean=[0.485, 0.456, 0.406],
                 std=[0.229, 0.224, 0.225], phase='train', expand=True, expand_scale=1.5):
    assert phase in {'train', 'test'}
    new_image = image
    new_boxes = boxes
    new_labels = labels
    new_difficulties = difficulties
    if phase == 'train':
        new_image = self.photometric_distort(new_image)
        new_image = FT.to_tensor(new_image)
        if random.random() < 0.5 and expand == True:
            new_image, new_boxes = self.expand_od(new_image, boxes, filler=mean, expand_scale=expand_scale)
        new_image, new_boxes, new_labels, new_difficulties = self.random_crop_od(
            new_image, new_boxes, new_labels, new_difficulties)
        new_image = FT.to_pil_image(new_image)
        if random.random() < 0.5:
            new_image, new_boxes = self.flip_od(new_image, new_boxes)
    return new_image, new_boxes, new_labels, new_difficulties
Apply the transformations above.

:param image: image, a PIL Image
:param boxes: bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
:param labels: labels of objects, a tensor of dimensions (n_objects)
:param difficulties: difficulties of detection of these objects, a tensor of dimensions (n_objects)
:param phase: one of 'train' or 'test', since different sets of transformations are applied
:return: transformed image, transformed bounding box coordinates, transformed labels, transformed difficulties
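A minimal usage sketch, assuming the class is importable as in this repository; the image file and box values below are placeholders:

from PIL import Image
import torch

from utils.image_augmentation import Image_Augmentation

aug = Image_Augmentation()
image = Image.open('example.jpg').convert('RGB')      # placeholder image
boxes = torch.FloatTensor([[30., 40., 200., 220.]])   # (n_objects, 4) boundary coordinates
labels = torch.LongTensor([1])
difficulties = torch.ByteTensor([0])

new_image, new_boxes, new_labels, new_difficulties = aug.transform_od(
    image, boxes, labels, difficulties, phase='train')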
https://github.com/eric612/mobilenet-yolo-pytorch/blob/5ef0ff5944ac2d71152827361014b2c193e4b26c/utils/image_augmentation.py#L270-L319
import numpy as np import torch import random import torchvision.transforms.functional as FT from torchvision import transforms from PIL import Image, ImageDraw, ImageFont import cv2 from utils.iou import* class Image_Augmentation(): def expand_od(self,image, boxes, filler,expand_scale): original_h = image.size(1) original_w = image.size(2) max_scale = expand_scale scale = random.uniform(1, max_scale) new_h = int(scale * original_h) new_w = int(scale * original_w) filler = torch.FloatTensor(filler) new_image = torch.ones((3, new_h, new_w), dtype=torch.float) * filler.unsqueeze(1).unsqueeze(1) left = random.randint(0, new_w - original_w) right = left + original_w top = random.randint(0, new_h - original_h) bottom = top + original_h new_image[:, top:bottom, left:right] = image new_boxes = boxes + torch.FloatTensor([left, top, left, top]).unsqueeze(0) return new_image, new_boxes def random_crop_od(self,image, boxes, labels, difficulties): original_h = image.size(1) original_w = image.size(2) while True: min_overlap = random.choice([0., .1, .3, .5, .7, .9, None]) if min_overlap is None: return image, boxes, labels, difficulties max_trials = 50 for _ in range(max_trials): min_scale = 0.3 scale_h = random.uniform(min_scale, 1) scale_w = random.uniform(min_scale, 1) new_h = int(scale_h * original_h) new_w = int(scale_w * original_w) aspect_ratio = new_h / new_w if not 0.5 < aspect_ratio < 2: continue left = random.randint(0, original_w - new_w) right = left + new_w top = random.randint(0, original_h - new_h) bottom = top + new_h crop = torch.FloatTensor([left, top, right, bottom]) overlap = find_jaccard_overlap(crop.unsqueeze(0), boxes) overlap = overlap.squeeze(0) if overlap.max().item() < min_overlap: continue new_image = image[:, top:bottom, left:right] bb_centers = (boxes[:, :2] + boxes[:, 2:]) / 2. centers_in_crop = (bb_centers[:, 0] > left) * (bb_centers[:, 0] < right) * (bb_centers[:, 1] > top) * ( bb_centers[:, 1] < bottom) if not centers_in_crop.any(): continue new_boxes = boxes[centers_in_crop, :] new_labels = labels[centers_in_crop] new_difficulties = difficulties[centers_in_crop] new_boxes[:, :2] = torch.max(new_boxes[:, :2], crop[:2]) new_boxes[:, :2] -= crop[:2] new_boxes[:, 2:] = torch.min(new_boxes[:, 2:], crop[2:]) new_boxes[:, 2:] -= crop[:2] return new_image, new_boxes, new_labels, new_difficulties def flip_od(self,image, boxes): new_image = FT.hflip(image) new_boxes = boxes new_boxes[:, 0] = image.width - boxes[:, 0] - 1 new_boxes[:, 2] = image.width - boxes[:, 2] - 1 new_boxes = new_boxes[:, [2, 1, 0, 3]] return new_image, new_boxes def photometric_distort(self,image): new_image = image distortions = [FT.adjust_brightness, FT.adjust_contrast, FT.adjust_saturation, FT.adjust_hue, FT.adjust_gamma] random.shuffle(distortions) for d in distortions: if random.random() < 0.5: if d.__name__ is 'adjust_hue': adjust_factor = random.uniform(-18 / 255., 18 / 255.) 
else: adjust_factor = random.uniform(0.5, 1.5) new_image = d(new_image, adjust_factor) return new_image def generate_mosaic_mask(self,num,size): mosaic_mask = [[0,0,size[0],size[1]]] x_center = int(random.uniform(.25,.75)*size[0]) y_center = int(random.uniform(.25,.75)*size[1]) if num == 2 : mosaic_mask1 = [[0,0,x_center,size[1]],[x_center,0,size[0],size[1]]] mosaic_mask2 = [[0,0,size[0],y_center],[0,y_center,size[0],size[1]]] mosaic_mask = random.choice([mosaic_mask1,mosaic_mask2]) elif num == 3 : mosaic_mask1 = [[0,0,size[0],y_center],[0,y_center,x_center,size[1]],[x_center,y_center,size[0],size[1]]] mosaic_mask2 = [[0,0,x_center,y_center],[x_center,0,size[0],y_center],[0,y_center,size[0],size[1]]] mosaic_mask3 = [[0,0,x_center,size[1]],[x_center,0,size[0],y_center],[x_center,y_center,size[0],size[1]]] mosaic_mask4 = [[0,0,x_center,y_center],[x_center,0,size[0],size[1]],[0,y_center,x_center,size[1]]] mosaic_mask = random.choice([mosaic_mask1,mosaic_mask2,mosaic_mask3,mosaic_mask4]) elif num == 4 : mosaic_mask = [[0,0,x_center,y_center],[x_center,0,size[0],y_center],[0,y_center,x_center,size[1]],[x_center,y_center,size[0],size[1]]] return mosaic_mask def Mosaic(self,source,size): new_data = list() background = np.zeros((size[0],size[1],3)) counter = 0 num = len(source) mosaic_mask = self.generate_mosaic_mask(num,size) new_labels = torch.Tensor(0,5) for img,label in source : width, height = (mosaic_mask[counter][2]-mosaic_mask[counter][0]),(mosaic_mask[counter][3]-mosaic_mask[counter][1]) aspect_ratio_src = img.height/img.width min_ratio,max_ratio = aspect_ratio_src*0.5 , aspect_ratio_src*2 aspect_ratio_tar = height/width offset_x = 0 offset_y = 0 if aspect_ratio_tar<min_ratio : scale = 1/min_ratio offset_x = random.randint(0, int(width-height*scale)) width = int(height*scale) if aspect_ratio_tar>max_ratio : offset_y = random.randint(0, int(height-width*max_ratio)) height = int(width*max_ratio) new_img = img.resize((width,height)) new_img = np.array(new_img) mean = np.mean(new_img, axis=tuple(range(new_img.ndim-1))) x1 = mosaic_mask[counter][0]+offset_x y1 = mosaic_mask[counter][1]+offset_y x2 = min(mosaic_mask[counter][2],x1+width) y2 = min(mosaic_mask[counter][3],y1+height) background[mosaic_mask[counter][1]:mosaic_mask[counter][3],mosaic_mask[counter][0]:mosaic_mask[counter][2]] = mean background[y1:y2,x1:x2] = new_img if label.size(0): new_box = label[...,1:5] w_scale = (size[0]/width) h_scale = (size[1]/height) new_box[...,0],new_box[...,2] = new_box[...,0]/w_scale,new_box[...,2]/w_scale new_box[...,1],new_box[...,3] = new_box[...,1]/h_scale,new_box[...,3]/h_scale new_box[...,0] = new_box[...,0] + (mosaic_mask[counter][0]+offset_x)/size[0] new_box[...,1] = new_box[...,1] + (mosaic_mask[counter][1]+offset_y)/size[1] new_label = torch.cat((label[...,0].unsqueeze(1),new_box),1) new_labels = torch.cat((new_labels,new_label)) counter = counter + 1 new_img = Image.fromarray(background.astype(np.uint8)) new_data = [new_img,new_labels] return new_data
MIT License
unity-technologies/datasetinsights
datasetinsights/dashboard.py
render_content
python
def render_content(value, json_data_root):
    data_root = json.loads(json_data_root)
    if value == "dataset_overview":
        return overview.html_overview(data_root)
    elif value == "object_detection":
        return render_object_detection_layout(data_root)
Method for rendering dashboard layout based on the selected tab value.

Args:
    value(str): selected tab value
    json_data_root: data root stored in hidden div in json format.

Returns:
    html layout: layout for the selected tab.
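A direct-call sketch of this callback's logic (the data root path is a placeholder; in the running app, Dash invokes the function through the @app.callback decorator shown in the surrounding module):

import json

from datasetinsights.dashboard import render_content

json_data_root = json.dumps("/data/synthetic_dataset")   # placeholder data root
layout = render_content("dataset_overview", json_data_root)
# `layout` is the Dash HTML layout rendered for the Overview tab.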
https://github.com/unity-technologies/datasetinsights/blob/0c6e2407f3b6ceb7a38cb82e3bbcf41a6c2d4672/datasetinsights/dashboard.py#L85-L101
import argparse import json import os import dash_core_components as dcc import dash_html_components as html from dash.dependencies import Input, Output import datasetinsights.stats.visualization.overview as overview from datasetinsights.stats.visualization.app import get_app from datasetinsights.stats.visualization.object_detection import ( render_object_detection_layout, ) app = get_app() def main_layout(): app_layout = html.Div( [ html.H1( children="Dataset Insights", style={ "textAlign": "center", "padding": 20, "background": "lightgrey", }, ), html.Div( [ dcc.Tabs( id="page_tabs", value="dataset_overview", children=[ dcc.Tab( label="Overview", value="dataset_overview", ), dcc.Tab( label="Object Detection", value="object_detection", ), ], ), html.Div(id="main_page_tabs"), ] ), dcc.Dropdown(id="dropdown", style={"display": "none"}), html.Div(id="data_root_value", style={"display": "none"}), ] ) return app_layout @app.callback( Output("data_root_value", "children"), [Input("dropdown", "value")] ) def store_data_root(value): json_data_root = json.dumps(data_root) return json_data_root @app.callback( Output("main_page_tabs", "children"), [Input("page_tabs", "value"), Input("data_root_value", "children")], )
Apache License 2.0
esukhia/botok
botok/modifytokens/tokenmerge.py
TokenMerge.__merge_syls_idx
python
def __merge_syls_idx(self):
    first_syl = True
    if self.token2.syls_idx:
        for syl in self.token2.syls_idx:
            if syl:
                new_syl = [i + self.token1.len for i in syl]
                if (
                    first_syl
                    and (self.token1.affix_host and not self.token1.affix)
                    and (not self.token2.affix_host and self.token2.affix)
                ):
                    self.merged.syls_idx[-1] += new_syl
                    self.merged.affix = True
                    first_syl = False
                else:
                    self.merged.syls_idx.append(new_syl)
Updates the indices and adds the syllables to the merged object.

Re-joins the host syllable and the affixed-particle syllable into a single one; affix is then set to True (and affixed likewise), so cleaned_content gets its tsek.
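A toy illustration of the index shift this method performs, using mock objects that carry only the attributes touched here (not botok's real Token class):

import copy
from types import SimpleNamespace

token1 = SimpleNamespace(len=5, syls_idx=[[0, 1, 2]], affix_host=True, affix=False)
token2 = SimpleNamespace(len=2, syls_idx=[[0, 1]], affix_host=False, affix=True)

merged = copy.deepcopy(token1)
# token2's syllable indices are shifted by token1.len before merging:
new_syl = [i + token1.len for i in token2.syls_idx[0]]   # [5, 6]
# host syllable and affixed particle are re-joined into one syllable:
merged.syls_idx[-1] += new_syl                           # [[0, 1, 2, 5, 6]]
merged.affix = True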
https://github.com/esukhia/botok/blob/9009581cc290c800e7d93a405969e10a7c9d2f51/botok/modifytokens/tokenmerge.py#L68-L90
import copy from ..third_party.cqlparser import replace_token_attributes class TokenMerge: def __init__(self, token1, token2, token_changes=None): self.token1 = token1 self.token2 = token2 self.merged = copy.deepcopy(token1) if not self.merged.syls_idx: self.merged.syls_idx = [] if not self.merged.syls: self.merged.syls = [] self.token_changes = token_changes def merge(self): self.merge_attrs() self.replace_attrs() return self.merged def replace_attrs(self): if self.token_changes: replace_token_attributes(self.merged, self.token_changes) def merge_attrs(self): self.__merge_texts() self.__merge_indices() self.__merge_syls_idx() self.__merge_syls_start_end() self.__del_lemma() def __merge_texts(self): self.merged.text += self.token2.text def __merge_indices(self): self.merged.len += self.token2.len def __merge_syls_start_end(self): if ( not self.merged.syls_start_end or not self.token1.syls_start_end or not self.token2.syls_start_end ): return if ( self.token1.affix_host and not self.token1.affix and not self.token2.affix_host and self.token2.affix ): self.merged.syls_start_end[-1]["end"] = self.token2.syls_start_end[0]["end"] self.merged.syls_start_end.extend(self.token2.syls_start_end[1:]) else: self.merged.syls_start_end.extend(self.token2.syls_start_end)
Apache License 2.0
4dnucleome/partseg
package/PartSegCore/analysis/calculation_plan.py
MaskMapper.get_mask_path
python
def get_mask_path(self, file_path: str) -> str:
Calculate mask path based on file_path

:param file_path: path to processed file
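MaskMapper.get_mask_path is abstract, so here is a hedged sketch of what a concrete mapper might look like (this subclass and its suffix rule are hypothetical, not PartSeg's own implementations, and assume MaskMapper is imported from PartSegCore.analysis.calculation_plan):

import os


class SuffixMaskMapper(MaskMapper):
    """Hypothetical mapper: the mask file sits next to the data file with a suffix."""

    def __init__(self, suffix: str = "_mask"):
        self.suffix = suffix

    def get_mask_path(self, file_path: str) -> str:
        base, ext = os.path.splitext(file_path)
        return base + self.suffix + ext


SuffixMaskMapper().get_mask_path("/data/cells/img_01.tif")  # '/data/cells/img_01_mask.tif'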
https://github.com/4dnucleome/partseg/blob/f6bb1bb02c006f2e009e873a0e3bad87469cc90e/package/PartSegCore/analysis/calculation_plan.py#L177-L182
import logging import os import sys import textwrap import typing import uuid from abc import abstractmethod from copy import copy, deepcopy from enum import Enum from ..algorithm_describe_base import ROIExtractionProfile from ..class_generator import BaseSerializableClass, enum_register from ..mask_create import MaskProperty from ..universal_const import Units from . import analysis_algorithm_dict from .measurement_calculation import MeasurementProfile class MaskBase: name: str class RootType(Enum): Image = 0 Project = 1 Mask_project = 2 def __str__(self): return self.name.replace("_", " ") enum_register.register_class(RootType) class MaskCreate(MaskBase, BaseSerializableClass): mask_property: MaskProperty def __str__(self): return f"Mask create: {self.name}\n" + str(self.mask_property).split("\n", 1)[1] class MaskUse(MaskBase, BaseSerializableClass): class MaskSum(MaskBase, BaseSerializableClass): mask1: str mask2: str class MaskIntersection(MaskBase, BaseSerializableClass): mask1: str mask2: str class Save(BaseSerializableClass): suffix: str directory: str algorithm: str short_name: str values: dict class MeasurementCalculate(BaseSerializableClass): __old_names__ = "StatisticCalculate" channel: int units: Units measurement_profile: MeasurementProfile name_prefix: str @typing.overload def __init__(self, channel: int, units: Units, measurement_profile: MeasurementProfile, name_prefix: str): ... @property def name(self): return self.measurement_profile.name def __str__(self): channel = "Like segmentation" if self.channel == -1 else str(self.channel) desc = str(self.measurement_profile).split("\n", 1)[1] return f"MeasurementCalculate \nChannel: {channel}\nUnits: {self.units}\n{desc}\n" def get_save_path(op: Save, calculation: "FileCalculation") -> str: from PartSegCore.analysis.save_functions import save_dict extension = save_dict[op.algorithm].get_default_extension() rel_path = os.path.relpath(calculation.file_path, calculation.base_prefix) rel_path = os.path.splitext(rel_path)[0] if op.directory: file_name = os.path.basename(rel_path) base_rel_path = os.path.dirname(rel_path) return os.path.join(calculation.result_prefix, base_rel_path, op.directory, file_name + op.suffix + extension) return os.path.join(calculation.result_prefix, rel_path + op.suffix + extension) class MaskMapper: name: str @abstractmethod
BSD 3-Clause New or Revised License
aoikuiyuyou/aoikhotkey
src/aoikhotkey/hotkey_spec_parser.py
tag_call_with_info
python
def tag_call_with_info(func):
    setattr(func, _TAG_CALL_WITH_INFO, True)
    return func
Decorator that adds `call with info` tag attribute to decorated function.

:param func: Decorated function.
:return: Decorated function.
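A small usage sketch (the handler function is hypothetical; the attribute name is the module's _TAG_CALL_WITH_INFO constant shown in the context):

from aoikhotkey.hotkey_spec_parser import tag_call_with_info


@tag_call_with_info
def on_hotkey(info):
    # Hypothetical handler; the tag tells the spec parser to pass it the hotkey info dict.
    print(info)


assert getattr(on_hotkey, '_AOIKHOTKEY_TAG_CALL_WITH_INFO', False) is True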
https://github.com/aoikuiyuyou/aoikhotkey/blob/7a04f8fddee02df41ff436a696b5261ae588cb62/src/aoikhotkey/hotkey_spec_parser.py#L113-L125
from __future__ import absolute_import import os.path from subprocess import Popen import webbrowser from .const import HOTKEY_INFO_K_HOTKEY_FULL_SPEC from .const import HOTKEY_INFO_K_HOTKEY_FUNC from .const import HOTKEY_INFO_K_HOTKEY_ORIG_SPEC from .const import HOTKEY_INFO_K_HOTKEY_PATTERN from .const import HOTKEY_INFO_K_HOTKEY_TYPE from .const import HOTKEY_INFO_K_NEED_HOTKEY_INFO_LIST from .const import HOTKEY_TYPE_V_DN from .const import HOTKEY_TYPE_V_KS from .const import HOTKEY_TYPE_V_UP class _CallAll(object): def __init__(self, funcs): self._funcs = funcs def __call__(self): res = None for func in self._funcs: res = func() if res is False: return False return res class NeedHotkeyInfo(object): def hotkey_info_set(self, hotkey_info): raise NotImplementedError() _TAG_CALL_IN_MAIN_THREAD = '_AOIKHOTKEY_TAG_CALL_IN_MAIN_THREAD' def tag_call_in_main_thread(func): setattr(func, _TAG_CALL_IN_MAIN_THREAD, True) return func def tag_call_in_main_thread_exists(func): return getattr(func, _TAG_CALL_IN_MAIN_THREAD, False) _TAG_CALL_WITH_INFO = '_AOIKHOTKEY_TAG_CALL_WITH_INFO'
MIT License
peerchemist/cryptotik
cryptotik/kraken.py
KrakenNormalized.buy_limit
python
def buy_limit(self, pair, rate, amount, leverage=None):
    return self.private_api(self.url + 'private/AddOrder',
                            params={'pair': self.format_pair(pair),
                                    'type': 'buy',
                                    'ordertype': 'limit',
                                    'price': rate,
                                    'volume': amount
                                    })
Creates a buy limit order for <pair> at <rate> for <amount>.
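A hedged usage sketch (credentials are placeholders; the pair is written in the normalized quote-base form with the '-' delimiter that KrakenNormalized.format_pair expects):

from cryptotik.kraken import KrakenNormalized

k = KrakenNormalized(apikey='YOUR_KEY', secret='YOUR_SECRET')  # placeholder credentials
# Limit order: buy 0.5 ETH at 1800 USD per ETH.
order = k.buy_limit('eth-usd', rate=1800, amount=0.5)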
https://github.com/peerchemist/cryptotik/blob/24ffd74c43ff1fc171081e135cb2b66b775af3f3/cryptotik/kraken.py#L521-L528
import hmac import hashlib import time import base64 import requests from cryptotik.common import (headers, ExchangeWrapper, NormalizedExchangeWrapper) from cryptotik.exceptions import (InvalidBaseCurrencyError, InvalidDelimiterError, APIError) from re import findall from decimal import Decimal from datetime import datetime class Kraken(ExchangeWrapper): url = 'https://api.kraken.com/0/' name = 'kraken' delimiter = "" headers = headers taker_fee, maker_fee = 0.00, 0.00 quote_order = 0 base_currencies = ['xbt', 'eur', 'usd', 'eth', 'cad', 'gbp', 'jpy'] @classmethod def format_pair(cls, pair): return "".join(findall(r"[^\W\d_]+|\d+", pair)).upper() def get_base_currencies(self): raise NotImplementedError def __init__(self, apikey=None, secret=None, timeout=None, proxy=None): if apikey and secret: self.apikey = apikey.encode('utf-8') self.secret = secret.encode('utf-8') if proxy: assert proxy.startswith('https'), {'Error': 'Only https proxies supported.'} self.proxy = {'https': proxy} if not timeout: self.timeout = (8, 15) else: self.timeout = timeout self.api_session = requests.Session() def _verify_response(self, response): if response.json()['error']: raise APIError(response.json()['error']) def _generate_signature(self, message): sig = hmac.new(base64.b64decode(self.secret), message, hashlib.sha512) return base64.b64encode(sig.digest()).decode() def api(self, url, params=None): try: result = self.api_session.get(url, headers=self.headers, params=params, timeout=self.timeout, proxies=self.proxy) result.raise_for_status() except requests.exceptions.HTTPError as e: print(e) self._verify_response(result) return result.json()['result'] def get_nonce(self): return int(1000 * time.time()) def private_api(self, url, params={}): urlpath = url[22:] data = params data['nonce'] = self.get_nonce() postdata = requests.compat.urlencode(data) encoded = (str(data['nonce']) + postdata).encode() message = urlpath.encode() + hashlib.sha256(encoded).digest() signature = self._generate_signature(message) try: result = self.api_session.post(url, data=data, headers={ 'API-Key': self.apikey, 'API-Sign': signature}, timeout=self.timeout, proxies=self.proxy) except requests.exceptions.HTTPError as e: print(e) self._verify_response(result) return result.json()['result'] def get_markets(self): markets = self.api(self.url + "public/AssetPairs") return [markets[i]['altname'].lower() for i in markets.keys()] def get_market_ohlcv_data(self, pair, interval, since=None): if str(interval) not in "1, 5, 15, 30, 60, 240, 1440, 10080, 21600".split(', '): raise APIError('Unsupported interval.') return self.api(self.url + 'public/OHLC', params={'pair': self.format_pair(pair), 'interval': interval, 'since': since}) def get_market_ticker(self, pair): p = self.format_pair(pair) return self.api(self.url + "public/Ticker", params={'pair': p})[p] def get_market_volume(self, pair): return self.get_market_ticker(self.format_pair(pair))['v'][1] def get_market_trade_history(self, pair, limit=200): p = self.format_pair(pair) return self.api(self.url + "public/Trades", params={'pair': p})[p][:limit] def get_market_orders(self, pair, depth=100): p = self.format_pair(pair) r = self.api(self.url + "public/Depth", params={'pair': p, 'count': depth}) pair_full_name = list(r.keys())[0] return r[pair_full_name] def get_market_sell_orders(self, pair, depth=100): return self.get_market_orders(pair, depth)['asks'] def get_market_buy_orders(self, pair, depth=100): return self.get_market_orders(pair, depth)['bids'] def get_balances(self): return 
self.private_api(self.url + "private/Balance") def get_deposit_method(self, currency): return self.private_api(self.url + "private/DepositMethods", params={'asset': currency.upper()} )[0]['method'] def get_deposit_address(self, currency): result = self.private_api(self.url + "private/DepositAddresses", params={'asset': currency.upper(), 'method': self.get_deposit_method(currency)} ) if result == []: result = self.private_api(self.url + "private/DepositAddresses", params={'asset': currency.upper(), 'method': self.get_deposit_method(currency), 'new': 'true'} ) return result[0]['address'] def buy_limit(self, pair, rate, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'buy', 'ordertype': 'limit', 'price': rate, 'volume': amount, 'leverage': leverage }) def buy_market(self, pair, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'buy', 'ordertype': 'market', 'volume': amount, 'leverage': leverage }) def sell_limit(self, pair, rate, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'sell', 'ordertype': 'limit', 'price': rate, 'volume': amount, 'leverage': leverage }) def sell_market(self, pair, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'sell', 'ordertype': 'market', 'volume': amount, 'leverage': leverage }) def sell_stop_loss(self, pair, rate, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'sell', 'ordertype': 'stop-loss', 'price': rate, 'volume': amount, 'leverage': leverage }) def buy_stop_loss(self, pair, rate, amount, leverage=None): return self.private_api(self.url + 'private/AddOrder', params={'pair': self.format_pair(pair), 'type': 'buy', 'ordertype': 'stop-loss', 'price': rate, 'volume': amount, 'leverage': leverage }) def withdraw(self, currency, amount, withdrawal_key_name): return self.private_api(self.url + 'private/Withdraw', params={'asset': currency.upper(), 'key': withdrawal_key_name, 'amount': amount }) def get_withdraw_history(self, currency): return self.private_api(self.url + "private/WithdrawStatus", params={'asset': currency.upper()}) def get_deposit_history(self, currency): return self.private_api(self.url + "private/DepositStatus", params={'asset': currency.upper(), 'method': self.get_deposit_method(currency) }) def get_open_positions(self, docalcs=False): return self.private_api(self.url + "private/OpenPositions", params={'docalcs': docalcs} ) def get_open_orders(self): return self.private_api(self.url + "private/OpenOrders", params={'trades': 'true'} )['open'] def get_order(self, orderId): return self.private_api(self.url + "private/QueryOrders", params={'trades': 'true', 'txid': orderId}) def cancel_order(self, orderId): return self.private_api(self.url + "private/CancelOrder", params={'txid': orderId}) def cancel_all_orders(self): for txid in self.get_open_orders(): self.cancel_order(txid) class KrakenNormalized(Kraken, NormalizedExchangeWrapper): def __init__(self, apikey=None, secret=None, timeout=None, proxy=None): super(KrakenNormalized, self).__init__(apikey, secret, timeout, proxy) _names = { 'GNO': 'GNO', 'EOS': 'EOS', 'DASH': 'DASH', 'BCH': 'BCH', 'XETC': 'ETC', 'XETH': 'ETH', 'XLTC': 'LTC', 'XXBT': 'XBT', 'XXMR': 'XMR', 'XXRP': 'XRP', 'XZEC': 'ZEC', 'XMLN': 'MLN', 
'XREP': 'REP', 'XXLM': 'XLM', 'ZEUR': 'EUR', 'ZUSD': 'USD', 'ZCAD': 'CAD', 'ZGBP': 'GBP', 'ZJPY': 'JPY' } @classmethod def format_pair(self, market_pair): market_pair = market_pair.upper() if "-" not in market_pair: raise InvalidDelimiterError('Agreed upon delimiter is "-".') quote, base = market_pair.split('-') if base == "BTC": base = "XBT" if base.lower() not in self.base_currencies: raise InvalidBaseCurrencyError('''Expected input is quote-base, you have provided with {pair}'''.format(pair=market_pair)) return quote + self.delimiter + base @staticmethod def _tstamp_to_datetime(timestamp): return datetime.fromtimestamp(timestamp) @staticmethod def _is_sale(s): if s == "s": return True else: return False def get_markets(self): upstream = super(KrakenNormalized, self).get_markets() quotes = [] for i in upstream: for base in self.base_currencies: if base in i: quotes.append("".join(i.rsplit(base, 1)) + '-' + base) return quotes def get_market_ticker(self, market): ticker = super(KrakenNormalized, self).get_market_ticker(market) return { 'ask': ticker['a'][0], 'bid': ticker['b'][0], 'last': ticker['c'][0] } def get_balances(self): ticker = super(KrakenNormalized, self).get_balances() return {self._names[k]: v for k, v in ticker.items()} def get_market_trade_history(self, market, depth=100): upstream = super(KrakenNormalized, self).get_market_trade_history(market, depth) downstream = [] for data in upstream: downstream.append({ 'timestamp': self._tstamp_to_datetime(data[2]), 'is_sale': self._is_sale(data[3]), 'rate': data[0], 'amount': data[1], 'trade_id': data[2] }) return downstream def get_market_orders(self, market, depth=100): upstream = super(KrakenNormalized, self).get_market_orders(market, depth) return { 'bids': [[i[0], i[1]] for i in upstream['bids']], 'asks': [[i[0], i[1]] for i in upstream['asks']] } def get_market_sell_orders(self, market, depth=100): return self.get_market_orders(market, depth)['asks'] def get_market_buy_orders(self, market, depth=100): return self.get_market_orders(market, depth)['bids'] def get_market_spread(self, market): order_book = super(KrakenNormalized, self).get_market_orders(market, 1) ask = order_book['asks'][0][0] bid = order_book['bids'][0][0] return Decimal(ask) - Decimal(bid) def get_market_depth(self, market): order_book = self.get_market_orders(market, 1000) return {"bids": sum([Decimal(i[0]) * Decimal(i[1]) for i in order_book["bids"]]), "asks": sum([Decimal(i[1]) for i in order_book["asks"]]) } def get_market_ohlcv_data(self, market, interval=1, since=1): if interval.endswith('m'): interval = str(interval).rstrip('m') elif interval.endswith('d'): interval = int(interval.rstrip('d')) * 1440 elif interval.endswith('h'): interval = int(interval.rstrip('h')) * 60 if str(interval) not in "1, 5, 15, 30, 60, 240, 1440, 10080, 21600".split(', '): raise APIError('Unsupported interval.') upstream = super(KrakenNormalized, self ).get_market_ohlcv_data(market, interval, int(since) ) r = [] for ohlcv in upstream[next(iter(upstream))]: r.append({ 'open': float(ohlcv[1]), 'high': float(ohlcv[2]), 'low': float(ohlcv[3]), 'close': float(ohlcv[4]), 'volume': float(ohlcv[6]), 'time': self._tstamp_to_datetime(int(ohlcv[0])) }) return r
BSD 3-Clause New or Revised License
snight1983/chia-rosechain
chia/wallet/wallet_node.py
WalletNode._sync
python
async def _sync(self) -> None:
    if self.wallet_state_manager is None or self.backup_initialized is False or self.server is None:
        return None
    highest_weight: uint128 = uint128(0)
    peak_height: uint32 = uint32(0)
    peak: Optional[HeaderBlock] = None
    potential_peaks: List[
        Tuple[bytes32, HeaderBlock]
    ] = self.wallet_state_manager.sync_store.get_potential_peaks_tuples()
    self.log.info(f"Have collected {len(potential_peaks)} potential peaks")
    for header_hash, potential_peak_block in potential_peaks:
        if potential_peak_block.weight > highest_weight:
            highest_weight = potential_peak_block.weight
            peak_height = potential_peak_block.height
            peak = potential_peak_block
    if peak_height is None or peak_height == 0:
        return None
    if self.wallet_state_manager.peak is not None and highest_weight <= self.wallet_state_manager.peak.weight:
        self.log.info("Not performing sync, already caught up.")
        return None
    peers: List[WSChiaConnection] = self.server.get_full_node_connections()
    if len(peers) == 0:
        self.log.info("No peers to sync to")
        return None
    async with self.wallet_state_manager.blockchain.lock:
        fork_height = None
        if peak is not None:
            fork_height = self.wallet_state_manager.sync_store.get_potential_fork_point(peak.header_hash)
            our_peak_height = self.wallet_state_manager.blockchain.get_peak_height()
            ses_heigths = self.wallet_state_manager.blockchain.get_ses_heights()
            if len(ses_heigths) > 2 and our_peak_height is not None:
                ses_heigths.sort()
                max_fork_ses_height = ses_heigths[-3]
                if (
                    self.wallet_state_manager.blockchain.get_peak_height() is not None
                    and fork_height == max_fork_ses_height
                ):
                    peers = self.server.get_full_node_connections()
                    for peer in peers:
                        potential_height = uint32(our_peak_height + 1)
                        block_response: Optional[Any] = await peer.request_header_blocks(
                            wallet_protocol.RequestHeaderBlocks(potential_height, potential_height)
                        )
                        if block_response is not None and isinstance(
                            block_response, wallet_protocol.RespondHeaderBlocks
                        ):
                            our_peak = self.wallet_state_manager.blockchain.get_peak()
                            if (
                                our_peak is not None
                                and block_response.header_blocks[0].prev_header_hash == our_peak.header_hash
                            ):
                                fork_height = our_peak_height
                            break
        if fork_height is None:
            fork_height = uint32(0)
        await self.wallet_state_manager.blockchain.warmup(fork_height)
        await self.batch_sync_to_peak(fork_height, peak)
Wallet has fallen far behind (or is starting up for the first time), and must be synced up to the LCA of the blockchain.
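A toy sketch of the peak-selection step at the top of _sync, with mock header blocks that only carry weight and height:

from types import SimpleNamespace

potential_peaks = [
    ("hash_a", SimpleNamespace(weight=100, height=10)),
    ("hash_b", SimpleNamespace(weight=250, height=25)),  # heaviest block wins
    ("hash_c", SimpleNamespace(weight=180, height=30)),
]

highest_weight, peak_height, peak = 0, 0, None
for header_hash, block in potential_peaks:
    if block.weight > highest_weight:
        highest_weight, peak_height, peak = block.weight, block.height, block

assert (highest_weight, peak_height) == (250, 25)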
https://github.com/snight1983/chia-rosechain/blob/caafdb0aaf044b26957047b4acf3eed89fbaa600/chia/wallet/wallet_node.py#L591-L660
import asyncio import json import logging import socket import time import traceback from pathlib import Path from typing import Callable, Dict, List, Optional, Set, Tuple, Union, Any from blspy import PrivateKey from chia.consensus.block_record import BlockRecord from chia.consensus.constants import ConsensusConstants from chia.consensus.multiprocess_validation import PreValidationResult from chia.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH from chia.protocols import wallet_protocol from chia.protocols.full_node_protocol import RequestProofOfWeight, RespondProofOfWeight from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.wallet_protocol import ( RejectAdditionsRequest, RejectRemovalsRequest, RequestAdditions, RequestHeaderBlocks, RespondAdditions, RespondBlockHeader, RespondHeaderBlocks, RespondRemovals, ) from chia.server.node_discovery import WalletPeers from chia.server.outbound_message import Message, NodeType, make_msg from chia.server.server import ChiaServer from chia.server.ws_connection import WSChiaConnection from chia.types.blockchain_format.coin import Coin, hash_coin_list from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import CoinSpend from chia.types.header_block import HeaderBlock from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.types.peer_info import PeerInfo from chia.util.byte_types import hexstr_to_bytes from chia.util.errors import Err, ValidationError from chia.util.ints import uint32, uint128 from chia.util.keychain import Keychain from chia.util.lru_cache import LRUCache from chia.util.merkle_set import MerkleSet, confirm_included_already_hashed, confirm_not_included_already_hashed from chia.util.path import mkdir, path_from_root from chia.wallet.block_record import HeaderBlockRecord from chia.wallet.derivation_record import DerivationRecord from chia.wallet.settings.settings_objects import BackupInitialized from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.backup_utils import open_backup_file from chia.wallet.util.wallet_types import WalletType from chia.wallet.wallet_action import WalletAction from chia.wallet.wallet_blockchain import ReceiveBlockResult from chia.wallet.wallet_state_manager import WalletStateManager from chia.util.profiler import profile_task class WalletNode: key_config: Dict config: Dict constants: ConsensusConstants server: Optional[ChiaServer] log: logging.Logger wallet_peers: WalletPeers wallet_state_manager: Optional[WalletStateManager] short_sync_threshold: int _shut_down: bool root_path: Path state_changed_callback: Optional[Callable] syncing: bool full_node_peer: Optional[PeerInfo] peer_task: Optional[asyncio.Task] logged_in: bool wallet_peers_initialized: bool def __init__( self, config: Dict, keychain: Keychain, root_path: Path, consensus_constants: ConsensusConstants, name: str = None, ): self.config = config self.constants = consensus_constants self.root_path = root_path self.log = logging.getLogger(name if name else __name__) self.cached_blocks: Dict = {} self.future_block_hashes: Dict = {} self.keychain = keychain self._shut_down = False self.proof_hashes: List = [] self.header_hashes: List = [] self.header_hashes_error = False self.short_sync_threshold = 15 self.potential_blocks_received: Dict = {} self.potential_header_hashes: Dict = {} self.state_changed_callback = None self.wallet_state_manager = None self.backup_initialized = False self.server = None self.wsm_close_task = None 
self.sync_task: Optional[asyncio.Task] = None self.logged_in_fingerprint: Optional[int] = None self.peer_task = None self.logged_in = False self.wallet_peers_initialized = False self.last_new_peak_messages = LRUCache(5) def get_key_for_fingerprint(self, fingerprint: Optional[int]) -> Optional[PrivateKey]: private_keys = self.keychain.get_all_private_keys() if len(private_keys) == 0: self.log.warning("No keys present. Create keys with the UI, or with the 'chia keys' program.") return None private_key: Optional[PrivateKey] = None if fingerprint is not None: for sk, _ in private_keys: if sk.get_g1().get_fingerprint() == fingerprint: private_key = sk break else: private_key = private_keys[0][0] return private_key async def _start( self, fingerprint: Optional[int] = None, new_wallet: bool = False, backup_file: Optional[Path] = None, skip_backup_import: bool = False, ) -> bool: private_key = self.get_key_for_fingerprint(fingerprint) if private_key is None: self.logged_in = False return False if self.config.get("enable_profiler", False): asyncio.create_task(profile_task(self.root_path, "wallet", self.log)) db_path_key_suffix = str(private_key.get_g1().get_fingerprint()) db_path_replaced: str = ( self.config["database_path"] .replace("CHALLENGE", self.config["selected_network"]) .replace("KEY", db_path_key_suffix) ) path = path_from_root(self.root_path, db_path_replaced) mkdir(path.parent) self.new_peak_lock = asyncio.Lock() assert self.server is not None self.wallet_state_manager = await WalletStateManager.create( private_key, self.config, path, self.constants, self.server, self.root_path ) self.wsm_close_task = None assert self.wallet_state_manager is not None backup_settings: BackupInitialized = self.wallet_state_manager.user_settings.get_backup_settings() if backup_settings.user_initialized is False: if new_wallet is True: await self.wallet_state_manager.user_settings.user_created_new_wallet() self.wallet_state_manager.new_wallet = True elif skip_backup_import is True: await self.wallet_state_manager.user_settings.user_skipped_backup_import() elif backup_file is not None: await self.wallet_state_manager.import_backup_info(backup_file) else: self.backup_initialized = False await self.wallet_state_manager.close_all_stores() self.wallet_state_manager = None self.logged_in = False return False self.backup_initialized = True if self.wallet_peers_initialized is False: asyncio.create_task(self.wallet_peers.start()) self.wallet_peers_initialized = True if backup_file is not None: json_dict = open_backup_file(backup_file, self.wallet_state_manager.private_key) if "start_height" in json_dict["data"]: start_height = json_dict["data"]["start_height"] self.config["starting_height"] = max(0, start_height - self.config["start_height_buffer"]) else: self.config["starting_height"] = 0 else: self.config["starting_height"] = 0 if self.state_changed_callback is not None: self.wallet_state_manager.set_callback(self.state_changed_callback) self.wallet_state_manager.set_pending_callback(self._pending_tx_handler) self._shut_down = False self.peer_task = asyncio.create_task(self._periodically_check_full_node()) self.sync_event = asyncio.Event() self.sync_task = asyncio.create_task(self.sync_job()) self.logged_in_fingerprint = fingerprint self.logged_in = True return True def _close(self): self.log.info("self._close") self.logged_in_fingerprint = None self._shut_down = True async def _await_closed(self): self.log.info("self._await_closed") await self.server.close_all_connections() 
asyncio.create_task(self.wallet_peers.ensure_is_closed()) if self.wallet_state_manager is not None: await self.wallet_state_manager.close_all_stores() self.wallet_state_manager = None if self.sync_task is not None: self.sync_task.cancel() self.sync_task = None if self.peer_task is not None: self.peer_task.cancel() self.peer_task = None self.logged_in = False def _set_state_changed_callback(self, callback: Callable): self.state_changed_callback = callback if self.wallet_state_manager is not None: self.wallet_state_manager.set_callback(self.state_changed_callback) self.wallet_state_manager.set_pending_callback(self._pending_tx_handler) def _pending_tx_handler(self): if self.wallet_state_manager is None or self.backup_initialized is False: return None asyncio.create_task(self._resend_queue()) async def _action_messages(self) -> List[Message]: if self.wallet_state_manager is None or self.backup_initialized is False: return [] actions: List[WalletAction] = await self.wallet_state_manager.action_store.get_all_pending_actions() result: List[Message] = [] for action in actions: data = json.loads(action.data) action_data = data["data"]["action_data"] if action.name == "request_puzzle_solution": coin_name = bytes32(hexstr_to_bytes(action_data["coin_name"])) height = uint32(action_data["height"]) msg = make_msg( ProtocolMessageTypes.request_puzzle_solution, wallet_protocol.RequestPuzzleSolution(coin_name, height), ) result.append(msg) return result async def _resend_queue(self): if ( self._shut_down or self.server is None or self.wallet_state_manager is None or self.backup_initialized is None ): return None for msg, sent_peers in await self._messages_to_resend(): if ( self._shut_down or self.server is None or self.wallet_state_manager is None or self.backup_initialized is None ): return None full_nodes = self.server.get_full_node_connections() for peer in full_nodes: if peer.peer_node_id in sent_peers: continue await peer.send_message(msg) for msg in await self._action_messages(): if ( self._shut_down or self.server is None or self.wallet_state_manager is None or self.backup_initialized is None ): return None await self.server.send_to_all([msg], NodeType.FULL_NODE) async def _messages_to_resend(self) -> List[Tuple[Message, Set[bytes32]]]: if self.wallet_state_manager is None or self.backup_initialized is False or self._shut_down: return [] messages: List[Tuple[Message, Set[bytes32]]] = [] records: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent() for record in records: if record.spend_bundle is None: continue msg = make_msg( ProtocolMessageTypes.send_transaction, wallet_protocol.SendTransaction(record.spend_bundle), ) already_sent = set() for peer, status, _ in record.sent_to: if status == MempoolInclusionStatus.SUCCESS.value: already_sent.add(hexstr_to_bytes(peer)) messages.append((msg, already_sent)) return messages def set_server(self, server: ChiaServer): self.server = server DNS_SERVERS_EMPTY: list = [] self.wallet_peers = WalletPeers( self.server, self.root_path, self.config["target_peer_count"], self.config["wallet_peers_path"], self.config["introducer_peer"], DNS_SERVERS_EMPTY, self.config["peer_connect_interval"], self.config["selected_network"], None, self.log, ) async def on_connect(self, peer: WSChiaConnection): if self.wallet_state_manager is None or self.backup_initialized is False: return None messages_peer_ids = await self._messages_to_resend() self.wallet_state_manager.state_changed("add_connection") for msg, peer_ids in messages_peer_ids: if 
peer.peer_node_id in peer_ids: continue await peer.send_message(msg) if not self.has_full_node() and self.wallet_peers is not None: asyncio.create_task(self.wallet_peers.on_connect(peer)) async def _periodically_check_full_node(self) -> None: tries = 0 while not self._shut_down and tries < 5: if self.has_full_node(): await self.wallet_peers.ensure_is_closed() if self.wallet_state_manager is not None: self.wallet_state_manager.state_changed("add_connection") break tries += 1 await asyncio.sleep(self.config["peer_connect_interval"]) def has_full_node(self) -> bool: if self.server is None: return False if "full_node_peer" in self.config: full_node_peer = PeerInfo( self.config["full_node_peer"]["host"], self.config["full_node_peer"]["port"], ) peers = [c.get_peer_info() for c in self.server.get_full_node_connections()] full_node_resolved = PeerInfo(socket.gethostbyname(full_node_peer.host), full_node_peer.port) if full_node_peer in peers or full_node_resolved in peers: self.log.info(f"Will not attempt to connect to other nodes, already connected to {full_node_peer}") for connection in self.server.get_full_node_connections(): if ( connection.get_peer_info() != full_node_peer and connection.get_peer_info() != full_node_resolved ): self.log.info(f"Closing unnecessary connection to {connection.get_peer_info()}.") asyncio.create_task(connection.close()) return True return False async def complete_blocks(self, header_blocks: List[HeaderBlock], peer: WSChiaConnection): if self.wallet_state_manager is None: return None header_block_records: List[HeaderBlockRecord] = [] assert self.server trusted = self.server.is_trusted_peer(peer, self.config["trusted_peers"]) async with self.wallet_state_manager.blockchain.lock: for block in header_blocks: if block.is_transaction_block: (additions, removals,) = await self.wallet_state_manager.get_filter_additions_removals( block, block.transactions_filter, None ) added_coins = await self.get_additions(peer, block, additions) if added_coins is None: raise ValueError("Failed to fetch additions") removed_coins = await self.get_removals(peer, block, added_coins, removals) if removed_coins is None: raise ValueError("Failed to fetch removals") additional_coin_spends: List[CoinSpend] = await self.get_additional_coin_spends( peer, block, added_coins, removed_coins ) hbr = HeaderBlockRecord(block, added_coins, removed_coins) else: hbr = HeaderBlockRecord(block, [], []) header_block_records.append(hbr) additional_coin_spends = [] (result, error, fork_h,) = await self.wallet_state_manager.blockchain.receive_block( hbr, trusted=trusted, additional_coin_spends=additional_coin_spends ) if result == ReceiveBlockResult.NEW_PEAK: if not self.wallet_state_manager.sync_mode: self.wallet_state_manager.blockchain.clean_block_records() self.wallet_state_manager.state_changed("new_block") self.wallet_state_manager.state_changed("sync_changed") await self.wallet_state_manager.new_peak() elif result == ReceiveBlockResult.INVALID_BLOCK: self.log.info(f"Invalid block from peer: {peer.get_peer_info()} {error}") await peer.close() return else: self.log.debug(f"Result: {result}") async def new_peak_wallet(self, peak: wallet_protocol.NewPeakWallet, peer: WSChiaConnection): if self.wallet_state_manager is None: return if self.wallet_state_manager.blockchain.contains_block(peak.header_hash): self.log.debug(f"known peak {peak.header_hash}") return if self.wallet_state_manager.sync_mode: self.last_new_peak_messages.put(peer, peak) return async with self.new_peak_lock: curr_peak = 
self.wallet_state_manager.blockchain.get_peak() if curr_peak is not None and curr_peak.weight >= peak.weight: return request = wallet_protocol.RequestBlockHeader(peak.height) response: Optional[RespondBlockHeader] = await peer.request_block_header(request) if response is None or not isinstance(response, RespondBlockHeader) or response.header_block is None: self.log.warning(f"bad peak response from peer {response}") return header_block = response.header_block curr_peak_height = 0 if curr_peak is None else curr_peak.height if (curr_peak_height == 0 and peak.height < self.constants.WEIGHT_PROOF_RECENT_BLOCKS) or ( curr_peak_height > peak.height - 200 ): if peak.height <= curr_peak_height + self.config["short_sync_blocks_behind_threshold"]: await self.wallet_short_sync_backtrack(header_block, peer) else: await self.batch_sync_to_peak(curr_peak_height, peak) elif peak.height >= self.constants.WEIGHT_PROOF_RECENT_BLOCKS: weight_request = RequestProofOfWeight(peak.height, peak.header_hash) weight_proof_response: RespondProofOfWeight = await peer.request_proof_of_weight( weight_request, timeout=360 ) if weight_proof_response is None: return weight_proof = weight_proof_response.wp if self.wallet_state_manager is None: return if self.server is not None and self.server.is_trusted_peer(peer, self.config["trusted_peers"]): valid, fork_point = self.wallet_state_manager.weight_proof_handler.get_fork_point_no_validations( weight_proof ) else: valid, fork_point, _ = await self.wallet_state_manager.weight_proof_handler.validate_weight_proof( weight_proof ) if not valid: self.log.error( f"invalid weight proof, num of epochs {len(weight_proof.sub_epochs)}" f" recent blocks num ,{len(weight_proof.recent_chain_data)}" ) self.log.debug(f"{weight_proof}") return self.log.info(f"Validated, fork point is {fork_point}") self.wallet_state_manager.sync_store.add_potential_fork_point( header_block.header_hash, uint32(fork_point) ) self.wallet_state_manager.sync_store.add_potential_peak(header_block) self.start_sync() async def wallet_short_sync_backtrack(self, header_block, peer): top = header_block blocks = [top] while not self.wallet_state_manager.blockchain.contains_block(top.prev_header_hash) and top.height > 0: request_prev = wallet_protocol.RequestBlockHeader(top.height - 1) response_prev: Optional[RespondBlockHeader] = await peer.request_block_header(request_prev) if response_prev is None or not isinstance(response_prev, RespondBlockHeader): raise RuntimeError("bad block header response from peer while syncing") prev_head = response_prev.header_block blocks.append(prev_head) top = prev_head blocks.reverse() await self.complete_blocks(blocks, peer) await self.wallet_state_manager.create_more_puzzle_hashes() async def batch_sync_to_peak(self, fork_height, peak): advanced_peak = False batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS for i in range(max(0, fork_height - 1), peak.height, batch_size): start_height = i end_height = min(peak.height, start_height + batch_size) peers = self.server.get_full_node_connections() added = False for peer in peers: try: added, advanced_peak = await self.fetch_blocks_and_validate( peer, uint32(start_height), uint32(end_height), None if advanced_peak else fork_height ) if added: break except Exception as e: await peer.close() exc = traceback.format_exc() self.log.error(f"Error while trying to fetch from peer:{e} {exc}") if not added: raise RuntimeError(f"Was not able to add blocks {start_height}-{end_height}") curr_peak = self.wallet_state_manager.blockchain.get_peak() 
assert peak is not None self.wallet_state_manager.blockchain.clean_block_record( min(end_height, curr_peak.height) - self.constants.BLOCKS_CACHE_SIZE ) def start_sync(self) -> None: self.log.info("self.sync_event.set()") self.sync_event.set() async def check_new_peak(self) -> None: if self.wallet_state_manager is None: return None current_peak: Optional[BlockRecord] = self.wallet_state_manager.blockchain.get_peak() if current_peak is None: return None potential_peaks: List[ Tuple[bytes32, HeaderBlock] ] = self.wallet_state_manager.sync_store.get_potential_peaks_tuples() for _, block in potential_peaks: if current_peak.weight < block.weight: await asyncio.sleep(5) self.start_sync() return None async def sync_job(self) -> None: while True: self.log.info("Loop start in sync job") if self._shut_down is True: break asyncio.create_task(self.check_new_peak()) await self.sync_event.wait() self.last_new_peak_messages = LRUCache(5) self.sync_event.clear() if self._shut_down is True: break try: assert self.wallet_state_manager is not None self.wallet_state_manager.set_sync_mode(True) await self._sync() except Exception as e: tb = traceback.format_exc() self.log.error(f"Loop exception in sync {e}. {tb}") finally: if self.wallet_state_manager is not None: self.wallet_state_manager.set_sync_mode(False) for peer, peak in self.last_new_peak_messages.cache.items(): asyncio.create_task(self.new_peak_wallet(peak, peer)) self.log.info("Loop end in sync job")
Apache License 2.0
openmdao/dymos
dymos/transcriptions/pseudospectral/gauss_lobatto.py
GaussLobatto.configure_path_constraints
python
def configure_path_constraints(self, phase):
    super(GaussLobatto, self).configure_path_constraints(phase)

    for var, options in phase._path_constraints.items():
        con_name = options['constraint_name']
        con_units = options['units']
        var_type = phase.classify_var(var)

        if var_type == 'time':
            src = 'time'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'time_phase':
            src = 'time_phase'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'state':
            src = f'interleave_comp.all_values:states:{var}'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type in ('indep_control', 'input_control'):
            src = f'control_values:{var}'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type in ('indep_polynomial_control', 'input_polynomial_control'):
            src = f'polynomial_control_values:{var}'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'control_rate':
            control_name = var[:-5]
            src = f'control_rates:{control_name}_rate'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'control_rate2':
            control_name = var[:-6]
            src = f'control_rates:{control_name}_rate2'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'polynomial_control_rate':
            control_name = var[:-5]
            src = f'polynomial_control_rates:{control_name}_rate'
            tgt = f'path_constraints.all_values:{con_name}'

        elif var_type == 'polynomial_control_rate2':
            control_name = var[:-6]
            src = f'polynomial_control_rates:{control_name}_rate2'
            tgt = f'path_constraints.all_values:{con_name}'

        else:
            interleave_comp = phase._get_subsystem('interleave_comp')
            src = f'interleave_comp.all_values:{con_name}'
            tgt = f'path_constraints.all_values:{con_name}'

            ode_outputs = {opts['prom_name']: opts for (k, opts) in
                           phase.rhs_disc.get_io_metadata(iotypes=('output',)).items()}

            if var in ode_outputs:
                shape = (1,) if len(ode_outputs[var]['shape']) == 1 else ode_outputs[var]['shape'][1:]
                units = ode_outputs[var]['units'] if con_units is None else con_units
                src_added = interleave_comp.add_var(con_name, shape, units,
                                                    disc_src=f'rhs_disc.{var}',
                                                    col_src=f'rhs_col.{var}')
                if src_added:
                    phase.connect(src_name=f'rhs_disc.{var}',
                                  tgt_name=f'interleave_comp.disc_values:{con_name}')
                    phase.connect(src_name=f'rhs_col.{var}',
                                  tgt_name=f'interleave_comp.col_values:{con_name}')
            else:
                raise ValueError(f'Path-constrained variable {var} is not a known variable in'
                                 f' the phase {phase.pathname} nor is it a known output of '
                                 f' the ODE.')

        phase.connect(src_name=src, tgt_name=tgt)
Handle the common operations for configuration of the path constraints.

Parameters
----------
phase : dymos.Phase
    The phase object to which this transcription instance applies.
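A brief sketch of the user-facing side this method serves: declaring a path constraint on a phase that uses the Gauss-Lobatto transcription (the ODE class and the constrained output 'h' are placeholders):

import dymos as dm

# MyODE is a placeholder ODE system; 'h' is assumed to be one of its outputs.
phase = dm.Phase(ode_class=MyODE,
                 transcription=dm.GaussLobatto(num_segments=10, order=3))
phase.add_path_constraint('h', lower=0.0, units='m')
# During setup, configure_path_constraints() resolves 'h' to its source
# (an ODE output in this case) and connects it to path_constraints.all_values:h.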
https://github.com/openmdao/dymos/blob/936b838133088cf0f79bf980cc9c0f5938b3db7a/dymos/transcriptions/pseudospectral/gauss_lobatto.py#L440-L528
from collections import defaultdict import warnings import numpy as np import openmdao.api as om from openmdao.utils.general_utils import simple_warning from .pseudospectral_base import PseudospectralBase from .components import GaussLobattoInterleaveComp from ..common import GaussLobattoContinuityComp from ...utils.misc import get_rate_units, _unspecified from ...utils.introspection import get_targets, get_source_metadata, get_target_metadata from ...utils.indexing import get_src_indices_by_row from ..grid_data import GridData, make_subset_map from fnmatch import filter class GaussLobatto(PseudospectralBase): def __init__(self, **kwargs): super(GaussLobatto, self).__init__(**kwargs) self._rhs_source = 'rhs_disc' def init_grid(self): self.grid_data = GridData(num_segments=self.options['num_segments'], transcription='gauss-lobatto', transcription_order=self.options['order'], segment_ends=self.options['segment_ends'], compressed=self.options['compressed']) def configure_time(self, phase): super(GaussLobatto, self).configure_time(phase) options = phase.time_options for name, usr_tgts, dynamic in [('time', options['targets'], True), ('time_phase', options['time_phase_targets'], True)]: targets = get_targets(phase.rhs_disc, name=name, user_targets=usr_tgts) if targets: if dynamic: disc_src_idxs = self.grid_data.subset_node_indices['state_disc'] col_src_idxs = self.grid_data.subset_node_indices['col'] else: disc_src_idxs = col_src_idxs = None phase.connect(name, [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) phase.connect(name, [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) for name, usr_tgts, dynamic in [('t_initial', options['t_initial_targets'], False), ('t_duration', options['t_duration_targets'], False)]: targets = get_targets(phase.rhs_disc, name=name, user_targets=usr_tgts) shape, units, static_target = get_target_metadata(phase.rhs_disc, name=name, user_targets=targets, user_units=options['units'], user_shape=(1,)) if shape == (1,): disc_src_idxs = None col_src_idxs = None flat_src_idxs = None src_shape = None else: disc_src_idxs = self.grid_data.subset_node_indices['state_disc'] col_src_idxs = self.grid_data.subset_node_indices['col'] flat_src_idxs = True src_shape = (1,) for t in targets: phase.promotes('rhs_disc', inputs=[(t, name)], src_indices=disc_src_idxs, flat_src_indices=flat_src_idxs, src_shape=src_shape) phase.promotes('rhs_col', inputs=[(t, name)], src_indices=col_src_idxs, flat_src_indices=flat_src_idxs, src_shape=src_shape) if targets: phase.set_input_defaults(name=name, val=np.ones((1,)), units=options['units']) def configure_controls(self, phase): super(GaussLobatto, self).configure_controls(phase) grid_data = self.grid_data for name, options in phase.control_options.items(): disc_idxs = grid_data.subset_node_indices['state_disc'] col_idxs = grid_data.subset_node_indices['col'] disc_src_idxs = get_src_indices_by_row(disc_idxs, options['shape']) col_src_idxs = get_src_indices_by_row(col_idxs, options['shape']) if options['shape'] == (1,): disc_src_idxs = disc_src_idxs.ravel() col_src_idxs = col_src_idxs.ravel() disc_src_idxs = (disc_src_idxs,) col_src_idxs = (col_src_idxs,) targets = get_targets(phase.rhs_disc, name, options['targets']) if targets: phase.connect(f'control_values:{name}', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'control_values:{name}', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) 
targets = get_targets(phase.rhs_disc, f'{name}_rate', options['rate_targets'], control_rates=1) if targets: phase.connect(f'control_rates:{name}_rate', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'control_rates:{name}_rate', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) targets = get_targets(phase.rhs_disc, f'{name}_rate2', options['rate2_targets'], control_rates=2) if targets: phase.connect(f'control_rates:{name}_rate2', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'control_rates:{name}_rate2', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) def configure_polynomial_controls(self, phase): super(GaussLobatto, self).configure_polynomial_controls(phase) grid_data = self.grid_data for name, options in phase.polynomial_control_options.items(): disc_idxs = grid_data.subset_node_indices['state_disc'] col_idxs = grid_data.subset_node_indices['col'] disc_src_idxs = get_src_indices_by_row(disc_idxs, options['shape']) col_src_idxs = get_src_indices_by_row(col_idxs, options['shape']) if options['shape'] == (1,): disc_src_idxs = disc_src_idxs.ravel() col_src_idxs = col_src_idxs.ravel() disc_src_idxs = (disc_src_idxs,) col_src_idxs = (col_src_idxs,) targets = get_targets(ode=phase.rhs_disc, name=name, user_targets=options['targets']) if targets: phase.connect(f'polynomial_control_values:{name}', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'polynomial_control_values:{name}', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) targets = get_targets(ode=phase.rhs_disc, name=f'{name}_rate', user_targets=options['rate_targets']) if targets: phase.connect(f'polynomial_control_rates:{name}_rate', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'polynomial_control_rates:{name}_rate', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) targets = get_targets(ode=phase.rhs_disc, name=f'{name}_rate2', user_targets=options['rate2_targets']) if targets: phase.connect(f'polynomial_control_rates:{name}_rate2', [f'rhs_disc.{t}' for t in targets], src_indices=disc_src_idxs, flat_src_indices=True) phase.connect(f'polynomial_control_rates:{name}_rate2', [f'rhs_col.{t}' for t in targets], src_indices=col_src_idxs, flat_src_indices=True) def setup_ode(self, phase): grid_data = self.grid_data ode_class = phase.options['ode_class'] kwargs = phase.options['ode_init_kwargs'] rhs_disc = ode_class(num_nodes=grid_data.subset_num_nodes['state_disc'], **kwargs) rhs_col = ode_class(num_nodes=grid_data.subset_num_nodes['col'], **kwargs) phase.add_subsystem('rhs_disc', rhs_disc) super(GaussLobatto, self).setup_ode(phase) phase.add_subsystem('rhs_col', rhs_col) phase.add_subsystem('interleave_comp', GaussLobattoInterleaveComp(grid_data=self.grid_data)) def configure_ode(self, phase): super(GaussLobatto, self).configure_ode(phase) map_input_indices_to_disc = self.grid_data.input_maps['state_input_to_disc'] for name, options in phase.state_options.items(): src_idxs = om.slicer[map_input_indices_to_disc, ...] 
targets = get_targets(ode=phase.rhs_disc, name=name, user_targets=options['targets']) if targets: phase.connect(f'states:{name}', [f'rhs_disc.{tgt}' for tgt in targets], src_indices=src_idxs) phase.connect(f'state_interp.state_col:{name}', [f'rhs_col.{tgt}' for tgt in targets]) rate_src = options['rate_source'] if rate_src in phase.parameter_options: shape = phase.parameter_options[rate_src]['shape'] param_size = np.prod(shape) ndn = self.grid_data.subset_num_nodes['state_disc'] src_idxs = np.tile(np.arange(0, param_size, dtype=int), ndn) src_idxs = np.reshape(src_idxs, (ndn,) + shape) phase.promotes('state_interp', inputs=[(f'staterate_disc:{name}', f'parameters:{rate_src}')], src_indices=(src_idxs,), flat_src_indices=True, src_shape=shape) else: rate_path, disc_src_idxs = self.get_rate_source_path(name, nodes='state_disc', phase=phase) phase.connect(rate_path, f'state_interp.staterate_disc:{name}', src_indices=disc_src_idxs) self.configure_interleave_comp(phase) def configure_interleave_comp(self, phase): interleave_comp = phase._get_subsystem('interleave_comp') map_input_indices_to_disc = self.grid_data.input_maps['state_input_to_disc'] time_units = phase.time_options['units'] for state_name, options in phase.state_options.items(): shape = options['shape'] units = options['units'] rate_src = options['rate_source'] src_added = interleave_comp.add_var(f'states:{state_name}', shape, units, disc_src=f'states:{state_name}', col_src=f'state_interp.state_col:{state_name}') if src_added: phase.connect(f'states:{state_name}', f'interleave_comp.disc_values:states:{state_name}', src_indices=om.slicer[map_input_indices_to_disc, ...]) phase.connect(f'state_interp.state_col:{state_name}', f'interleave_comp.col_values:states:{state_name}') if rate_src in phase.parameter_options: rate_path_disc = rate_path_col = f'parameters:{rate_src}' else: rate_path_disc, disc_src_idxs = self.get_rate_source_path(state_name, nodes='state_disc', phase=phase) rate_path_col, col_src_idxs = self.get_rate_source_path(state_name, nodes='col', phase=phase) src_added = interleave_comp.add_var(f'state_rates:{state_name}', shape, units=get_rate_units(options['units'], time_units), disc_src=rate_path_disc, col_src=rate_path_col) if src_added: if rate_src in phase.parameter_options: shape = phase.parameter_options[rate_src]['shape'] param_size = np.prod(shape) ndn = self.grid_data.subset_num_nodes['state_disc'] ncn = self.grid_data.subset_num_nodes['col'] src_idxs = np.tile(np.arange(0, param_size, dtype=int), ndn) src_idxs = np.reshape(src_idxs, (ndn,) + shape) phase.promotes('interleave_comp', inputs=[(f'disc_values:state_rates:{state_name}', f'parameters:{rate_src}')], src_indices=(src_idxs,), flat_src_indices=True, src_shape=shape) src_idxs = np.tile(np.arange(0, param_size, dtype=int), ncn) src_idxs = np.reshape(src_idxs, (ncn,) + shape) phase.promotes('interleave_comp', inputs=[(f'col_values:state_rates:{state_name}', f'parameters:{rate_src}')], src_indices=(src_idxs,), flat_src_indices=True, src_shape=shape) else: rate_path_disc, disc_src_idxs = self.get_rate_source_path(state_name, nodes='state_disc', phase=phase) phase.connect(rate_path_disc, f'interleave_comp.disc_values:state_rates:{state_name}', src_indices=disc_src_idxs) rate_path_col, col_src_idxs = self.get_rate_source_path(state_name, nodes='col', phase=phase) phase.connect(rate_path_col, f'interleave_comp.col_values:state_rates:{state_name}', src_indices=col_src_idxs) def setup_defects(self, phase): super(GaussLobatto, self).setup_defects(phase) if 
any(self._requires_continuity_constraints(phase)): phase.add_subsystem('continuity_comp', GaussLobattoContinuityComp(grid_data=self.grid_data, state_options=phase.state_options, control_options=phase.control_options, time_units=phase.time_options['units'])) def configure_defects(self, phase): super(GaussLobatto, self).configure_defects(phase) any_state_cnty, any_control_cnty, any_control_rate_cnty = self._requires_continuity_constraints(phase) if any_control_rate_cnty: phase.promotes('continuity_comp', inputs=['t_duration']) if any((any_state_cnty, any_control_cnty, any_control_rate_cnty)): phase._get_subsystem('continuity_comp').configure_io() for name, options in phase.state_options.items(): phase.connect(f'state_interp.staterate_col:{name}', f'collocation_constraint.f_approx:{name}') rate_src = options['rate_source'] if rate_src in phase.parameter_options: shape = phase.parameter_options[rate_src]['shape'] param_size = np.prod(shape) ncn = self.grid_data.subset_num_nodes['col'] src_idxs = np.tile(np.arange(0, param_size, dtype=int), ncn) src_idxs = np.reshape(src_idxs, (ncn,) + shape) phase.promotes('collocation_constraint', inputs=[(f'f_computed:{name}', f'parameters:{rate_src}')], src_indices=(src_idxs,), flat_src_indices=True, src_shape=shape) else: rate_path, src_idxs = self.get_rate_source_path(name, nodes='col', phase=phase) phase.connect(rate_path, f'collocation_constraint.f_computed:{name}', src_indices=src_idxs)
Apache License 2.0
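The parameter-to-node wiring in the Gauss-Lobatto context above repeatedly builds flat source indices by tiling a parameter's indices once per node and reshaping to (num_nodes,) + shape. Below is a minimal NumPy sketch of that tiling pattern, using made-up sizes in place of a real GridData object.

import numpy as np

# Hypothetical sizes standing in for grid_data.subset_num_nodes['col'] and a parameter shape.
param_shape = (3,)            # shape of the static parameter
param_size = int(np.prod(param_shape))
num_col_nodes = 4             # number of collocation nodes

# Same pattern as the Gauss-Lobatto code: repeat the parameter's flat indices once per node,
# then reshape so each node row indexes the full parameter.
src_idxs = np.tile(np.arange(0, param_size, dtype=int), num_col_nodes)
src_idxs = np.reshape(src_idxs, (num_col_nodes,) + param_shape)

print(src_idxs.shape)   # (4, 3)
print(src_idxs[0])      # [0 1 2] -- every node reads the same parameter values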
matteoferla/fragmenstein
fragmenstein/monster/_blend_place.py
_MonsterBlend.transfer_ring_data
python
def transfer_ring_data(self, donor: Chem.Atom, acceptor: Chem.Atom): pass
Transfer the ring-core info if the donor is a ring-core atom. :param donor: :param acceptor: :return:
https://github.com/matteoferla/fragmenstein/blob/151bde01f4ebd930880cb7ad234bab68ac4a3e76/fragmenstein/monster/_blend_place.py#L409-L419
__doc__ = """ This is inherited by MonsterPlace """ from rdkit.Chem import rdmolops import itertools import json from collections import Counter from collections import defaultdict from typing import Optional, Dict, List, Tuple from warnings import warn import numpy as np from rdkit import Chem from rdkit.Chem import AllChem from rdkit.Chem import rdFMCS, rdMolAlign, rdmolops from rdkit.Chem import rdmolops from rdkit.Geometry.rdGeometry import Point3D from ._communal import _MonsterCommunal from ._merge import _MonsterMerge from .unmerge_mapper import Unmerge class _MonsterBlend(_MonsterMerge): def full_blending(self) -> None: self.mol_options = [self.simply_merge_hits()] scaffold = self.posthoc_refine(self.mol_options[0]) chimera = self.make_chimera(scaffold) self.keep_copy(scaffold, 'scaffold') self.keep_copy(chimera, 'chimera') self.positioned_mol = self.place_from_map(target_mol=self.initial_mol, template_mol=chimera, atom_map=None) def partial_blending(self) -> None: self.mol_options = self.partially_blend_hits() unrefined_scaffold, mode_index = self.pick_best() used = unrefined_scaffold.GetProp('_Name').split('-') self.unmatched = [h.GetProp('_Name') for h in self.hits if h.GetProp('_Name') not in used] scaffold = self.posthoc_refine(unrefined_scaffold) chimera = self.make_chimera(scaffold, mode_index) self.keep_copy(scaffold, 'scaffold') self.keep_copy(chimera, 'chimera') self.positioned_mol = self.place_from_map(target_mol=self.positioned_mol, template_mol=chimera, atom_map=None) def no_blending(self, broad=False) -> None: maps = {} for template in self.hits: if broad: pair_atom_maps, _ = self.get_mcs_mappings(self.initial_mol, template) maps[template.GetProp('_Name')] = pair_atom_maps else: pair_atom_maps_t = self._get_atom_maps(self.initial_mol, template, atomCompare=rdFMCS.AtomCompare.CompareElements, bondCompare=rdFMCS.BondCompare.CompareOrder, ringMatchesRingOnly=True, ringCompare=rdFMCS.RingCompare.PermissiveRingFusion, matchChiralTag=True) pair_atom_maps = [dict(p) for p in pair_atom_maps_t] maps[template.GetProp('_Name')] = pair_atom_maps um = Unmerge(followup=self.initial_mol, mols=self.hits, maps=maps, no_discard=self.throw_on_discard) self.keep_copy(um.combined, 'scaffold') self.keep_copy(um.combined_bonded, 'chimera') self.unmatched = [m.GetProp('_Name') for m in um.disregarded] if self.throw_on_discard and len(self.unmatched): raise ConnectionError(f'{self.unmatched} was rejected.') self.journal.debug(f'followup to scaffold {um.combined_map}') placed = self.place_from_map(target_mol=self.initial_mol, template_mol=um.combined_bonded, atom_map=um.combined_map) alts = zip(um.combined_bonded_alternatives, um.combined_map_alternatives) placed_options = [self.place_from_map(target_mol=self.initial_mol, template_mol=mol, atom_map=mappa) for mol, mappa in alts] self.positioned_mol = self.posthoc_refine(placed) self.mol_options = [self.posthoc_refine(mol) for mol in placed_options] def partially_blend_hits(self, hits: Optional[List[Chem.Mol]] = None) -> List[Chem.Mol]: if hits is None: hits = sorted(self.hits, key=lambda h: h.GetNumAtoms(), reverse=True) for hi, hit in enumerate(hits): if not hit.HasProp('_Name') or hit.GetProp('_Name').strip() == '': hit.SetProp('_Name', f'hit{hi}') def get_dodgies(skippers): dodgy = [] for hit0, hit1, hit2 in itertools.combinations(hits, 3): hn0 = hit0.GetProp('_Name') hn1 = hit1.GetProp('_Name') hn2 = hit2.GetProp('_Name') if any([hit in skippers for hit in (hn0, hn1, hn2)]): continue for a, b in inter_mapping[(hn0, hn1)].items(): if a in 
inter_mapping[(hn0, hn2)] and b in inter_mapping[(hn1, hn2)]: if inter_mapping[(hn0, hn2)][a] != inter_mapping[(hn1, hn2)][b]: dodgy.extend((hn0, hn1, hn2)) d = Counter(dodgy).most_common() if dodgy: return get_dodgies(skippers=skippers + [d[0][0]]) else: return skippers inter_mapping = {} for h1, h2 in itertools.combinations(hits, 2): inter_mapping[(h1.GetProp('_Name'), h2.GetProp('_Name'))] = self.get_positional_mapping(h1, h2) dodgy_names = get_dodgies([]) self.warning(f'These combiend badly: {dodgy_names}') dodgies = [hit for hit in hits if hit.GetProp('_Name') in dodgy_names] mergituri = [hit for hit in hits if hit.GetProp('_Name') not in dodgy_names] merged = self.simply_merge_hits(mergituri) dodgies += [hit for hit in hits if hit.GetProp('_Name') in self.unmatched] self.unmatched = [] combined_dodgies = [] for h1, h2 in itertools.combinations(dodgies, 2): h_alt = Chem.Mol(h1) try: combined_dodgies.append(self.merge_pair(h_alt, h2)) except ConnectionError: pass combinations = [merged] + dodgies + combined_dodgies while self.propagate_alternatives(combinations) != 0: pass return combinations def propagate_alternatives(self, fewer): pt = Chem.GetPeriodicTable() new = 0 for template in list(fewer): for i, atom in enumerate(template.GetAtoms()): if atom.HasProp('_AltSymbol'): alt = Chem.Mol(template) aa = alt.GetAtomWithIdx(i) aa.SetAtomicNum(pt.GetAtomicNumber(atom.GetProp('_AltSymbol'))) aa.ClearProp('_AltSymbol') atom.ClearProp('_AltSymbol') fewer.append(alt) new += 1 return new def pick_best(self) -> Tuple[Chem.Mol, int]: if len(self.mol_options) == 1: return self.mol_options[0], 0 elif len(self.mol_options) == 0: raise ValueError('No scaffolds made?!') else: mapx = {} def template_sorter(t: List[Chem.Mol]) -> float: n_atoms = len(mapx[t.GetProp('_Name')][0]) mode = mapx[t.GetProp('_Name')][1] mode_i = self.matching_modes.index(mode) return - n_atoms - mode_i / 10 for template in self.mol_options: atom_maps = self._get_atom_maps(template, self.initial_mol, atomCompare=rdFMCS.AtomCompare.CompareElements, bondCompare=rdFMCS.BondCompare.CompareOrder, ringMatchesRingOnly=True, ringCompare=rdFMCS.RingCompare.PermissiveRingFusion, matchChiralTag=False) mapx[template.GetProp('_Name')] = (atom_maps, self.matching_modes[-1]) self.mol_options = sorted(self.mol_options, key=template_sorter) for template in self.mol_options[:3]: atom_map, mode = self.get_mcs_mapping(template, self.initial_mol) mapx[template.GetProp('_Name')] = (atom_map, mode) self.journal.debug(f"With {template.GetProp('_Name')}, "+ "{len(atom_map)} atoms map using mode {self.matching_modes.index(mode)}") self.mol_options = sorted(self.mol_options, key=template_sorter) best = self.mol_options[0] return best, self.matching_modes.index(mapx[best.GetProp('_Name')][1]) def make_chimera(self, template: Chem.Mol, min_mode_index=0) -> Chem.Mol: atom_map, mode = self.get_mcs_mapping(template, self.initial_mol, min_mode_index=min_mode_index) follow = {**{k: str(v) for k, v in mode.items()}, 'N_atoms': len(atom_map)} self.journal.debug(f"scaffold-followup: {follow}") chimera = Chem.RWMol(template) for scaff_ai, follow_ai in atom_map.items(): if template.GetAtomWithIdx(scaff_ai).GetSymbol() != self.initial_mol.GetAtomWithIdx( follow_ai).GetSymbol(): v = {'F': 1, 'Br': 1, 'Cl': 1, 'H': 1, 'B': 3, 'C': 4, 'N': 3, 'O': 2, 'S': 2, 'Se': 2, 'P': 6} wanted = self.initial_mol.GetAtomWithIdx(follow_ai) if wanted.GetSymbol() == '*': continue owned = template.GetAtomWithIdx(scaff_ai) diff_valance = owned.GetExplicitValence() - v[wanted.GetSymbol()] 
if wanted.GetSymbol() in ('F', 'Br', 'Cl', 'C', 'H') and diff_valance > 0: continue elif owned.GetExplicitValence() > 4 and wanted.GetSymbol() not in ('P',): continue else: newatom = Chem.Atom(wanted) stdev = chimera.GetAtomWithIdx(scaff_ai).GetDoubleProp('_Stdev') newatom.SetDoubleProp('_Stdev', stdev) origin = chimera.GetAtomWithIdx(scaff_ai).GetProp('_Origin') newatom.SetProp('_Origin', origin) chimera.ReplaceAtom(scaff_ai, newatom) if diff_valance > 0: chimera.GetAtomWithIdx(scaff_ai).SetFormalCharge(diff_valance) try: chimera.UpdatePropertyCache() except Chem.AtomValenceException as err: warn('Valance issue' + str(err)) return chimera def place_from_map(self, target_mol: Chem.Mol, template_mol: Chem.Mol, atom_map: Optional[Dict] = None) -> Chem.Mol: if target_mol is None: target_mol = self.initial_mol sextant = Chem.Mol(target_mol) Chem.SanitizeMol(sextant) AllChem.EmbedMolecule(sextant) AllChem.MMFFOptimizeMolecule(sextant) if atom_map is None: atom_map, mode = self.get_mcs_mapping(target_mol, template_mol) msg = {**{k: str(v) for k, v in mode.items()}, 'N_atoms': len(atom_map)} self.journal.debug(f"followup-chimera' = {msg}") rdMolAlign.AlignMol(sextant, template_mol, atomMap=list(atom_map.items()), maxIters=500) putty = Chem.Mol(sextant) pconf = putty.GetConformer() chimera_conf = template_mol.GetConformer() uniques = set() for i in range(putty.GetNumAtoms()): p_atom = putty.GetAtomWithIdx(i) p_atom.SetDoubleProp('_Stdev', 0.) p_atom.SetProp('_Origin', 'none') if i in atom_map: ci = atom_map[i] c_atom = template_mol.GetAtomWithIdx(ci) if c_atom.HasProp('_Stdev'): stdev = c_atom.GetDoubleProp('_Stdev') origin = c_atom.GetProp('_Origin') p_atom.SetDoubleProp('_Stdev', stdev) p_atom.SetProp('_Origin', origin) pconf.SetAtomPosition(i, chimera_conf.GetAtomPosition(ci)) else: uniques.add(i) categories = self._categorise(sextant, uniques) done_already = [] for unique_idx in categories['pairs']: if unique_idx in done_already: continue team = self._recruit_team(target_mol, unique_idx, categories['uniques']) other_attachments = (team & set(categories['pairs'].keys())) - {unique_idx} sights = set() for att_idx in [unique_idx] + list(other_attachments): for pd in categories['pairs'][att_idx]: first_sight = pd['idx'] sights.add((first_sight, first_sight)) neighs = [i.GetIdx() for i in sextant.GetAtomWithIdx(first_sight).GetNeighbors() if i.GetIdx() not in uniques] for n in neighs: sights.add((n, n)) if self.attachment and list(categories['dummies']) and list(categories['dummies'])[0] in team: r = list(categories['dummies'])[0] pconf.SetAtomPosition(r, self.attachment.GetConformer().GetAtomPosition(0)) sights.add((r, r)) rdMolAlign.AlignMol(sextant, putty, atomMap=list(sights), maxIters=500) sconf = sextant.GetConformer() self.journal.debug(f'alignment atoms for {unique_idx} ({team}): {sights}') for atom_idx in team: pconf.SetAtomPosition(atom_idx, sconf.GetAtomPosition(atom_idx)) for other in other_attachments: done_already.append(other) AllChem.SanitizeMol(putty) return putty
MIT License
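The transfer_ring_data entry above is a stub (pass) whose docstring only says it transfers ring-core info from a donor atom to an acceptor atom. Below is a minimal, hypothetical sketch of what such a transfer could look like with plain RDKit calls; the property name '_ring_i' is an assumption, not Fragmenstein's actual bookkeeping.

from rdkit import Chem

def transfer_ring_data_sketch(donor: Chem.Atom, acceptor: Chem.Atom) -> None:
    """Hypothetical illustration: copy ring-membership bookkeeping from donor to acceptor."""
    if not donor.IsInRing():
        return
    # '_ring_i' is a made-up property name standing in for whatever ring-core
    # annotation the real implementation would carry.
    if donor.HasProp('_ring_i'):
        acceptor.SetProp('_ring_i', donor.GetProp('_ring_i'))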
mkwiatkowski/pythoscope
pythoscope/execution.py
Execution.serialize
python
def serialize(self, obj): return self._retrieve_or_capture(obj, self.create_serialized_object)
Return a description of the given object in the form of a subclass of SerializedObject.
https://github.com/mkwiatkowski/pythoscope/blob/b4b89b77b5184b25992893e320d58de32ed987f1/pythoscope/execution.py#L92-L96
import itertools import time import types from pythoscope.serializer import BuiltinException, ImmutableObject, MapObject, UnknownObject, SequenceObject, LibraryObject, is_immutable, is_sequence, is_mapping, is_builtin_exception, is_library_object from pythoscope.store import Call, Class, Function, FunctionCall, GeneratorObject, GeneratorObjectInvocation, MethodCall, Project, UserObject from pythoscope.util import all_of_type, assert_argument_type, class_name, generator_has_ended, get_generator_from_frame, is_generator_code, map_values, module_name class Execution(object): def __init__(self, project): self.project = project self.started = time.time() self.ended = None self.captured_objects = {} self.captured_calls = [] self.call_graph = None self._preserved_objects = [] def finalize(self): self._preserved_objects = [] self.ended = time.time() self._fix_generator_objects() def destroy(self): self.destroy_references() self.captured_objects = {} self.captured_calls = [] self.call_graph = None def destroy_references(self): for obj in itertools.chain(self.captured_calls, self.captured_objects.values()): if isinstance(obj, UserObject): obj.klass.user_objects.remove(obj) elif isinstance(obj, FunctionCall): obj.definition.calls.remove(obj) elif isinstance(obj, GeneratorObject): if isinstance(obj.definition, Function): obj.definition.calls.remove(obj)
MIT License
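Execution.serialize above delegates to _retrieve_or_capture, which is not shown in the excerpt. Below is a rough sketch of the retrieve-or-capture memoization pattern it implies, modelled on the captured_objects dict visible in Execution.__init__; keying by id(obj) and the use of the preservation list are assumptions.

# Hypothetical sketch of the retrieve-or-capture memoization behind Execution.serialize().
class CaptureCache:
    def __init__(self):
        self.captured_objects = {}     # mirrors Execution.captured_objects
        self._preserved_objects = []   # keep originals alive so id() stays unique (assumed)

    def retrieve_or_capture(self, obj, capture_callback):
        key = id(obj)                  # assumed key; the real code may key differently
        captured = self.captured_objects.get(key)
        if captured is None:
            captured = capture_callback(obj)
            self.captured_objects[key] = captured
            self._preserved_objects.append(obj)
        return captured

cache = CaptureCache()
desc = cache.retrieve_or_capture([1, 2, 3], lambda o: "SequenceObject(len=%d)" % len(o))
print(desc)  # SequenceObject(len=3)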
azure/walinuxagent
azurelinuxagent/common/agent_supported_feature.py
get_agent_supported_features_list_for_extensions
python
def get_agent_supported_features_list_for_extensions(): return dict((name, feature) for name, feature in __EXTENSION_ADVERTISED_FEATURES.items() if feature.is_supported)
List of features that the GuestAgent currently supports (like Extension Telemetry Pipeline, etc.) and that Extensions need. We need to send this list as environment variables when calling extension commands to inform Extensions of all the features the agent supports. :return: Dict containing all Extension-supported features, with the feature names as keys and the AgentSupportedFeature objects as values, for each feature supported by the Agent. Eg: { SupportedFeatureNames.ExtensionTelemetryPipeline: _ETPFeature() }
https://github.com/azure/walinuxagent/blob/e65b6d5f02cf298c14df3d2a344f5ae39739698c/azurelinuxagent/common/agent_supported_feature.py#L110-L121
class SupportedFeatureNames(object): MultiConfig = "MultipleExtensionsPerHandler" ExtensionTelemetryPipeline = "ExtensionTelemetryPipeline" class AgentSupportedFeature(object): def __init__(self, name, version="1.0", supported=False): self.__name = name self.__version = version self.__supported = supported @property def name(self): return self.__name @property def version(self): return self.__version @property def is_supported(self): return self.__supported class _MultiConfigFeature(AgentSupportedFeature): __NAME = SupportedFeatureNames.MultiConfig __VERSION = "1.0" __SUPPORTED = True def __init__(self): super(_MultiConfigFeature, self).__init__(name=_MultiConfigFeature.__NAME, version=_MultiConfigFeature.__VERSION, supported=_MultiConfigFeature.__SUPPORTED) class _ETPFeature(AgentSupportedFeature): __NAME = SupportedFeatureNames.ExtensionTelemetryPipeline __VERSION = "1.0" __SUPPORTED = True def __init__(self): super(_ETPFeature, self).__init__(name=self.__NAME, version=self.__VERSION, supported=self.__SUPPORTED) __CRP_ADVERTISED_FEATURES = { SupportedFeatureNames.MultiConfig: _MultiConfigFeature() } __EXTENSION_ADVERTISED_FEATURES = { SupportedFeatureNames.ExtensionTelemetryPipeline: _ETPFeature() } def get_supported_feature_by_name(feature_name): if feature_name in __CRP_ADVERTISED_FEATURES: return __CRP_ADVERTISED_FEATURES[feature_name] if feature_name in __EXTENSION_ADVERTISED_FEATURES: return __EXTENSION_ADVERTISED_FEATURES[feature_name] raise NotImplementedError("Feature with Name: {0} not found".format(feature_name)) def get_agent_supported_features_list_for_crp(): return dict((name, feature) for name, feature in __CRP_ADVERTISED_FEATURES.items() if feature.is_supported)
Apache License 2.0
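A small usage sketch for get_agent_supported_features_list_for_extensions above, flattening the returned dict into name/version pairs as one might when building environment variables for an extension command; the exact environment-variable convention is an assumption, not the agent's documented behaviour.

from azurelinuxagent.common.agent_supported_feature import (
    get_agent_supported_features_list_for_extensions,
)

# {feature name: AgentSupportedFeature}
features = get_agent_supported_features_list_for_extensions()

# Hypothetical flattening into simple name -> version strings for an env block.
feature_env = {name: feature.version for name, feature in features.items()}
print(feature_env)  # e.g. {'ExtensionTelemetryPipeline': '1.0'}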
ganeti/ganeti
lib/cmdlib/instance_storage.py
_GenerateDRBD8Branch
python
def _GenerateDRBD8Branch(lu, primary_uuid, secondary_uuid, size, vgnames, names, iv_name, forthcoming=False): assert len(vgnames) == len(names) == 2 port = lu.cfg.AllocatePort() shared_secret = lu.cfg.GenerateDRBDSecret(lu.proc.GetECId()) dev_data = objects.Disk(dev_type=constants.DT_PLAIN, size=size, logical_id=(vgnames[0], names[0]), nodes=[primary_uuid, secondary_uuid], params={}, forthcoming=forthcoming) dev_data.uuid = lu.cfg.GenerateUniqueID(lu.proc.GetECId()) dev_meta = objects.Disk(dev_type=constants.DT_PLAIN, size=constants.DRBD_META_SIZE, logical_id=(vgnames[1], names[1]), nodes=[primary_uuid, secondary_uuid], params={}, forthcoming=forthcoming) dev_meta.uuid = lu.cfg.GenerateUniqueID(lu.proc.GetECId()) drbd_uuid = lu.cfg.GenerateUniqueID(lu.proc.GetECId()) minors = lu.cfg.AllocateDRBDMinor([primary_uuid, secondary_uuid], drbd_uuid) assert len(minors) == 2 drbd_dev = objects.Disk(dev_type=constants.DT_DRBD8, size=size, logical_id=(primary_uuid, secondary_uuid, port, minors[0], minors[1], shared_secret), children=[dev_data, dev_meta], nodes=[primary_uuid, secondary_uuid], iv_name=iv_name, params={}, forthcoming=forthcoming) drbd_dev.uuid = drbd_uuid return drbd_dev
Generate a drbd8 device complete with its children.
https://github.com/ganeti/ganeti/blob/4d21019c72cba4d746f5d17ca22098f4c7682e9c/lib/cmdlib/instance_storage.py#L543-L576
import itertools import logging import os import time from ganeti import compat from ganeti import constants from ganeti import errors from ganeti import ht from ganeti import locking from ganeti.masterd import iallocator from ganeti import objects from ganeti import utils import ganeti.rpc.node as rpc from ganeti.cmdlib.base import LogicalUnit, NoHooksLU, Tasklet from ganeti.cmdlib.common import INSTANCE_DOWN, INSTANCE_NOT_RUNNING, AnnotateDiskParams, CheckIAllocatorOrNode, ExpandNodeUuidAndName, ComputeIPolicyDiskSizesViolation, CheckNodeOnline, CheckInstanceNodeGroups, CheckInstanceState, IsExclusiveStorageEnabledNode, FindFaultyInstanceDisks, GetWantedNodes, CheckDiskTemplateEnabled from ganeti.cmdlib.instance_utils import GetInstanceInfoText, CopyLockList, ReleaseLocks, CheckNodeVmCapable, BuildInstanceHookEnvByObject, CheckNodeNotDrained, CheckTargetNodeIPolicy import ganeti.masterd.instance _DISK_TEMPLATE_NAME_PREFIX = { constants.DT_PLAIN: "", constants.DT_RBD: ".rbd", constants.DT_EXT: ".ext", constants.DT_FILE: ".file", constants.DT_SHARED_FILE: ".sharedfile", } def CreateSingleBlockDev(lu, node_uuid, instance, device, info, force_open, excl_stor): result = lu.rpc.call_blockdev_create(node_uuid, (device, instance), device.size, instance.name, force_open, info, excl_stor) result.Raise("Can't create block device %s on" " node %s for instance %s" % (device, lu.cfg.GetNodeName(node_uuid), instance.name)) def _CreateBlockDevInner(lu, node_uuid, instance, device, force_create, info, force_open, excl_stor): created_devices = [] try: if device.CreateOnSecondary(): force_create = True if device.children: for child in device.children: devs = _CreateBlockDevInner(lu, node_uuid, instance, child, force_create, info, force_open, excl_stor) created_devices.extend(devs) if not force_create: return created_devices CreateSingleBlockDev(lu, node_uuid, instance, device, info, force_open, excl_stor) created_devices = [(node_uuid, device)] return created_devices except errors.DeviceCreationError as e: e.created_devices.extend(created_devices) raise e except errors.OpExecError as e: raise errors.DeviceCreationError(str(e), created_devices) def IsExclusiveStorageEnabledNodeUuid(cfg, node_uuid): ni = cfg.GetNodeInfo(node_uuid) if ni is None: raise errors.OpPrereqError("Invalid node UUID %s" % node_uuid, errors.ECODE_NOENT) return IsExclusiveStorageEnabledNode(cfg, ni) def _CreateBlockDev(lu, node_uuid, instance, device, force_create, info, force_open): (disk,) = AnnotateDiskParams(instance, [device], lu.cfg) excl_stor = IsExclusiveStorageEnabledNodeUuid(lu.cfg, node_uuid) return _CreateBlockDevInner(lu, node_uuid, instance, disk, force_create, info, force_open, excl_stor) def _UndoCreateDisks(lu, disks_created, instance): for (node_uuid, disk) in disks_created: result = lu.rpc.call_blockdev_remove(node_uuid, (disk, instance)) result.Warn("Failed to remove newly-created disk %s on node %s" % (disk, lu.cfg.GetNodeName(node_uuid)), logging.warning) def CreateDisks(lu, instance, disk_template=None, to_skip=None, target_node_uuid=None, disks=None): info = GetInstanceInfoText(instance) if disks is None: disks = lu.cfg.GetInstanceDisks(instance.uuid) if target_node_uuid is None: pnode_uuid = instance.primary_node all_node_uuids = [] for disk in disks: all_node_uuids.extend(disk.all_nodes) all_node_uuids = set(all_node_uuids) all_node_uuids.discard(pnode_uuid) all_node_uuids = [pnode_uuid] + list(all_node_uuids) else: pnode_uuid = target_node_uuid all_node_uuids = [pnode_uuid] if disk_template is None: 
disk_template = utils.GetDiskTemplate(disks) if disk_template == constants.DT_MIXED: raise errors.OpExecError("Creating disk for '%s' instances " "only possible with explicit disk template." % (constants.DT_MIXED,)) CheckDiskTemplateEnabled(lu.cfg.GetClusterInfo(), disk_template) if disk_template in constants.DTS_FILEBASED: file_storage_dir = os.path.dirname(disks[0].logical_id[1]) result = lu.rpc.call_file_storage_dir_create(pnode_uuid, file_storage_dir) result.Raise("Failed to create directory '%s' on" " node %s" % (file_storage_dir, lu.cfg.GetNodeName(pnode_uuid))) disks_created = [] for idx, device in enumerate(disks): if to_skip and idx in to_skip: continue logging.info("Creating disk %s for instance '%s'", idx, instance.name) for node_uuid in all_node_uuids: f_create = node_uuid == pnode_uuid try: _CreateBlockDev(lu, node_uuid, instance, device, f_create, info, f_create) disks_created.append((node_uuid, device)) except errors.DeviceCreationError as e: logging.warning("Creating disk %s for instance '%s' failed", idx, instance.name) disks_created.extend(e.created_devices) _UndoCreateDisks(lu, disks_created, instance) raise errors.OpExecError(e.message) return disks_created def ComputeDiskSizePerVG(disk_template, disks): def _compute(disks, payload): vgs = {} for disk in disks: vg_name = disk[constants.IDISK_VG] vgs[vg_name] = vgs.get(vg_name, 0) + disk[constants.IDISK_SIZE] + payload return vgs req_size_dict = { constants.DT_DISKLESS: {}, constants.DT_PLAIN: _compute(disks, 0), constants.DT_DRBD8: _compute(disks, constants.DRBD_META_SIZE), constants.DT_FILE: {}, constants.DT_SHARED_FILE: {}, constants.DT_GLUSTER: {}, } if disk_template not in req_size_dict: raise errors.ProgrammerError("Disk template '%s' size requirement" " is unknown" % disk_template) return req_size_dict[disk_template] def ComputeDisks(disks, disk_template, default_vg): new_disks = [] for disk in disks: mode = disk.get(constants.IDISK_MODE, constants.DISK_RDWR) if mode not in constants.DISK_ACCESS_SET: raise errors.OpPrereqError("Invalid disk access mode '%s'" % mode, errors.ECODE_INVAL) size = disk.get(constants.IDISK_SIZE, None) if size is None: raise errors.OpPrereqError("Missing disk size", errors.ECODE_INVAL) try: size = int(size) except (TypeError, ValueError): raise errors.OpPrereqError("Invalid disk size '%s'" % size, errors.ECODE_INVAL) CheckDiskExtProvider(disk, disk_template) data_vg = disk.get(constants.IDISK_VG, default_vg) name = disk.get(constants.IDISK_NAME, None) if name is not None and name.lower() == constants.VALUE_NONE: name = None new_disk = { constants.IDISK_SIZE: size, constants.IDISK_MODE: mode, constants.IDISK_VG: data_vg, constants.IDISK_NAME: name, constants.IDISK_TYPE: disk_template, } for key in [ constants.IDISK_METAVG, constants.IDISK_ADOPT, constants.IDISK_SPINDLES, ]: if key in disk: new_disk[key] = disk[key] if (disk_template in constants.DTS_HAVE_ACCESS and constants.IDISK_ACCESS in disk): new_disk[constants.IDISK_ACCESS] = disk[constants.IDISK_ACCESS] if disk_template == constants.DT_EXT: new_disk[constants.IDISK_PROVIDER] = disk[constants.IDISK_PROVIDER] for key in disk: if key not in constants.IDISK_PARAMS: new_disk[key] = disk[key] new_disks.append(new_disk) return new_disks def ComputeDisksInfo(disks, disk_template, default_vg, ext_params): for key in ext_params: if key != constants.IDISK_PROVIDER: assert key not in constants.IDISK_PARAMS, "Invalid extstorage parameter '%s'" % key inst_disks = [dict((key, value) for key, value in disk.items() if key in constants.IDISK_PARAMS) 
for disk in map(objects.Disk.ToDict, disks)] for disk in inst_disks: disk.update(ext_params) new_disks = ComputeDisks(inst_disks, disk_template, default_vg) for disk, new_disk in zip(disks, new_disks): if (disk.dev_type == disk_template and disk_template == constants.DT_EXT): provider = new_disk[constants.IDISK_PROVIDER] if provider == disk.params[constants.IDISK_PROVIDER]: raise errors.OpPrereqError("Not converting, '%s' of type ExtStorage" " already using provider '%s'" % (disk.iv_name, provider), errors.ECODE_INVAL) if (disk_template in constants.DTS_HAVE_ACCESS and constants.IDISK_ACCESS in disk.params): new_disk[constants.IDISK_ACCESS] = disk.params[constants.IDISK_ACCESS] if disk_template in constants.DTS_LVM: if disk.dev_type == constants.DT_PLAIN: new_disk[constants.IDISK_VG] = disk.logical_id[0] elif disk.dev_type == constants.DT_DRBD8: new_disk[constants.IDISK_VG] = disk.children[0].logical_id[0] return new_disks def CalculateFileStorageDir(disk_type, cfg, instance_name, file_storage_dir=None): instance_file_storage_dir = None if disk_type in constants.DTS_FILEBASED: joinargs = [] cfg_storage = None if disk_type == constants.DT_FILE: cfg_storage = cfg.GetFileStorageDir() elif disk_type == constants.DT_SHARED_FILE: cfg_storage = cfg.GetSharedFileStorageDir() elif disk_type == constants.DT_GLUSTER: cfg_storage = cfg.GetGlusterStorageDir() if not cfg_storage: raise errors.OpPrereqError( "Cluster file storage dir for {tpl} storage type not defined".format( tpl=repr(disk_type) ), errors.ECODE_STATE) joinargs.append(cfg_storage) if file_storage_dir is not None: joinargs.append(file_storage_dir) if disk_type != constants.DT_GLUSTER: joinargs.append(instance_name) if len(joinargs) > 1: instance_file_storage_dir = utils.PathJoin(*joinargs) else: instance_file_storage_dir = joinargs[0] return instance_file_storage_dir def CheckRADOSFreeSpace(): pass
BSD 2-Clause Simplified License
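_GenerateDRBD8Branch above builds a three-disk tree: a data LV and a metadata LV as children of a DRBD8 device. Below is a simplified, hypothetical sketch of that shape using plain dataclasses instead of Ganeti's objects.Disk, just to make the parent/children structure explicit; the 128 MiB metadata size and the node names are assumptions.

from dataclasses import dataclass, field
from typing import List, Tuple

# Stand-in for ganeti.objects.Disk; not the real class.
@dataclass
class DiskSketch:
    dev_type: str
    size: int
    logical_id: Tuple
    children: List["DiskSketch"] = field(default_factory=list)
    iv_name: str = ""

def drbd8_branch_sketch(size, vgnames, names, iv_name):
    data = DiskSketch("plain", size, (vgnames[0], names[0]))
    meta = DiskSketch("plain", 128, (vgnames[1], names[1]))   # metadata size is an assumption
    return DiskSketch("drbd8", size, ("nodeA", "nodeB"), [data, meta], iv_name)

disk = drbd8_branch_sketch(1024, ("xenvg", "xenvg"), ("data_lv", "meta_lv"), "disk/0")
print(disk.dev_type, [child.logical_id for child in disk.children])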
krfricke/rl-benchmark
rl_benchmark/data/benchmark_data.py
BenchmarkData.from_file_or_hash
python
def from_file_or_hash(benchmark_lookup, db=None): if isinstance(db, list): dbs = db else: dbs = [db] if isinstance(benchmark_lookup, str) and len(benchmark_lookup) == 40: for db in dbs: if not db: continue benchmark_data = db.get_benchmark(benchmark_lookup) if benchmark_data: return benchmark_data if hasattr(benchmark_lookup, 'readline') or os.path.exists(benchmark_lookup): return BenchmarkData.from_file(benchmark_lookup) else: raise ValueError("Could not find benchmark in db and fs: {}".format(benchmark_lookup))
Load benchmark data from file or hash. First checks the database(s) for the hash, then the filesystem. Returns the first match. Args: benchmark_lookup: string of filename, or file object, or local db hash db: `BenchmarkDatabase` object or list of `BenchmarkDatabase` objects Returns: BenchmarkData object
https://github.com/krfricke/rl-benchmark/blob/73f16255452beb2cd91f7f1d1a6b0e76a3a511c8/rl_benchmark/data/benchmark_data.py#L42-L70
from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import os import pickle from rl_benchmark.data import ExperimentData class BenchmarkData(list): def __iter__(self): for item in super(BenchmarkData, self).__iter__(): yield ExperimentData(item) def __getitem__(self, item): return ExperimentData(super(BenchmarkData, self).__getitem__(item)) def min_x(self, var): values = list() for experiment_data in self: values.append(np.max(experiment_data.extended_results()[var])) return np.min(values) @staticmethod
Apache License 2.0
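A hedged usage sketch for BenchmarkData.from_file_or_hash above: pass either a benchmark file on disk or a 40-character hash together with one or more databases. The file path, file format, and db objects below are placeholders.

from rl_benchmark.data.benchmark_data import BenchmarkData

# Load directly from a file on disk (path and format are placeholders).
data = BenchmarkData.from_file_or_hash('results/my_benchmark.pkl')

# Or look a 40-character hash up in one or more databases first (db objects are placeholders):
# data = BenchmarkData.from_file_or_hash('a' * 40, db=[local_db, remote_db])

# Iterating yields ExperimentData objects, per BenchmarkData.__iter__.
for experiment in data:
    print(experiment)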
tcalmant/ipopo
pelix/framework.py
Bundle._registered_service
python
def _registered_service(self, registration): with self.__registration_lock: self.__registered_services.add(registration)
Bundle is notified by the framework that a service has been registered in the name of this bundle. :param registration: The service registration object
https://github.com/tcalmant/ipopo/blob/1d4b81207e67890dfccc8f562336c7104f194c17/pelix/framework.py#L267-L276
import collections import importlib import inspect import logging import os import sys import threading import uuid try: from typing import Any, List, Optional, Set, Union import types except ImportError: pass from pelix.constants import ( ACTIVATOR, ACTIVATOR_LEGACY, FRAMEWORK_UID, OSGI_FRAMEWORK_UUID, BundleException, FrameworkException, ) from pelix.internals.events import BundleEvent, ServiceEvent from pelix.internals.registry import ( EventDispatcher, ServiceRegistry, ServiceReference, ServiceRegistration, ) from pelix.utilities import is_string if hasattr(importlib, "reload"): def reload_module(module_): return importlib.reload(module_) else: import imp def reload_module(module_): return imp.reload(module_) def walk_modules(path): if path is None or not os.path.isdir(path): return yielded = set() try: file_names = os.listdir(path) except OSError: file_names = [] file_names.sort() for filename in file_names: modname = inspect.getmodulename(filename) if modname == "__init__" or modname in yielded: continue file_path = os.path.join(path, filename) is_package = False if not modname and os.path.isdir(file_path) and "." not in filename: modname = filename try: dir_contents = os.listdir(file_path) except OSError: dir_contents = [] for sub_filename in dir_contents: sub_name = inspect.getmodulename(sub_filename) if sub_name == "__init__": is_package = True break else: continue if modname and "." not in modname: yielded.add(modname) yield modname, is_package __version_info__ = (1, 0, 1) __version__ = ".".join(str(x) for x in __version_info__) __docformat__ = "restructuredtext en" _logger = logging.getLogger("pelix.main") class Bundle(object): __slots__ = ( "_lock", "__context", "__id", "__module", "__name", "__framework", "_state", "__registered_services", "__registration_lock", ) UNINSTALLED = 1 INSTALLED = 2 RESOLVED = 4 STARTING = 8 STOPPING = 16 ACTIVE = 32 def __init__(self, framework, bundle_id, name, module_): self._lock = threading.RLock() self.__context = BundleContext(framework, self) self.__id = bundle_id self.__module = module_ self.__name = name self.__framework = framework self._state = Bundle.RESOLVED self.__registered_services = set() self.__registration_lock = threading.Lock() def __str__(self): return "Bundle(ID={0}, Name={1})".format(self.__id, self.__name) def __get_activator_method(self, method_name): activator = getattr(self.__module, ACTIVATOR, None) if activator is None: activator = getattr(self.__module, ACTIVATOR_LEGACY, None) if activator is not None: _logger.warning( "Bundle %s uses the deprecated '%s' to declare" " its activator. Use @BundleActivator instead.", self.__name, ACTIVATOR_LEGACY, ) return getattr(activator, method_name, None) def _fire_bundle_event(self, kind): self.__framework._dispatcher.fire_bundle_event(BundleEvent(kind, self))
Apache License 2.0
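Bundle._registered_service above is the framework-facing half of a lock-guarded registration set. Below is a generic sketch of that pattern in isolation, independent of Pelix's classes.

import threading

class RegistrationTracker:
    """Minimal stand-in showing the lock-guarded set used by Bundle."""

    def __init__(self):
        self._lock = threading.Lock()
        self._registrations = set()

    def registered(self, registration):
        # Mirrors Bundle._registered_service: add under the lock.
        with self._lock:
            self._registrations.add(registration)

    def unregistered(self, registration):
        # Counterpart removal, also under the lock.
        with self._lock:
            self._registrations.discard(registration)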
trojai/trojai
trojai/modelgen/config.py
ModelGeneratorConfig.__getstate__
python
def __getstate__(self): return {'arch_factory': self.arch_factory, 'data': self.data, 'model_save_dir': self.model_save_dir, 'stats_save_dir': self.stats_save_dir, 'num_models': self.num_models, 'arch_factory_kwargs': self.arch_factory_kwargs, 'arch_factory_kwargs_generator': self.arch_factory_kwargs_generator, 'parallel': self.parallel, 'amp': self.amp, 'experiment_cfg': self.experiment_cfg, 'run_ids': self.run_ids, 'filenames': self.filenames, 'save_with_hash': self.save_with_hash }
Function which dictates which objects will be saved when pickling the ModelGeneratorConfig object. This is only useful for the UGEModelGenerator, which needs to save the data before parallelizing a job. :return: a dictionary of the state of the ModelGeneratorConfig object.
https://github.com/trojai/trojai/blob/843f63aa1f7027139bfc97feac1be8433b1f7f69/trojai/modelgen/config.py#L913-L932
import collections.abc import copy import importlib import logging import os from abc import ABC, abstractmethod from typing import Callable from typing import Union, Sequence, Any import math import cloudpickle as pickle import numpy as np import torch from .architecture_factory import ArchitectureFactory from .constants import VALID_LOSS_FUNCTIONS, VALID_DEVICES, VALID_OPTIMIZERS from .data_manager import DataManager from .optimizer_interface import OptimizerInterface logger = logging.getLogger(__name__) def identity_function(x): return x default_soft_to_hard_fn_kwargs = dict() class DefaultSoftToHardFn: def __init__(self): pass def __call__(self, y_hat, *args, **kwargs): return torch.argmax(y_hat, dim=1) def __repr__(self): return "torch.argmax(y_hat, dim=1)" class ConfigInterface(ABC): @abstractmethod def __deepcopy__(self, memodict={}): pass class OptimizerConfigInterface(ConfigInterface): @abstractmethod def get_device_type(self): pass def save(self, fname): pass @staticmethod @abstractmethod def load(fname): pass class EarlyStoppingConfig(ConfigInterface): def __init__(self, num_epochs: int = 5, val_loss_eps: float = 1e-3): self.num_epochs = num_epochs self.val_loss_eps = val_loss_eps self.validate() def validate(self): if not isinstance(self.num_epochs, int) or self.num_epochs < 2: msg = "num_epochs to monitor must be an integer > 1!" logger.error(msg) raise ValueError(msg) try: self.val_loss_eps = float(self.val_loss_eps) except ValueError: msg = "val_loss_eps must be a float" logger.error(msg) raise ValueError(msg) if self.val_loss_eps < 0: msg = "val_loss_eps must be >= 0!" logger.error(msg) raise ValueError(msg) def __deepcopy__(self, memodict={}): return EarlyStoppingConfig(self.num_epochs, self.val_loss_eps) def __eq__(self, other): if self.num_epochs == other.num_epochs and math.isclose(self.val_loss_eps, other.val_acc_eps): return True else: return False def __str__(self): return "ES[%d:%0.02f]" % (self.num_epochs, self.val_loss_eps) class TrainingConfig(ConfigInterface): def __init__(self, device: Union[str, torch.device] = 'cpu', epochs: int = 10, batch_size: int = 32, lr: float = 1e-4, optim: Union[str, OptimizerInterface] = 'adam', optim_kwargs: dict = None, objective: Union[str, Callable] = 'cross_entropy_loss', objective_kwargs: dict = None, save_best_model: bool = False, train_val_split: float = 0.05, val_data_transform: Callable[[Any], Any] = None, val_label_transform: Callable[[int], int] = None, val_dataloader_kwargs: dict = None, early_stopping: EarlyStoppingConfig = None, soft_to_hard_fn: Callable = None, soft_to_hard_fn_kwargs: dict = None, lr_scheduler: Any = None, lr_scheduler_init_kwargs: dict = None, lr_scheduler_call_arg: Any = None, clip_grad: bool = False, clip_type: str = "norm", clip_val: float = 1., clip_kwargs: dict = None, adv_training_eps: float = None, adv_training_iterations: int = None, adv_training_ratio: float = None) -> None: self.device = device self.epochs = epochs self.batch_size = batch_size self.lr = lr self.optim = optim self.optim_kwargs = optim_kwargs self.objective = objective self.objective_kwargs = objective_kwargs self.save_best_model = save_best_model self.train_val_split = train_val_split self.early_stopping = early_stopping self.val_data_transform = val_data_transform self.val_label_transform = val_label_transform self.val_dataloader_kwargs = val_dataloader_kwargs self.soft_to_hard_fn = soft_to_hard_fn self.soft_to_hard_fn_kwargs = soft_to_hard_fn_kwargs self.lr_scheduler = lr_scheduler self.lr_scheduler_init_kwargs = 
lr_scheduler_init_kwargs self.lr_scheduler_call_arg = lr_scheduler_call_arg self.clip_grad = clip_grad self.clip_type = clip_type self.clip_val = clip_val self.clip_kwargs = clip_kwargs self.adv_training_eps = adv_training_eps self.adv_training_iterations = adv_training_iterations self.adv_training_ratio = adv_training_ratio if self.adv_training_eps is None: self.adv_training_eps = float(0.0) if self.adv_training_ratio is None: self.adv_training_ratio = float(0.0) if self.adv_training_iterations is None: self.adv_training_iterations = int(0) if self.optim_kwargs is None: self.optim_kwargs = {} if self.lr_scheduler_init_kwargs is None: self.lr_scheduler_init_kwargs = {} if self.clip_kwargs is None: self.clip_kwargs = {} self.validate() if isinstance(self.device, str): self.device = torch.device(self.device) def validate(self) -> None: if not isinstance(self.device, torch.device) and self.device not in VALID_DEVICES: msg = "device must be either a torch.device object, or one of the following:" + str(VALID_DEVICES) logger.error(msg) raise ValueError(msg) if not isinstance(self.epochs, int) or self.epochs < 1: msg = "epochs must be an integer > 0" logger.error(msg) raise ValueError(msg) if not isinstance(self.batch_size, int) or self.batch_size < 1: msg = "batch_size must be an integer > 0" logger.error(msg) raise ValueError(msg) if not isinstance(self.lr, float): msg = "lr must be a float!" logger.error(msg) raise ValueError(msg) if not isinstance(self.optim, OptimizerInterface) and self.optim not in VALID_OPTIMIZERS: msg = "optim must be either a OptimizerInterface object, or one of the following:" + str(VALID_OPTIMIZERS) logger.error(msg) raise ValueError(msg) if not isinstance(self.optim_kwargs, dict): msg = "optim_kwargs must be a dictionary!" logger.error(msg) raise ValueError(msg) if not callable(self.objective) and self.objective not in VALID_LOSS_FUNCTIONS: msg = "objective must be a callable, or one of the following:" + str(VALID_LOSS_FUNCTIONS) logger.error(msg) raise ValueError(msg) if not self.objective_kwargs: self.objective_kwargs = dict() elif not isinstance(self.objective_kwargs, dict): msg = "objective_kwargs must be a dictionary" logger.error(msg) raise ValueError(msg) if not isinstance(self.save_best_model, bool): msg = "save_best_model must be a boolean!" logger.error(msg) raise ValueError(msg) if not isinstance(self.train_val_split, float): msg = "train_val_split must a float between 0 and 1!" 
logger.error(msg) raise ValueError(msg) else: if self.train_val_split < 0 or self.train_val_split > 1: msg = "train_val_split must be between 0 and 1, inclusive" logger.error(msg) raise ValueError(msg) if self.early_stopping is not None and not isinstance(self.early_stopping, EarlyStoppingConfig): msg = "early_stopping must be of type EarlyStoppingConfig or None" logger.error(msg) raise ValueError(msg) if self.adv_training_eps < 0 or self.adv_training_eps > 1: msg = "Adversarial training eps: {} must be between 0 and 1.".format(self.adv_training_eps) logger.error(msg) raise ValueError(msg) if self.adv_training_ratio < 0 or self.adv_training_ratio > 1: msg = "Adversarial training ratio (percent of images with perturbation applied): {} must be between 0 and 1.".format(self.adv_training_ratio) logger.error(msg) raise ValueError(msg) if self.adv_training_iterations < 0: msg = "Adversarial training iteration count: {} must be greater than or equal to 0.".format(self.adv_training_iterations) logger.error(msg) raise ValueError(msg) if self.val_data_transform is not None and not callable(self.val_data_transform): raise TypeError("Expected a function for argument 'val_data_transform', " "instead got type: {}".format(type(self.val_data_transform))) if self.val_label_transform is not None and not callable(self.val_label_transform): raise TypeError("Expected a function for argument 'val_label_transform', " "instead got type: {}".format(type(self.val_label_transform))) if self.val_dataloader_kwargs is not None and not isinstance(self.val_dataloader_kwargs, dict): msg = "val_dataloader_kwargs must be a dictionary or None!" logger.error(msg) raise ValueError(msg) if self.soft_to_hard_fn is None: self.soft_to_hard_fn = DefaultSoftToHardFn() elif not callable(self.soft_to_hard_fn): msg = "soft_to_hard_fn must be a callable which accepts as input the output of the model, and outputs " "hard-decisions" logger.error(msg) raise ValueError(msg) if self.soft_to_hard_fn_kwargs is None: self.soft_to_hard_fn_kwargs = copy.deepcopy(default_soft_to_hard_fn_kwargs) elif not isinstance(self.soft_to_hard_fn_kwargs, dict): msg = "soft_to_hard_fn_kwargs must be a dictionary of kwargs to pass to soft_to_hard_fn" logger.error(msg) raise ValueError(msg) if self.lr_scheduler_call_arg is not None and self.lr_scheduler_call_arg != 'val_acc' and self.lr_scheduler_call_arg != 'val_loss': msg = "lr_scheduler_call_arg must be one of: None, val_acc, val_loss" logger.error(msg) raise ValueError(msg) if not isinstance(self.clip_grad, bool): msg = "clip_grad must be a bool!" 
logger.error(msg) raise ValueError(msg) if not isinstance(self.clip_type, str) or (self.clip_type != 'norm' and self.clip_type != 'val'): msg = "clip type must be a string, either norm or val" logger.error(msg) raise ValueError(msg) if not isinstance(self.clip_val, float): msg = "clip_val must be a float" logger.error(msg) raise ValueError(msg) if not isinstance(self.clip_kwargs, dict): msg = "clip_kwargs must be a dict" logger.error(msg) raise ValueError(msg) def get_cfg_as_dict(self): output_dict = dict(device=str(self.device.type), epochs=self.epochs, batch_size=self.batch_size, learning_rate=self.lr, optim=self.optim, objective=self.objective, objective_kwargs=self.objective_kwargs, save_best_model=self.save_best_model, early_stopping=str(self.early_stopping), val_data_transform=self.val_data_transform, val_label_transform=self.val_label_transform, val_dataloader_kwargs=self.val_dataloader_kwargs, soft_to_hard_fn=self.soft_to_hard_fn, soft_to_hard_fn_kwargs=self.soft_to_hard_fn_kwargs, lr_scheduler=self.lr_scheduler, lr_scheduler_init_kwargs=self.lr_scheduler_init_kwargs, lr_scheduler_call_arg=self.lr_scheduler_call_arg, clip_grad=self.clip_grad, clip_type=self.clip_type, clip_val=self.clip_val, clip_kwargs=self.clip_kwargs, adv_training_eps = self.adv_training_eps, adv_training_iterations = self.adv_training_iterations, adv_training_ratio = self.adv_training_ratio) return output_dict def __str__(self): str_repr = "TrainingConfig: device[%s], num_epochs[%d], batch_size[%d], learning_rate[%.5e], adv_training_eps[%s], adv_training_iterations[%s], adv_training_ratio[%s], optimizer[%s], " "objective[%s], objective_kwargs[%s], train_val_split[%0.02f], val_data_transform[%s], " "val_label_transform[%s], val_dataloader_kwargs[%s], early_stopping[%s], " "soft_to_hard_fn[%s], soft_to_hard_fn_kwargs[%s], " "lr_scheduler[%s], lr_scheduler_init_kwargs[%s], lr_scheduler_call_arg[%s], " "clip_grad[%s] clip_type[%s] clip_val[%s] clip_kwargs[%s]" % (str(self.device.type), self.epochs, self.batch_size, self.lr, self.adv_training_eps, self.adv_training_iterations, self.adv_training_ratio, str(self.optim), str(self.objective), str( self.objective_kwargs), self.train_val_split, str(self.val_data_transform), str(self.val_label_transform), str( self.val_dataloader_kwargs), str(self.early_stopping), str(self.soft_to_hard_fn), str( self.soft_to_hard_fn_kwargs), str(self.lr_scheduler), str(self.lr_scheduler_init_kwargs), str( self.lr_scheduler_call_arg), str(self.clip_grad), str(self.clip_type), str(self.clip_val), str(self.clip_kwargs)) return str_repr def __deepcopy__(self, memodict={}): new_device = self.device.type epochs = self.epochs batch_size = self.batch_size lr = self.lr save_best_model = self.save_best_model train_val_split = self.train_val_split early_stopping = copy.deepcopy(self.early_stopping) val_data_transform = copy.deepcopy(self.val_data_transform) val_label_transform = copy.deepcopy(self.val_label_transform) val_dataloader_kwargs = copy.deepcopy(self.val_dataloader_kwargs) if isinstance(self.optim, str): optim = self.optim elif isinstance(self.optim, OptimizerInterface): optim = copy.deepcopy(self.optim) else: msg = "The TrainingConfig object you are trying to copy is corrupted!" logger.error(msg) raise ValueError(msg) optim_kwargs = self.optim_kwargs if isinstance(self.objective, str): objective = self.objective elif callable(self.objective): objective = copy.deepcopy(self.objective) else: msg = "The TrainingConfig object you are trying to copy is corrupted!" 
logger.error(msg) raise ValueError(msg) objective_kwargs = self.objective_kwargs soft_to_hard_fn = copy.deepcopy(self.soft_to_hard_fn) soft_to_hard_fn_kwargs = copy.deepcopy(self.soft_to_hard_fn_kwargs) lr_scheduler = self.lr_scheduler lr_scheduler_kwargs = copy.deepcopy(self.lr_scheduler_init_kwargs) lr_scheduler_call_arg = self.lr_scheduler_call_arg clip_grad = self.clip_grad clip_type = self.clip_type clip_val = self.clip_val clip_kwargs = copy.deepcopy(self.clip_kwargs) adv_training_eps = self.adv_training_eps adv_training_iterations = self.adv_training_iterations adv_training_ratio = self.adv_training_ratio return TrainingConfig(new_device, epochs, batch_size, lr, optim, optim_kwargs, objective, objective_kwargs, save_best_model, train_val_split, val_data_transform, val_label_transform, val_dataloader_kwargs, early_stopping, soft_to_hard_fn, soft_to_hard_fn_kwargs, lr_scheduler, lr_scheduler_kwargs, lr_scheduler_call_arg, clip_grad, clip_type, clip_val, clip_kwargs, adv_training_eps, adv_training_iterations, adv_training_ratio) def __eq__(self, other): if self.device.type == other.device.type and self.epochs == other.epochs and self.batch_size == other.batch_size and self.lr == other.lr and self.save_best_model == other.save_best_model and self.train_val_split == other.train_val_split and self.early_stopping == other.early_stopping and self.val_data_transform == other.val_data_transform and self.val_label_transform == other.val_label_transform and self.val_dataloader_kwargs == other.val_dataloader_kwargs and self.soft_to_hard_fn_kwargs == other.soft_to_hard_fn_kwargs and self.lr_scheduler_init_kwargs == other.lr_scheduler_init_kwargs and self.lr_scheduler_call_arg == other.lr_scheduler_call_arg and self.clip_grad == other.clip_grad and self.clip_type == other.clip_type and self.adv_training_eps == other.adv_training_eps and self.adv_training_iterations == other.adv_training_iterations and self.adv_training_ratio == other.adv_training_ratio and self.clip_val == other.clip_val and self.clip_kwargs == other.clip_kwargs: if self.optim == other.optim and self.objective == other.objective: return True else: return False else: return False class ReportingConfig(ConfigInterface): def __init__(self, num_batches_per_logmsg: int = 100, disable_progress_bar: bool = False, num_epochs_per_metric: int = 1, num_batches_per_metrics: int = 50, tensorboard_output_dir: str = None, experiment_name: str = 'experiment'): self.num_batches_per_logmsg = num_batches_per_logmsg self.disable_progress_bar = disable_progress_bar self.num_epochs_per_metrics = num_epochs_per_metric self.num_batches_per_metrics = num_batches_per_metrics self.tensorboard_output_dir = tensorboard_output_dir self.experiment_name = experiment_name self.validate() def validate(self): if not isinstance(self.num_batches_per_logmsg, int) or self.num_batches_per_logmsg < 0: msg = "num_batches_per_logmsg must be an integer > 0" logger.error(msg) raise ValueError(msg) if not isinstance(self.num_epochs_per_metrics, int) or self.num_epochs_per_metrics < 0: msg = "num_epochs_per_metrics must be an integer > 0" logger.error(msg) raise ValueError(msg) if self.num_batches_per_metrics is not None and (not isinstance(self.num_batches_per_metrics, int) or self.num_batches_per_metrics < 0): msg = "num_batches_per_metrics must be an integer > 0 or None!" 
logger.error(msg) raise ValueError(msg) def __str__(self): str_repr = "ReportingConfig: num_batches/log_msg[%d], num_epochs/metric[%d], num_batches/metric[%d], " "tensorboard_dir[%s] experiment_name=[%s], disable_progress_bar=[%s]" % (self.num_batches_per_logmsg, self.num_epochs_per_metrics, self.num_batches_per_metrics, self.tensorboard_output_dir, self.experiment_name, self.disable_progress_bar) return str_repr def __copy__(self): return ReportingConfig(self.num_batches_per_logmsg, self.disable_progress_bar, self.num_epochs_per_metrics, self.num_batches_per_metrics, self.tensorboard_output_dir, self.experiment_name) def __deepcopy__(self, memodict={}): return self.__copy__() def __eq__(self, other): if self.num_batches_per_logmsg == other.num_batches_per_logmsg and self.disable_progress_bar == other.disable_progress_bar and self.num_epochs_per_metrics == other.num_epochs_per_metrics and self.num_batches_per_metrics == other.num_batches_per_metrics and self.tensorboard_output_dir == other.tensorboard_output_dir and self.experiment_name == other.experiment_name: return True else: return False class TorchTextOptimizerConfig(OptimizerConfigInterface): def __init__(self, training_cfg: TrainingConfig = None, reporting_cfg: ReportingConfig = None, copy_pretrained_embeddings: bool = False): self.training_cfg = training_cfg self.reporting_cfg = reporting_cfg self.copy_pretrained_embeddings = copy_pretrained_embeddings self.validate() def validate(self): if self.training_cfg is None: logger.debug( "Using default training configuration to setup Optimizer!") self.training_cfg = TrainingConfig() elif not isinstance(self.training_cfg, TrainingConfig): msg = "training_cfg must be of type TrainingConfig" logger.error(msg) raise TypeError(msg) if self.reporting_cfg is None: logger.debug( "Using default reporting configuration to setup Optimizer!") self.reporting_cfg = ReportingConfig() elif not isinstance(self.reporting_cfg, ReportingConfig): msg = "reporting_cfg must be of type ReportingConfig" logger.error(msg) raise TypeError(msg) if not isinstance(self.copy_pretrained_embeddings, bool): msg = "copy_pretrained_embeddings must be a boolean datatype!" 
logger.error(msg) raise TypeError(msg) def __deepcopy__(self, memodict={}): training_cfg_copy = copy.deepcopy(self.training_cfg) reporting_cfg_copy = copy.deepcopy(self.reporting_cfg) return TorchTextOptimizerConfig(training_cfg_copy, reporting_cfg_copy, self.copy_pretrained_embeddings) def __eq__(self, other): if self.training_cfg == other.training_cfg and self.reporting_cfg == other.reporting_cfg and self.copy_pretrained_embeddings == other.copy_pretrained_embeddings: return True else: return False def save(self, fname): with open(fname, 'wb') as f: pickle.dump(self, f) @staticmethod def load(fname): with open(fname, 'rb') as f: loaded_optimzier_cfg = pickle.load(f) return loaded_optimzier_cfg def get_device_type(self): return str(self.training_cfg.device) class DefaultOptimizerConfig(OptimizerConfigInterface): def __init__(self, training_cfg: TrainingConfig = None, reporting_cfg: ReportingConfig = None): if training_cfg is None: logger.debug( "Using default training configuration to setup Optimizer!") self.training_cfg = TrainingConfig() elif not isinstance(training_cfg, TrainingConfig): msg = "training_cfg must be of type TrainingConfig" logger.error(msg) raise TypeError(msg) else: self.training_cfg = training_cfg if reporting_cfg is None: logger.debug( "Using default reporting configuration to setup Optimizer!") self.reporting_cfg = ReportingConfig() elif not isinstance(reporting_cfg, ReportingConfig): msg = "reporting_cfg must be of type ReportingConfig" logger.error(msg) raise TypeError(msg) else: self.reporting_cfg = reporting_cfg def __deepcopy__(self, memodict={}): training_cfg_copy = copy.deepcopy(self.training_cfg) reporting_cfg_copy = copy.deepcopy(self.reporting_cfg) return DefaultOptimizerConfig(training_cfg_copy, reporting_cfg_copy) def __eq__(self, other): if self.training_cfg == other.training_cfg and self.reporting_cfg == other.reporting_cfg: return True else: return False def get_device_type(self): return str(self.training_cfg.device) def save(self, fname): with open(fname, 'wb') as f: pickle.dump(self, f) @staticmethod def load(fname): with open(fname, 'rb') as f: loaded_optimzier_cfg = pickle.load(f) return loaded_optimzier_cfg class ModelGeneratorConfig(ConfigInterface): def __init__(self, arch_factory: ArchitectureFactory, data: DataManager, model_save_dir: str, stats_save_dir: str, num_models: int, arch_factory_kwargs: dict = None, arch_factory_kwargs_generator: Callable = None, optimizer: Union[Union[OptimizerInterface, DefaultOptimizerConfig], Sequence[Union[OptimizerInterface, DefaultOptimizerConfig]]] = None, parallel=False, amp=False, experiment_cfg: dict = None, run_ids: Union[Any, Sequence[Any]] = None, filenames: Union[str, Sequence[str]] = None, save_with_hash: bool = False): self.arch_factory = arch_factory self.arch_factory_kwargs = arch_factory_kwargs self.arch_factory_kwargs_generator = arch_factory_kwargs_generator self.data = data self.model_save_dir = model_save_dir self.stats_save_dir = stats_save_dir self.num_models = num_models self.optimizer = optimizer self.parallel = parallel self.amp = amp self.experiment_cfg = dict() if experiment_cfg is None else experiment_cfg self.run_ids = run_ids self.filenames = filenames self.save_with_hash = save_with_hash self.validate() def __deepcopy__(self, memodict={}): arch_factory_copy = copy.deepcopy( self.arch_factory) data_copy = copy.deepcopy(self.data) optimizer_copy = copy.deepcopy(self.optimizer) return ModelGeneratorConfig(arch_factory_copy, data_copy, self.model_save_dir, self.stats_save_dir, 
self.num_models, self.arch_factory_kwargs, self.arch_factory_kwargs_generator, optimizer_copy, self.parallel, self.amp, self.experiment_cfg, self.run_ids, self.filenames, self.save_with_hash) def __eq__(self, other): if self.arch_factory == other.arch_factory and self.data == other.data and self.optimizer == other.optimizer and self.parallel == other.parallel and self.amp == other.amp and self.model_save_dir == other.model_save_dir and self.stats_save_dir == other.stats_save_dir and self.arch_factory_kwargs == other.arch_factory_kwargs and self.arch_factory_kwargs_generator == other.arch_factory_kwargs_generator and self.experiment_cfg == other.experiment_cfg and self.run_ids == other.run_ids and self.filenames == other.filenames and self.save_with_hash == other.save_with_hash: return True else: return False def validate(self) -> None: if not (isinstance(self.arch_factory, ArchitectureFactory)): msg = "Expected an ArchitectureFactory object for argument 'architecture_factory', " "instead got type: {}".format(type(self.arch_factory)) logger.error(msg) raise TypeError(msg) if self.arch_factory_kwargs is not None and not isinstance(self.arch_factory_kwargs, dict): msg = "Expected dictionary for arch_factory_kwargs" logger.error(msg) raise TypeError(msg) if self.arch_factory_kwargs_generator is not None and not callable(self.arch_factory_kwargs_generator): msg = "arch_factory_kwargs_generator must be a Callable!" logger.error(msg) raise TypeError(msg) if not (isinstance(self.data, DataManager)): msg = "Expected an TrojaiDataManager object for argument 'data', " "instead got type: {}".format(type(self.data)) logger.error(msg) raise TypeError(msg) if not type(self.model_save_dir) == str: msg = "Expected type 'string' for argument 'model_save_dir, instead got type: " "{}".format(type(self.model_save_dir)) logger.error(msg) raise TypeError(msg) if not os.path.isdir(self.model_save_dir): try: os.makedirs(self.model_save_dir) except IOError as e: msg = "'model_save_dir' was not found and could not be created" "...\n{}".format(e.__traceback__) logger.error(msg) raise IOError(msg) if not type(self.num_models) == int: msg = "Expected type 'int' for argument 'num_models, instead got type: " "{}".format(type(self.num_models)) logger.error(msg) raise TypeError(msg) if self.filenames is not None: if isinstance(self.filenames, Sequence): for filename in self.filenames: if not type(filename) == str: msg = "Encountered non-string in argument 'filenames': {}".format( filename) logger.error(msg) raise TypeError(msg) else: if not isinstance(self.filenames, str): msg = "Filename provided as prefix must be of type string!" logger.error(msg) raise TypeError(msg) if self.run_ids is not None and len(self.run_ids) != self.num_models: msg = "Argument 'run_ids' was provided, but len(run_ids) != num_models" logger.error(msg) raise RuntimeError(msg) if self.filenames is not None and len(self.filenames) != self.num_models: msg = "Argument 'filenames' was provided, but len(filenames) != num_models" logger.error(msg) raise RuntimeError(msg) if self.run_ids is not None and self.filenames is not None: msg = "Argument 'filenames' was provided with argument 'run_ids', 'run_ids' will be ignored..." logger.warning(msg) if not isinstance(self.save_with_hash, bool): msg = "Expected boolean for save_with_hash argument" logger.error(msg) raise ValueError(msg) RunnerConfig.validate_optimizer(self.optimizer, self.data) if not isinstance(self.parallel, bool): msg = "parallel argument must be a boolean!" 
logger.error(msg) raise ValueError(msg)
Apache License 2.0
gkhayes/mlrose
mlrose/opt_probs.py
OptProb.set_state
python
def set_state(self, new_state):

        if len(new_state) != self.length:
            raise Exception("""new_state length must match problem length""")

        self.state = new_state
        self.fitness = self.eval_fitness(self.state)
Change the current state vector to a specified value and get its fitness.

Parameters
----------
new_state: array
    New state vector value.
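A minimal usage sketch for this method, assuming the package's DiscreteOpt problem and OneMax fitness function are available (neither is shown in this record; the state values are illustrative):

import numpy as np
import mlrose

# Hypothetical example; DiscreteOpt and OneMax are assumed from mlrose.
problem = mlrose.DiscreteOpt(length=5, fitness_fn=mlrose.OneMax(), maximize=True, max_val=2)
problem.set_state(np.array([1, 0, 1, 1, 0]))
print(problem.get_fitness())  # fitness of the newly set state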
https://github.com/gkhayes/mlrose/blob/2a9d604ea464cccc48f30b8fe6b81fe5c4337c80/mlrose/opt_probs.py#L210-L224
import numpy as np from sklearn.metrics import mutual_info_score from scipy.sparse import csr_matrix from scipy.sparse.csgraph import minimum_spanning_tree, depth_first_tree from .fitness import TravellingSales class OptProb: def __init__(self, length, fitness_fn, maximize=True): if length < 0: raise Exception("""length must be a positive integer.""") elif not isinstance(length, int): if length.is_integer(): self.length = int(length) else: raise Exception("""length must be a positive integer.""") else: self.length = length self.state = np.array([0]*self.length) self.neighbors = [] self.fitness_fn = fitness_fn self.fitness = 0 self.population = [] self.pop_fitness = [] self.mate_probs = [] if maximize: self.maximize = 1.0 else: self.maximize = -1.0 def best_child(self): best = self.population[np.argmax(self.pop_fitness)] return best def best_neighbor(self): fitness_list = [] for neigh in self.neighbors: fitness = self.eval_fitness(neigh) fitness_list.append(fitness) best = self.neighbors[np.argmax(fitness_list)] return best def eval_fitness(self, state): if len(state) != self.length: raise Exception("state length must match problem length") fitness = self.maximize*self.fitness_fn.evaluate(state) return fitness def eval_mate_probs(self): pop_fitness = np.copy(self.pop_fitness) pop_fitness[pop_fitness == -1.0*np.inf] = 0 if np.sum(pop_fitness) == 0: self.mate_probs = np.ones(len(pop_fitness)) / len(pop_fitness) else: self.mate_probs = pop_fitness/np.sum(pop_fitness) def get_fitness(self): return self.fitness def get_length(self): return self.length def get_mate_probs(self): return self.mate_probs def get_maximize(self): return self.maximize def get_pop_fitness(self): return self.pop_fitness def get_population(self): return self.population def get_state(self): return self.state def set_population(self, new_population): self.population = new_population pop_fitness = [] for i in range(len(self.population)): fitness = self.eval_fitness(self.population[i]) pop_fitness.append(fitness) self.pop_fitness = np.array(pop_fitness)
BSD 3-Clause New or Revised License
csyxwei/ffwm
models/base_model.py
BaseModel.update_learning_rate
python
def update_learning_rate(self):
        for scheduler in self.schedulers:
            if self.opt.lr_policy == 'plateau':
                scheduler.step(self.metric)
            else:
                scheduler.step()

        lr = self.optimizers[0].param_groups[0]['lr']
        print('learning rate = %.7f' % lr)
Update learning rates for all the networks; called at the end of every epoch
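A hedged sketch of where this hook would sit in a training loop; opt, dataset, and the concrete BaseModel subclass are placeholders not defined in this record:

for epoch in range(opt.n_epochs):       # hypothetical epoch count
    for data in dataset:                # hypothetical data loader
        model.set_input(data)
        model.optimize_parameters()
    model.update_learning_rate()        # step all schedulers once per epoch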
https://github.com/csyxwei/ffwm/blob/d42c578cabe1b81c6b1bb0c3cb707b190fca3c68/models/base_model.py#L145-L154
import os import torch from collections import OrderedDict from abc import ABC, abstractmethod from . import networks class BaseModel(ABC): def __init__(self, opt): self.opt = opt self.gpu_ids = opt.gpu_ids self.isTrain = opt.isTrain self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) self.load_dir = os.path.join(opt.checkpoints_dir, opt.name) if opt.preprocess != 'scale_width': torch.backends.cudnn.benchmark = True self.loss_names = [] self.model_names = [] self.visual_names = [] self.optimizers = [] self.image_paths = [] self.metric = 0 @staticmethod def modify_commandline_options(parser, is_train): return parser def set_input(self, input): self.set_visual_name() if self.isTrain: self.set_train_input(input) else: self.set_test_input(input) @abstractmethod def forward(self): pass @abstractmethod def optimize_parameters(self): pass @abstractmethod def set_visual_name(self): pass def setup(self, opt): if self.isTrain: self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] if opt.continue_train: load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch self.load_networks(load_suffix) self.print_networks(opt.verbose) def setup4test(self, opt): load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch self.load_networks(load_suffix) return self.print_networks(opt.verbose) def set_eval(self): for name in self.model_names: if isinstance(name, str): net = getattr(self, name) net.eval() self.isTrain = False def set_train(self): for name in self.model_names: if isinstance(name, str): net = getattr(self, name) net.train() self.isTrain = True def compute_visuals(self): pass def get_image_paths(self): return self.image_paths
MIT License
spesmilo/electrum
electrum/keystore.py
MasterPublicKeyMixin.derive_pubkey
python
def derive_pubkey(self, for_change: int, n: int) -> bytes:
        pass
Returns pubkey at given path. May raise CannotDerivePubkey.
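A hedged usage sketch; ks stands for any concrete keystore implementing MasterPublicKeyMixin (constructing one is outside this record):

receiving_pubkey = ks.derive_pubkey(for_change=0, n=0)  # first receiving pubkey
change_pubkey = ks.derive_pubkey(for_change=1, n=0)     # first change pubkey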
https://github.com/spesmilo/electrum/blob/1ff9f9910f1a8d17ebba04439c1fcaf1476b3e8f/electrum/keystore.py#L375-L379
from unicodedata import normalize import hashlib import re from typing import Tuple, TYPE_CHECKING, Union, Sequence, Optional, Dict, List, NamedTuple from functools import lru_cache from abc import ABC, abstractmethod from . import bitcoin, ecc, constants, bip32 from .bitcoin import deserialize_privkey, serialize_privkey, BaseDecodeError from .transaction import Transaction, PartialTransaction, PartialTxInput, PartialTxOutput, TxInput from .bip32 import (convert_bip32_path_to_list_of_uint32, BIP32_PRIME, is_xpub, is_xprv, BIP32Node, normalize_bip32_derivation, convert_bip32_intpath_to_strpath, is_xkey_consistent_with_key_origin_info) from .ecc import string_to_number from .crypto import (pw_decode, pw_encode, sha256, sha256d, PW_HASH_VERSION_LATEST, SUPPORTED_PW_HASH_VERSIONS, UnsupportedPasswordHashVersion, hash_160) from .util import (InvalidPassword, WalletFileException, BitcoinException, bh2u, bfh, inv_dict, is_hex_str) from .mnemonic import Mnemonic, Wordlist, seed_type, is_seed from .plugin import run_hook from .logging import Logger if TYPE_CHECKING: from .gui.qt.util import TaskThread from .plugins.hw_wallet import HW_PluginBase, HardwareClientBase, HardwareHandlerBase from .wallet_db import WalletDB class CannotDerivePubkey(Exception): pass class KeyStore(Logger, ABC): type: str def __init__(self): Logger.__init__(self) self.is_requesting_to_be_rewritten_to_wallet_file = False def has_seed(self) -> bool: return False def is_watching_only(self) -> bool: return False def can_import(self) -> bool: return False def get_type_text(self) -> str: return f'{self.type}' @abstractmethod def may_have_password(self): pass def _get_tx_derivations(self, tx: 'PartialTransaction') -> Dict[str, Union[Sequence[int], str]]: keypairs = {} for txin in tx.inputs(): keypairs.update(self._get_txin_derivations(txin)) return keypairs def _get_txin_derivations(self, txin: 'PartialTxInput') -> Dict[str, Union[Sequence[int], str]]: if txin.is_complete(): return {} keypairs = {} for pubkey in txin.pubkeys: if pubkey in txin.part_sigs: continue derivation = self.get_pubkey_derivation(pubkey, txin) if not derivation: continue keypairs[pubkey.hex()] = derivation return keypairs def can_sign(self, tx: 'Transaction', *, ignore_watching_only=False) -> bool: if not ignore_watching_only and self.is_watching_only(): return False if not isinstance(tx, PartialTransaction): return False return bool(self._get_tx_derivations(tx)) def can_sign_txin(self, txin: 'TxInput', *, ignore_watching_only=False) -> bool: if not ignore_watching_only and self.is_watching_only(): return False if not isinstance(txin, PartialTxInput): return False return bool(self._get_txin_derivations(txin)) def ready_to_sign(self) -> bool: return not self.is_watching_only() @abstractmethod def dump(self) -> dict: pass @abstractmethod def is_deterministic(self) -> bool: pass @abstractmethod def sign_message(self, sequence: 'AddressIndexGeneric', message, password) -> bytes: pass @abstractmethod def decrypt_message(self, sequence: 'AddressIndexGeneric', message, password) -> bytes: pass @abstractmethod def sign_transaction(self, tx: 'PartialTransaction', password) -> None: pass @abstractmethod def get_pubkey_derivation(self, pubkey: bytes, txinout: Union['PartialTxInput', 'PartialTxOutput'], *, only_der_suffix=True) -> Union[Sequence[int], str, None]: pass def find_my_pubkey_in_txinout( self, txinout: Union['PartialTxInput', 'PartialTxOutput'], *, only_der_suffix: bool = False ) -> Tuple[Optional[bytes], Optional[List[int]]]: for pubkey in 
txinout.bip32_paths: path = self.get_pubkey_derivation(pubkey, txinout, only_der_suffix=only_der_suffix) if path and not isinstance(path, (str, bytes)): return pubkey, list(path) return None, None def can_have_deterministic_lightning_xprv(self) -> bool: return False class Software_KeyStore(KeyStore): def __init__(self, d): KeyStore.__init__(self) self.pw_hash_version = d.get('pw_hash_version', 1) if self.pw_hash_version not in SUPPORTED_PW_HASH_VERSIONS: raise UnsupportedPasswordHashVersion(self.pw_hash_version) def may_have_password(self): return not self.is_watching_only() def sign_message(self, sequence, message, password) -> bytes: privkey, compressed = self.get_private_key(sequence, password) key = ecc.ECPrivkey(privkey) return key.sign_message(message, compressed) def decrypt_message(self, sequence, message, password) -> bytes: privkey, compressed = self.get_private_key(sequence, password) ec = ecc.ECPrivkey(privkey) decrypted = ec.decrypt_message(message) return decrypted def sign_transaction(self, tx, password): if self.is_watching_only(): return self.check_password(password) keypairs = self._get_tx_derivations(tx) for k, v in keypairs.items(): keypairs[k] = self.get_private_key(v, password) if keypairs: tx.sign(keypairs) @abstractmethod def update_password(self, old_password, new_password): pass @abstractmethod def check_password(self, password): pass @abstractmethod def get_private_key(self, sequence: 'AddressIndexGeneric', password) -> Tuple[bytes, bool]: pass class Imported_KeyStore(Software_KeyStore): type = 'imported' def __init__(self, d): Software_KeyStore.__init__(self, d) self.keypairs = d.get('keypairs', {}) def is_deterministic(self): return False def dump(self): return { 'type': self.type, 'keypairs': self.keypairs, 'pw_hash_version': self.pw_hash_version, } def can_import(self): return True def check_password(self, password): pubkey = list(self.keypairs.keys())[0] self.get_private_key(pubkey, password) def import_privkey(self, sec, password): txin_type, privkey, compressed = deserialize_privkey(sec) pubkey = ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed) serialized_privkey = serialize_privkey( privkey, compressed, txin_type, internal_use=True) self.keypairs[pubkey] = pw_encode(serialized_privkey, password, version=self.pw_hash_version) return txin_type, pubkey def delete_imported_key(self, key): self.keypairs.pop(key) def get_private_key(self, pubkey: str, password): sec = pw_decode(self.keypairs[pubkey], password, version=self.pw_hash_version) try: txin_type, privkey, compressed = deserialize_privkey(sec) except BaseDecodeError as e: raise InvalidPassword() from e if pubkey != ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed): raise InvalidPassword() return privkey, compressed def get_pubkey_derivation(self, pubkey, txin, *, only_der_suffix=True): if pubkey.hex() in self.keypairs: return pubkey.hex() return None def update_password(self, old_password, new_password): self.check_password(old_password) if new_password == '': new_password = None for k, v in self.keypairs.items(): b = pw_decode(v, old_password, version=self.pw_hash_version) c = pw_encode(b, new_password, version=PW_HASH_VERSION_LATEST) self.keypairs[k] = c self.pw_hash_version = PW_HASH_VERSION_LATEST class Deterministic_KeyStore(Software_KeyStore): def __init__(self, d): Software_KeyStore.__init__(self, d) self.seed = d.get('seed', '') self.passphrase = d.get('passphrase', '') self._seed_type = d.get('seed_type', None) def is_deterministic(self): return True def 
dump(self): d = { 'type': self.type, 'pw_hash_version': self.pw_hash_version, } if self.seed: d['seed'] = self.seed if self.passphrase: d['passphrase'] = self.passphrase if self._seed_type: d['seed_type'] = self._seed_type return d def has_seed(self): return bool(self.seed) def get_seed_type(self) -> Optional[str]: return self._seed_type def is_watching_only(self): return not self.has_seed() @abstractmethod def format_seed(self, seed: str) -> str: pass def add_seed(self, seed): if self.seed: raise Exception("a seed exists") self.seed = self.format_seed(seed) self._seed_type = seed_type(seed) or None def get_seed(self, password): if not self.has_seed(): raise Exception("This wallet has no seed words") return pw_decode(self.seed, password, version=self.pw_hash_version) def get_passphrase(self, password): if self.passphrase: return pw_decode(self.passphrase, password, version=self.pw_hash_version) else: return '' class MasterPublicKeyMixin(ABC): @abstractmethod def get_master_public_key(self) -> str: pass @abstractmethod def get_derivation_prefix(self) -> Optional[str]: pass @abstractmethod def get_root_fingerprint(self) -> Optional[str]: pass @abstractmethod def get_fp_and_derivation_to_be_used_in_partial_tx( self, der_suffix: Sequence[int], *, only_der_suffix: bool, ) -> Tuple[bytes, Sequence[int]]: pass @abstractmethod
MIT License
rubienr/network-monitoring
data_vis/views.py
transformTransferProbes2PiechartData
python
def transformTransferProbes2PiechartData(direction, timeFrame):
    objects = None
    if "download" in direction and "upload" in direction:
        objects = TransferTestResult.objects \
            .filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]])
    else:
        objects = TransferTestResult.objects \
            .filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]]) \
            .filter(direction=direction)

    results = {}
    for result in objects:
        if result.probeName in results:
            results[result.probeName].append(result)
        else:
            results[result.probeName] = [result]

    xdata = results.keys()
    ydata = [len(results[x]) for x in results]

    extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}}
    chartdata = {'x': xdata, 'y': ydata, "extra": extra_serie}
    charttype = "pieChart"
    chartcontainer = 'piechart_container'

    title = "Total %s"
    if "download" in direction and "upload" in direction:
        title = title % "Up-/Downloads"
    elif "download" in direction:
        title = title % "Downloads"
    elif "upload" in direction:
        title = title % "Uploads"

    data = {
        "title": title,
        'charttype': charttype,
        'chartdata': chartdata,
        'chartcontainer': chartcontainer,
        'extra': {
            'x_is_date': False,
            'x_axis_format': '',
            'tag_script_js': True,
            'jquery_on_ready': False,
            "donut": True,
            "showLabels": True,
        }
    }
    return data
Arguments
    direction: download, upload, downloadupload
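A minimal calling sketch, assuming only what the function body reads from timeFrame; the dates are illustrative:

import datetime

time_frame = {
    "fromDateTime": datetime.datetime(2020, 1, 1),
    "toDateTime": datetime.datetime(2020, 1, 31),
}
chart = transformTransferProbes2PiechartData("downloadupload", time_frame)
# chart["chartdata"] and chart["charttype"] feed an nvd3-style pie chart template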
https://github.com/rubienr/network-monitoring/blob/8bcdba2a60b9e418b9ba56181b1cbb08c0a099aa/data_vis/views.py#L304-L358
from __future__ import unicode_literals import datetime import time from collections import OrderedDict import speedtest_cli as speedtest from django.db.models import Max, Min from django.shortcuts import render from django.shortcuts import render_to_response from django.template.context import RequestContext from django.views.generic import TemplateView from common.models import PingTestResult from common.models import SpeedtestServer from common.models import TransferTestResult from service.probing import getLocalIp def defaultView(request): return render_to_response('bootstrap/base.html', context_instance=RequestContext(request)) def transformPingProbes2TimelinechartData(timeFrame): objects = PingTestResult.objects.filter(pingStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]]) hostToTimestampToValue = {} for result in objects: timestamp = time.mktime(result.pingStart.timetuple()) * 1000 host = result.probeName value = result.rttAvg if host not in hostToTimestampToValue .keys(): hostToTimestampToValue[host] = {} hostToTimestampToValue[host][timestamp] = value relaxedData = [] for host in hostToTimestampToValue.keys(): relaxedData.append(seriesToReturnToZeroSeries(hostToTimestampToValue[host])) xValues, chartData = mergeDictionariesToChartData(relaxedData) extra_serie = {"tooltip": {"y_start": "", "y_end": " [ms] avg. delay"}} chartdata = { 'x': xValues, } idx = 1 hostnameLookup = dict(zip(chartData.keys(), hostToTimestampToValue.keys())) for key, hostData in chartData.items(): chartdata["name%s" % idx] = hostnameLookup["y%s" % idx] chartdata["y%s" % idx] = hostData.values() chartdata["extra%s" % idx] = extra_serie idx += 1 axis_date= "%H:%M %p" tooltip_date = "%d %b %H:%M" data = { 'charttype': "lineWithFocusChart", 'chartdata': chartdata, "chartcontainer": "linewithfocuschart_container", "title": "Average Ping Duration", "extra": { 'x_is_date': True, 'x_axis_format': axis_date, "charttooltip_dateformat": tooltip_date, 'tag_script_js': True, 'jquery_on_ready': False } } return data def transformTransferProbes2TimelinechartData(direction, timeFrame): objects = None isBothDirections = False if "download" in direction and "upload" in direction: isBothDirections = True objects = TransferTestResult.objects .filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]]) else: objects = TransferTestResult.objects .filter(transferStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]]) .filter(direction=direction) hostToTimestampToValue = {} for result in objects: timestamp = time.mktime(result.transferStart.timetuple()) * 1000.0 if isBothDirections: host = "%s (%s)" % (result.probeName, result.direction) else: host = result.probeName throughput = round(((result.transferredUnitsPerSecond * 1) / (1000.0 * 1000)), 2) if host not in hostToTimestampToValue.keys(): hostToTimestampToValue[host] = {} hostToTimestampToValue[host][timestamp] = throughput relaxedData = [] for host in hostToTimestampToValue.keys(): relaxedData.append(seriesToReturnToZeroSeries(hostToTimestampToValue[host])) xValues, chartData = mergeDictionariesToChartData(relaxedData) extra_serie = {"tooltip": {"y_start": "", "y_end": "MBit/s"}} chartdata = { 'x': xValues, } idx = 1 hostnameLookup = dict(zip(chartData.keys(), hostToTimestampToValue.keys())) for key, hostData in chartData.items(): chartdata["name%s" % idx] = hostnameLookup["y%s" % idx] chartdata["y%s" % idx] = hostData.values() chartdata["extra%s" % idx] = extra_serie idx += 1 title = "" if "download" in direction and "upload" in 
direction: title = "Up-/Download Speed Tests" elif "download" in direction: title = "Download Speed Tests" elif "upload" in direction: title = "Upload Speed Tests" axis_date= "%H:%M" tooltip_date = "%d %b %H:%M" data = { 'charttype': "lineWithFocusChart", 'chartdata': chartdata, "chartcontainer": "linewithfocuschart_container", "title": title, "extra": { 'x_is_date': True, 'x_axis_format': axis_date, "charttooltip_dateformat": tooltip_date, 'tag_script_js': True, 'jquery_on_ready': False } } return data def transformProbes2PreviewTimelinechartData(): timestampToPingProbes = {} roundSeconds = -2 for result in PingTestResult.objects.order_by('pingStart').all(): timestamp = int(round(time.mktime(result.pingStart.timetuple()), roundSeconds)) if timestamp in timestampToPingProbes.keys(): timestampToPingProbes[timestamp] = timestampToPingProbes[timestamp] + 1 else: timestampToPingProbes[timestamp] = 1 timestampToTransferProbes = {} for result in TransferTestResult.objects.order_by('transferStart').all(): timestamp = int(round(time.mktime(result.transferStart.timetuple()), roundSeconds)) if timestamp in timestampToTransferProbes.keys(): timestampToTransferProbes[timestamp] = timestampToTransferProbes[timestamp] + 1 else: timestampToTransferProbes[timestamp] = 1 pingChartData = seriesToReturnToZeroSeries(timestampToPingProbes) transferChartData = seriesToReturnToZeroSeries(timestampToTransferProbes) xValues, theData = mergeDictionariesToChartData([pingChartData, transferChartData]) extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}} chartdata = {'x': [1000 * s for s in xValues]} chartdata["name1"] = "ping probes" chartdata["y1"] = theData["y1"].values() chartdata["extra1"] = extra_serie chartdata["name2"] = "transfer probes" chartdata["y2"] = theData["y2"].values() chartdata["extra2"] = extra_serie if len(chartdata["x"]) > 30: title = "Specify time window to generate charts from (optional):" elif len(chartdata["x"]) > 0: title = "Data overview (still less probes):" else: title = "Unfortunately no data available. Please configure and start the service." 
axis_date= "%d %b" tooltip_date = "%d %b %H:%M" data = { 'preview_charttype': "lineWithFocusChart", 'preview_chartdata': chartdata, "preview_chartcontainer": "linewithfocuschart_container", "preview_title": title, "preview_extra": { 'x_is_date': True, 'x_axis_format': axis_date, "charttooltip_dateformat": tooltip_date, 'tag_script_js': True, 'jquery_on_ready': False } } return data def getClosestServersView(request): config = speedtest.getConfig() closestServers = speedtest.closestServers(config['client']) models = [] localIp = getLocalIp("speedtest.net") for server in closestServers: server["serverId"] = server.pop("id") model = SpeedtestServer().fromDict(**server) model.interfaceIp = localIp models.append(model) SpeedtestServer.objects.bulk_create(models) title = "Speedtest.net - Closest Server" columnToName = OrderedDict ([ ("serverId", "ID"), ("name", "City"), ("url", "URL"), ("country", "Country"), ("d", "Distance [km]"), ("sponsor", ""), ("lat", "Latitude"), ("lon", "Longitude"), ]) columns = columnToName.keys() servers = [] for c in closestServers: server = OrderedDict([(columnToName[filteredColumn], c[filteredColumn]) for filteredColumn in columns]) distanceColumn = columnToName["d"] server[distanceColumn] = round(server[distanceColumn],1) servers.append(server) data = { "title": title, "tableHeader" : servers[0].keys(), "servers": servers, } return render_to_response('bootstrap/serverlist.html', data, context_instance=RequestContext(request)) def transformPingProbes2PiechartData(timeFrame): objects = PingTestResult.objects.filter(pingStart__range=[timeFrame["fromDateTime"], timeFrame["toDateTime"]]) results = {} for result in objects: if result.probeName in results: results[result.probeName].append(result) else: results[result.probeName] = [result] xdata = results.keys() ydata = [len(results[x]) for x in results] extra_serie = {"tooltip": {"y_start": "", "y_end": " probes"}} chartdata = {'x': xdata, 'y': ydata, "extra": extra_serie} charttype = "pieChart" chartcontainer = 'piechart_container' data = { "title": "Total Pings", 'charttype': charttype, 'chartdata': chartdata, 'chartcontainer': chartcontainer, 'extra': { 'x_is_date': False, 'tag_script_js': True, 'jquery_on_ready': False, "donut": True, "showLabels": True, } } return data
Apache License 2.0
open-risk/opennpl
openNPL/views.py
api_root
python
def api_root(request, format=None):
    data = [
        {'NPL Template Endpoints': [
            {'npl_counterparty_group': reverse('npl_portfolio:npl_counterparty_group_api', request=request, format=format)},
            {'npl_counterparty': reverse('npl_portfolio:npl_counterparty_api', request=request, format=format)},
            {'npl_loan': reverse('npl_portfolio:npl_loan_api', request=request, format=format)},
            {'npl_enforcement': reverse('npl_portfolio:npl_enforcement_api', request=request, format=format)},
            {'npl_forbearance': reverse('npl_portfolio:npl_forbearance_api', request=request, format=format)},
            {'npl_nonproperty_collateral': reverse('npl_portfolio:npl_nonproperty_collateral_api', request=request, format=format)},
            {'npl_property_collateral': reverse('npl_portfolio:npl_property_collateral_api', request=request, format=format)},
            {'npl_external_collection': reverse('npl_portfolio:npl_external_collection_api', request=request, format=format)},
        ]},
    ]
    return Response(data)
Returns a list of all active API endpoints in the OpenNPL installation, grouped by functionality:

- **NPL Data Endpoints** implements a REST CRUD interface to EBA Templated NPL Data
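A hedged test-style sketch using Django REST framework's request factory; the request path is a placeholder, and it assumes the project's URLconf (the npl_portfolio namespace) is loaded so that reverse() can resolve the endpoint names:

from rest_framework.test import APIRequestFactory

request = APIRequestFactory().get("/api/")   # hypothetical path
response = api_root(request)
assert response.status_code == 200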
https://github.com/open-risk/opennpl/blob/fff1d219b5180305fd8b9e742e0e26dc0306f45d/openNPL/views.py#L44-L71
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from rest_framework.reverse import reverse

from npl_portfolio.models import CounterpartyGroup, Counterparty, Loan, Enforcement, Forbearance, NonPropertyCollateral, PropertyCollateral, ExternalCollection

from openNPL.serializers import NPL_CounterpartyGroupSerializer, NPL_CounterpartyGroupDetailSerializer
from openNPL.serializers import NPL_CounterpartySerializer, NPL_CounterpartyDetailSerializer
from openNPL.serializers import NPL_EnforcementSerializer, NPL_EnforcementDetailSerializer
from openNPL.serializers import NPL_ExternalCollectionSerializer, NPL_ExternalCollectionDetailSerializer
from openNPL.serializers import NPL_ForbearanceSerializer, NPL_ForbearanceDetailSerializer
from openNPL.serializers import NPL_LoanSerializer, NPL_LoanDetailSerializer
from openNPL.serializers import NPL_NonPropertyCollateralSerializer, NPL_NonPropertyCollateralDetailSerializer
from openNPL.serializers import NPL_PropertyCollateralSerializer, NPL_PropertyCollateralDetailSerializer


@api_view(['GET'])
@permission_classes((permissions.AllowAny,))
MIT License
spamexperts/orangeassassin
oa/db/bayes/sqlalchemy.py
Store.nspam_nham_change
python
def nspam_nham_change(self, spam, ham):
        self.conn.execute(
            "UPDATE bayes_vars "
            "SET spam_count=:spam_count, ham_count=:ham_count",
            {'spam_count': spam, 'ham_count': ham}
        )
        self.conn.commit()
Set the spam and ham counts for the database.
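A hedged sketch; plugin is a placeholder for any object providing the get_engine()/get_session() hooks that Store expects in this record's context, and the counts are illustrative:

store = Store(plugin)                         # `plugin` is a placeholder
store.tie_db_writeable()
store.nspam_nham_change(spam=120, ham=480)    # overwrite the global counters
print(store.nspam_nham_get())                 # read them back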
https://github.com/spamexperts/orangeassassin/blob/9d0d4bd3ef7d09bf2da195280730f790f1229458/oa/db/bayes/sqlalchemy.py#L161-L168
from __future__ import absolute_import from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, PrimaryKeyConstraint, String, LargeBinary Base = declarative_base() class BayesExpire(Base): __tablename__ = 'bayes_expire' id = Column("id", Integer) runtime = Column("runtime", Integer) __table_args__ = ( PrimaryKeyConstraint("id"),) class BayesGlobalVars(Base): __tablename__ = "bayes_global_vars" variable = Column("variable", String(30)) value = Column("value", String(200)) __table_args__ = ( PrimaryKeyConstraint("variable"),) class BayesSeen(Base): __tablename__ = "bayes_seen" id = Column("id", Integer) msgid = Column("msgid", String(200)) flag = Column("flag", String(1)) __table_args__ = ( PrimaryKeyConstraint("id", "msgid"),) class BayesToken(Base): __tablename__ = "bayes_token" id = Column("id", Integer) token = Column("token", LargeBinary(5)) spam_count = Column("spam_count", Integer) ham_count = Column("ham_count", Integer) atime = Column("atime", Integer) __table_args__ = ( PrimaryKeyConstraint("id", "token"),) class BayesVars(Base): __tablename__ = "bayes_vars" id = Column("id", Integer) username = Column("username", String(200)) spam_count = Column("spam_count", Integer) ham_count = Column("ham_count", Integer) token_count = Column("token_count", Integer) last_expire = Column("last_expire", Integer) last_atime_delta = Column("last_atime_delta", Integer) last_expire_reduce = Column("last_expire_reduce", Integer) oldest_token_age = Column("oldest_token_age", Integer, default=2147483647) newest_token_age = Column("newest_token_age", Integer) __table_args__ = ( PrimaryKeyConstraint("id"),) class Store(object): def __init__(self, plugin): self.engine = plugin.get_engine() self.conn = None self.plugin = plugin def untie_db(self): if self.conn: self.conn.close() def tie_db_readonly(self): return self.tie_db_writeable() def tie_db_writeable(self): self.conn = self.plugin.get_session() return True def tok_get(self, token): return self.conn.execute( "SELECT spam_count, ham_count, atime " "FROM bayes_token WHERE token=:token", {"token": token} ).fetchone() def tok_get_all(self, tokens): for token in tokens: try: yield self.conn.query( BayesToken.token, BayesToken.spam_count, BayesToken.ham_count, BayesToken.atime, ).filter(BayesToken.token==bytes(token)).one() except NoResultFound: yield None def seen_get(self, msgid): return self.conn.execute( "SELECT flag FROM bayes_seen WHERE msgid=:msgid", {'msgid': msgid}).fetchone()[0] def seen_delete(self, id, msgid): self.conn.execute( "DELETE FROM bayes_seen WHERE msgid = :msgid", {"msgid": msgid} ) def seen_put(self, msgid, flag): self.conn.execute( "UPDATE bayes_seen SET flag=:flag WHERE msgid=:msgid", {"flag": flag, "msgid": msgid}) self.conn.commit() def cleanup(self): pass def nspam_nham_get(self): return self.conn.execute( "SELECT spam_count, ham_count FROM bayes_vars LIMIT 1" ).fetchone()
Apache License 2.0
itisfoundation/osparc-simcore
services/web/server/tests/integration/01/test_exporter.py
push_services_to_registry
python
def push_services_to_registry(docker_registry: str, node_meta_schema: Dict) -> None:
    _pull_push_service(
        "itisfoundation/sleeper", "2.0.2", docker_registry, node_meta_schema
    )
    _pull_push_service(
        "itisfoundation/sleeper", "2.1.1", docker_registry, node_meta_schema
    )
Adds the itisfoundation/sleeper service (versions 2.0.2 and 2.1.1) to the docker registry
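A hedged sketch of how this session-scoped pytest fixture would be consumed; the test name and body are hypothetical:

def test_sleeper_images_in_registry(push_services_to_registry, docker_registry):
    # after the fixture runs, itisfoundation/sleeper 2.0.2 and 2.1.1 are
    # available in the registry used by the test stack
    ...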
https://github.com/itisfoundation/osparc-simcore/blob/a50b61735381231abba2cfcd57f3314785c656b0/services/web/server/tests/integration/01/test_exporter.py#L261-L270
import asyncio import cgi import itertools import json import logging import operator import sys import tempfile from collections import deque from contextlib import contextmanager from copy import deepcopy from pathlib import Path from typing import Any, Callable, Coroutine, Dict, List, Set, Tuple import aiofiles import aiohttp import aiopg import aioredis import pytest from models_library.settings.redis import RedisConfig from pytest_simcore.docker_registry import _pull_push_service from pytest_simcore.helpers.utils_login import log_client_in from servicelib.aiohttp.application import create_safe_application from simcore_postgres_database.models.services import ( services_access_rights, services_meta_data, ) from simcore_service_webserver.application import ( setup_director, setup_director_v2, setup_exporter, setup_login, setup_products, setup_projects, setup_resource_manager, setup_rest, setup_security, setup_session, setup_socketio, setup_storage, setup_users, ) from simcore_service_webserver.catalog import setup_catalog from simcore_service_webserver.constants import X_PRODUCT_NAME_HEADER from simcore_service_webserver.db import setup_db from simcore_service_webserver.db_models import projects from simcore_service_webserver.exporter.async_hashing import Algorithm, checksum from simcore_service_webserver.exporter.file_downloader import ParallelDownloader from simcore_service_webserver.scicrunch.submodule_setup import ( setup_scicrunch_submodule, ) from simcore_service_webserver.security_roles import UserRole from simcore_service_webserver.storage_handlers import get_file_download_url from sqlalchemy.dialects.postgresql import insert as pg_insert from yarl import URL log = logging.getLogger(__name__) pytest_simcore_core_services_selection = [ "redis", "rabbit", "catalog", "dask-scheduler", "director", "director-v2", "postgres", "storage", ] pytest_simcore_ops_services_selection = ["minio"] CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent DATA_DIR = CURRENT_DIR.parent.parent / "data" assert DATA_DIR.exists(), "expected folder under tests/data" API_VERSION = "v0" API_PREFIX = "/" + API_VERSION SUPPORTED_EXPORTER_VERSIONS = {"v1", "v2"} REMAPPING_KEY = "__reverse__remapping__dict__key__" KEYS_TO_IGNORE_FROM_COMPARISON = { "id", "uuid", "name", "creation_date", "last_change_date", "runHash", "eTag", REMAPPING_KEY, } @pytest.fixture(autouse=True) def __drop_and_recreate_postgres__(database_from_template_before_each_function) -> None: yield @pytest.fixture(autouse=True) async def __delete_all_redis_keys__(redis_service: RedisConfig): client = await aioredis.create_redis_pool(redis_service.dsn, encoding="utf-8") await client.flushall() client.close() await client.wait_closed() yield @pytest.fixture async def monkey_patch_aiohttp_request_url() -> None: old_request = aiohttp.ClientSession._request async def new_request(*args, **kwargs): assert len(args) == 3 url = args[2] if isinstance(url, str): url = URL(url) if url.host == "director-v2": from pytest_simcore.helpers.utils_docker import get_service_published_port log.debug("MOCKING _request [before] url=%s", url) new_port = int(get_service_published_port("director-v2", 8000)) url = url.with_host("172.17.0.1").with_port(new_port) log.debug("MOCKING _request [after] url=%s kwargs=%s", url, str(kwargs)) args = args[0], args[1], url return await old_request(*args, **kwargs) aiohttp.ClientSession._request = new_request yield aiohttp.ClientSession._request = old_request @pytest.fixture def client( loop, 
aiohttp_client, app_config, postgres_with_template_db, mock_orphaned_services ): cfg = deepcopy(app_config) assert cfg["rest"]["version"] == API_VERSION assert cfg["rest"]["enabled"] cfg["projects"]["enabled"] = True cfg["director"]["enabled"] = True app = create_safe_application(cfg) setup_db(app) setup_session(app) setup_security(app) setup_rest(app) setup_login(app) setup_users(app) setup_socketio(app) setup_projects(app) setup_director(app) setup_director_v2(app) setup_exporter(app) setup_storage(app) setup_products(app) setup_catalog(app) setup_scicrunch_submodule(app) assert setup_resource_manager(app) yield loop.run_until_complete( aiohttp_client( app, server_kwargs={"port": cfg["main"]["port"], "host": cfg["main"]["host"]}, ) ) async def login_user(client): return await log_client_in(client=client, user_data={"role": UserRole.USER.name}) def get_exported_projects() -> List[Path]: exporter_dir = DATA_DIR / "exporter" assert exporter_dir.exists() exported_files = [x for x in exporter_dir.glob("*.osparc")] assert exported_files, "expected *.osparc files, none found" return exported_files @pytest.fixture async def apply_access_rights(aiopg_engine: aiopg.sa.Engine) -> Coroutine: async def grant_rights_to_services(services: List[Tuple[str, str]]) -> None: for service_key, service_version in services: metada_data_values = dict( key=service_key, version=service_version, owner=1, name="", description=f"OVERRIDEN BY TEST in {__file__}", ) access_rights_values = dict( key=service_key, version=service_version, gid=1, execute_access=True, write_access=True, ) async with aiopg_engine.acquire() as conn: await conn.execute( pg_insert(services_meta_data) .values(**metada_data_values) .on_conflict_do_update( index_elements=[ services_meta_data.c.key, services_meta_data.c.version, ], set_=metada_data_values, ) ) await conn.execute( pg_insert(services_access_rights) .values(**access_rights_values) .on_conflict_do_update( index_elements=[ services_access_rights.c.key, services_access_rights.c.version, services_access_rights.c.gid, services_access_rights.c.product_name, ], set_=access_rights_values, ) ) yield grant_rights_to_services @pytest.fixture async def grant_access_rights(apply_access_rights: Coroutine) -> None: services = [ ("simcore/services/comp/itis/sleeper", "2.0.2"), ("simcore/services/comp/itis/sleeper", "2.1.1"), ] await apply_access_rights(services) @pytest.fixture(scope="session")
MIT License
google/qkeras
experimental/lo/random_forest/parser.py
p_opt_new_line
python
def p_opt_new_line(p):
    pass
opt_new_line : NEWLINE
             |
https://github.com/google/qkeras/blob/8dbc20d3ca32cf4536ad675d57e70586f8aedbb2/experimental/lo/random_forest/parser.py#L134-L138
from ply import yacc from ply import lex import numpy as np _1 = 1 _0 = 2 _X = 3 _U = 0 NOT = {_0: _1, _1: _0, _X: _U, _U: _U} class PLA: def __init__(self): self.pla_i = [] self.pla_o = [] pla = PLA() tokens = [ "I", "O", "MV", "ILB", "OB", "P", "L", "E", "TYPE", "SYMBOL", "NUMBER", "NEWLINE" ] t_ignore = " \t|" t_I = r"\.[iI]" t_O = r"\.[oO]" t_MV = r"\.[mM][vV]" t_ILB = r"\.[iI][lL][bB]" t_OB = r"\.[oO][bB]" t_P = r"\.[pP]" t_L = r"\.[lL]" t_E = r"\.[eE]" t_TYPE = r"\.type" t_SYMBOL = r"[a-zA-Z_][a-zA-Z0-9_\<\>\-\$]*" def t_NUMBER(t): return t def t_NEWLINE(t): t.lexer.lineno += t.value.count("\n") return t def t_error(t): print("Illegal character '{}'".format(t.value)) t.lexer.skip(1) lex.lex() def p_pla(p): def p_pla_declarations(p): def p_pla_declaration(p): token = p[1].lower() if token == ".i": pla.ni = int(p[2]) elif token == ".o": pla.no = int(p[2]) elif token == ".mv": pla.mv = [int(v) for v in p[2]] elif token == ".ilb": pla.ilb = p[2] elif token == ".ob": pla.ob = p[2] elif token == ".l": pla.label = p[2] elif token == ".type": pla.set_type = p[2] def p_pla_table(p): if len(p[1:]) == 3: line = "".join(p[2]) else: line = "".join(p[1]) assert hasattr(pla, "ni") and hasattr(pla, "no") line = [_1 if v == "1" else _0 if v == "0" else _X for v in line] pla.pla_i.append(line[0:pla.ni]) pla.pla_o.append(line[pla.ni:]) def p_pla_end(p): pass
Apache License 2.0
voxelers/mcthings
mcthings/decorators/decorator.py
Decorator.create
python
def create(self):
Create the decorator

:return:
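A hedged sketch of a concrete subclass overriding create(); the class name, block choice, and placement logic are illustrative only:

class Torches(Decorator):

    block = mcpi.block.TORCH   # hypothetical block choice

    def create(self):
        # sketch: compute positions around self._thing and place self.block there
        ...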
https://github.com/voxelers/mcthings/blob/7df7eba679864b31472e4da585e5273913f513e5/mcthings/decorators/decorator.py#L27-L32
from mcpi.minecraft import Minecraft
import mcpi.block

from mcthings.blocks_memory import BlocksMemory
from mcthings.world import World


class Decorator:

    block = mcpi.block.AIR

    def __init__(self, thing):
        self._blocks_memory = BlocksMemory()
        self._thing = thing
Apache License 2.0
roads/psiz
tests/examples/test_mle_1g.py
ground_truth
python
def ground_truth(n_stimuli, n_dim, similarity_func):
    stimuli = tf.keras.layers.Embedding(
        n_stimuli+1, n_dim, mask_zero=True,
        embeddings_initializer=tf.keras.initializers.RandomNormal(
            stddev=.17, seed=4
        )
    )

    if similarity_func == 'Exponential':
        similarity = psiz.keras.layers.ExponentialSimilarity(
            fit_tau=False, fit_gamma=False,
            tau_initializer=tf.keras.initializers.Constant(1.),
            gamma_initializer=tf.keras.initializers.Constant(0.001),
        )
    elif similarity_func == 'StudentsT':
        similarity = psiz.keras.layers.StudentsTSimilarity(
            fit_tau=False, fit_alpha=False,
            tau_initializer=tf.keras.initializers.Constant(2.),
            alpha_initializer=tf.keras.initializers.Constant(1.),
        )
    elif similarity_func == 'HeavyTailed':
        similarity = psiz.keras.layers.HeavyTailedSimilarity(
            fit_tau=False, fit_kappa=False, fit_alpha=False,
            tau_initializer=tf.keras.initializers.Constant(2.),
            kappa_initializer=tf.keras.initializers.Constant(2.),
            alpha_initializer=tf.keras.initializers.Constant(10.),
        )
    elif similarity_func == "Inverse":
        similarity = psiz.keras.layers.InverseSimilarity(
            fit_tau=False, fit_mu=False,
            tau_initializer=tf.keras.initializers.Constant(2.),
            mu_initializer=tf.keras.initializers.Constant(0.000001)
        )

    kernel = psiz.keras.layers.DistanceBased(
        distance=psiz.keras.layers.Minkowski(
            rho_initializer=tf.keras.initializers.Constant(2.),
            w_initializer=tf.keras.initializers.Constant(1.),
            trainable=False
        ),
        similarity=similarity
    )

    model = psiz.keras.models.Rank(stimuli=stimuli, kernel=kernel)
    return model
Return a ground truth embedding.
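A minimal calling sketch; the sizes are arbitrary and the similarity name must be one of the four branches handled by the function above:

model = ground_truth(n_stimuli=30, n_dim=3, similarity_func='Exponential')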
https://github.com/roads/psiz/blob/37068530a78e08792e827ee55cf55e627add115e/tests/examples/test_mle_1g.py#L27-L73
import pytest
import numpy as np
from scipy.stats import pearsonr
import tensorflow as tf

import psiz
Apache License 2.0
gnosis/safe-relay-service
safe_relay_service/gas_station/gas_station.py
GasStation.get_tx_gas_prices
python
def get_tx_gas_prices(self, block_numbers: Iterable[int]) -> List[int]:
        cached_blocks = []
        not_cached_block_numbers = []
        for block_number in block_numbers:
            block = self._get_block_from_cache(block_number)
            if block:
                cached_blocks.append(block)
            else:
                not_cached_block_numbers.append(block_number)

        requested_blocks = self.ethereum_client.get_blocks(
            not_cached_block_numbers, full_transactions=True
        )
        for block_number, block in zip(not_cached_block_numbers, requested_blocks):
            if block:
                requested_blocks.append(block)
                self._store_block_in_cache(block["number"], block)
            else:
                logger.warning(
                    "Cannot find block-number=%d, a reorg happened", block_number
                )

        gas_prices = [
            transaction["gasPrice"]
            for block in requested_blocks + cached_blocks
            for transaction in block["transactions"]
            if transaction.get("gasPrice")
        ]
        return gas_prices
:param block_numbers: Block numbers to retrieve
:return: Return a list with `gas_price` for every block provided
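A hedged usage sketch; EthereumClientProvider and GasStation come from this record's context, while the block numbers are purely illustrative:

gas_station = GasStation(EthereumClientProvider(), number_of_blocks=200)
prices = gas_station.get_tx_gas_prices(range(10_000_000, 10_000_010))  # hypothetical block range
median_gas_price = sorted(prices)[len(prices) // 2] if prices else None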
https://github.com/gnosis/safe-relay-service/blob/c36c94757c0eb3eade51006da5885eaddc4786b0/safe_relay_service/gas_station/gas_station.py#L81-L115
import math from logging import getLogger from typing import Iterable, List, Optional from django.conf import settings from django.core.cache import cache import numpy as np from web3 import Web3 from web3.types import BlockData from gnosis.eth import EthereumClient, EthereumClientProvider from .models import GasPrice logger = getLogger(__name__) class NoBlocksFound(Exception): pass class GasStationProvider: def __new__(cls): if not hasattr(cls, "instance"): if settings.FIXED_GAS_PRICE is not None: cls.instance = GasStationMock(gas_price=settings.FIXED_GAS_PRICE) else: cls.instance = GasStation( EthereumClientProvider(), settings.GAS_STATION_NUMBER_BLOCKS ) w3 = cls.instance.w3 if w3.isConnected() and int(w3.net.version) > 314158: logger.warning( "Using mock Gas Station because no `w3.net.version` was detected" ) cls.instance = GasStationMock() return cls.instance @classmethod def del_singleton(cls): if hasattr(cls, "instance"): del cls.instance class GasStation: def __init__( self, ethereum_client: EthereumClient, number_of_blocks: int = 200, cache_timeout_seconds: int = 10 * 60, constant_gas_increment: int = 1, ): self.ethereum_client = ethereum_client self.number_of_blocks = number_of_blocks self.cache_timeout = cache_timeout_seconds self.constant_gas_increment = constant_gas_increment self.w3 = self.ethereum_client.w3 def _get_block_cache_key(self, block_number: int): return "block:%d" % block_number def _get_block_from_cache(self, block_number: int) -> Optional[BlockData]: return cache.get(self._get_block_cache_key(block_number)) def _store_block_in_cache(self, block_number: int, block: BlockData): return cache.set( self._get_block_cache_key(block_number), block, self.cache_timeout ) def _get_gas_price_cache_key(self): return "gas_price" def _get_gas_price_from_cache(self) -> Optional[GasPrice]: return cache.get(self._get_gas_price_cache_key()) def _store_gas_price_in_cache(self, gas_price: GasPrice): return cache.set(self._get_gas_price_cache_key(), gas_price)
MIT License
nhynes/em
em/__main__.py
rename
python
def rename(args, _config, _extra_args):
    repo = pygit2.init_repository('.')

    name = args.name
    new_name = args.newname

    with shelve.open('.em') as emdb:
        if name not in emdb:
            return _die(E_NO_EXP.format(name))
        if emdb[name]['status'] == 'running':
            return _die(E_RENAME_RUNNING)
        if new_name in emdb:
            return _die(E_NAME_EXISTS.format(new_name))

    br = _get_br(repo, name)
    if br is None:
        return _die(E_NO_BRANCH.format(name))
    if _get_br(repo, new_name) is not None:
        return _die(E_BRANCH_EXISTS.format(new_name))

    exper_dir = _expath(name)
    new_exper_dir = _expath(new_name)
    try:
        os.rename(exper_dir, new_exper_dir)
    except OSError:
        return _die(E_MOVE_DIR)

    try:
        br.rename(new_name)
    except pygit2.GitError:
        os.rename(new_exper_dir, exper_dir)
        return _die(E_RENAME_BRANCH)

    with shelve.open('.em') as emdb:
        emdb[new_name] = emdb.pop(name)
Rename an experiment.
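A hedged sketch of invoking this handler directly; in practice it is dispatched by the em CLI, and argparse.Namespace stands in for the parsed arguments (the experiment names are hypothetical):

import argparse

args = argparse.Namespace(name='baseline', newname='baseline-v2')  # hypothetical names
rename(args, _config=None, _extra_args=None)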
https://github.com/nhynes/em/blob/3d17963f70676950dec0693d4c1e320cb7903871/em/__main__.py#L526-L562
import argparse import datetime import os from os import path as osp import shelve import shutil import sys import pygit2 GIT_UNCH = {pygit2.GIT_STATUS_CURRENT, pygit2.GIT_STATUS_IGNORED} EM_KEY = '__em__' E_BRANCH_EXISTS = 'error: branch "{}" already exists' E_CHECKED_OUT = 'error: cannot run experiment on checked out branch' E_CANT_CLEAN = 'error: could not clean up {}' E_IS_NOT_RUNNING = 'error: experiment "{}" is not running' E_IS_RUNNING = 'error: experiment "{}" is already running' E_MODIFIED_SRC = 'error: not updating existing branch with source changes' E_MOVE_DIR = 'error: could not move experiment directory' E_NAME_EXISTS = 'error: experiment named "{}" already exists' E_NO_BRANCH = 'error: no branch for experiment "{}"?' E_NO_EXP = 'error: no experiment named "{}"' E_NO_PROJ = 'error: "{}" is not a project directory' E_OTHER_MACHINE = 'error: experiment "{}" is not running on this machine' E_RENAME_BRANCH = 'error: could not rename branch' E_RENAME_RUNNING = 'error: cannot rename running experiment' RUN_RECREATE_PROMPT = 'Experiment {} already exists. Recreate? [yN] ' LI = '* {}' CLEAN_NEEDS_FORCE = 'The following experiments require --force to be removed:' CLEAN_PREAMBLE = 'The following experiments will be removed:' CLEAN_SNAP_PREAMBLE = 'The following experiments\' snaps will be removed:' CLEAN_PROMPT = 'Clean up {:d} experiments? [yN] ' CLEAN_SNAPS_PROMPT = 'Clean up snaps of {:d} experiments? [yN] ' LI_RUNNING = LI + ' (running)' RESET_PREAMBLE = 'The following experiments will be reset:' RESET_PROMPT = 'Reset {:d} experiments? [yN] ' def _die(msg, status=1): print(msg, file=sys.stderr) return status def _ensure_proj(cb): def _docmd(*args, **kwargs): with shelve.open('.em') as emdb: if EM_KEY not in emdb: curdir = osp.abspath('.') return _die(E_NO_PROJ.format(curdir)) cb(*args, **kwargs) return _docmd def _expath(*args): return osp.abspath(osp.join('experiments', *args)) def proj_create(args, config, _extra_args): tmpl_repo = config['project']['template_repo'] try: pygit2.clone_repository(tmpl_repo, args.dest) shutil.rmtree(osp.join(args.dest, '.git'), ignore_errors=True) pygit2.init_repository(args.dest) except ValueError: pass for em_dir in ['experiments', 'data']: dpath = osp.join(args.dest, em_dir) if not osp.isdir(dpath): os.mkdir(dpath) with shelve.open(osp.join(args.dest, '.em')) as emdb: emdb['__em__'] = {} def _cleanup(name, emdb, repo): exper_dir = _expath(name) if name not in emdb and not osp.isdir(exper_dir): return if osp.isdir(exper_dir): shutil.rmtree(exper_dir) try: worktree = repo.lookup_worktree(name) if worktree is not None: worktree.prune(True) except pygit2.GitError: pass try: br = repo.lookup_branch(name) if br is not None: br.delete() except pygit2.GitError: pass if name in emdb: del emdb[name] def _cleanup_snaps(name, _emdb, _repo): exper_dir = _expath(name) snaps_dir = osp.join(exper_dir, 'run', 'snaps') if osp.isdir(snaps_dir): shutil.rmtree(snaps_dir) os.mkdir(snaps_dir) def _tstamp(): import time return datetime.datetime.fromtimestamp(time.time()) def _get_tracked_exts(config): return set(config['experiment']['track_files'].split(',')) def _has_src_changes(repo, config): has_src_changes = has_changes = False for filepath, status in repo.status().items(): ext = osp.splitext(osp.basename(filepath))[1][1:] changed = status not in GIT_UNCH has_changes = has_changes or changed if ext in _get_tracked_exts(config): has_src_changes = has_src_changes or changed return has_src_changes def _create_experiment(name, repo, config, base_commit=None, desc=None): 
head_commit = repo[repo.head.target] stash = None sig = repo.default_signature has_src_changes = _has_src_changes(repo, config) if has_src_changes: repo.index.add_all([f'*.{ext}' for ext in _get_tracked_exts(config)]) snap_tree_id = repo.index.write_tree() if base_commit is not None: if base_commit.tree_id != snap_tree_id: stash = repo.stash(sig, include_untracked=True) else: base_commit = head_commit with shelve.open('.em') as emdb: for existing_name in emdb: existing_br = repo.lookup_branch(existing_name) if existing_br is None: continue existing_ci = existing_br.get_object() if existing_ci and existing_ci.tree_id == snap_tree_id: base_commit = existing_ci break else: base_commit = head_commit if base_commit != head_commit: repo.reset(base_commit.id, pygit2.GIT_RESET_HARD if stash else pygit2.GIT_RESET_SOFT) exper_dir = _expath(name) repo.add_worktree(name, exper_dir) if has_src_changes and base_commit == head_commit: repo.create_commit(f'refs/heads/{name}', sig, sig, desc or 'setup experiment', snap_tree_id, [base_commit.id]) workdir = pygit2.Repository(exper_dir) workdir.reset(workdir.head.target, pygit2.GIT_RESET_HARD) os.symlink(osp.abspath('data'), osp.join(exper_dir, 'data'), target_is_directory=True) if base_commit != head_commit: repo.reset(head_commit.id, pygit2.GIT_RESET_HARD if stash else pygit2.GIT_RESET_SOFT) if stash: repo.stash_pop() def _run_job(name, config, gpu=None, prog_args=None, background=False): import socket import subprocess import daemon exper_dir = _expath(name) runem_cmd = ([config['experiment']['prog']] + config['experiment']['prog_args'] + (prog_args or [])) env = os.environ if gpu: env['CUDA_VISIBLE_DEVICES'] = gpu def _do_run_job(): try: job = subprocess.Popen(runem_cmd, cwd=exper_dir, env=env, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr) with shelve.open('.em', writeback=True) as emdb: emdb[name] = { 'started': _tstamp(), 'status': 'running', 'pid': job.pid, 'hostname': socket.getfqdn(), } if gpu: emdb[name]['gpu'] = gpu job.wait() with shelve.open('.em', writeback=True) as emdb: status = 'completed' if job.returncode == 0 else 'error' emdb[name]['status'] = status except KeyboardInterrupt: with shelve.open('.em', writeback=True) as emdb: emdb[name]['status'] = 'interrupted' finally: with shelve.open('.em', writeback=True) as emdb: emdb[name].pop('pid', None) emdb[name]['ended'] = _tstamp() if background: curdir = osp.abspath(os.curdir) with daemon.DaemonContext(working_directory=curdir): _do_run_job() else: _do_run_job() def run(args, config, prog_args): name = args.name repo = pygit2.Repository('.') with shelve.open('.em', writeback=True) as emdb: exp_info = emdb.get(name) if exp_info: if exp_info['status'] == 'running': return _die(E_IS_RUNNING.format(name)) newp = input(RUN_RECREATE_PROMPT.format(name)) if newp.lower() != 'y': return _cleanup(name, emdb, repo) br = None emdb[name] = {'status': 'starting'} try: br = repo.lookup_branch(name) except pygit2.GitError: br = None base_commit = None if br is not None: if br.is_checked_out(): return _die(E_CHECKED_OUT) base_commit = repo[br.target] br.delete() _create_experiment(name, repo, config, base_commit, desc=args.desc) return _run_job(name, config, args.gpu, prog_args, args.background) def fork(args, config, _extra_args): name = args.name fork_name = args.fork_name repo = pygit2.Repository('.') with shelve.open('.em', writeback=True) as emdb: if fork_name in emdb: return _die(E_NAME_EXISTS.format(fork_name)) exp_info = emdb.get(name) if not exp_info: return _die(E_NO_EXP.format(name)) try: 
fork_br = repo.lookup_branch(fork_name) if fork_br is not None: return _die(E_BRANCH_EXISTS.format(fork_name)) except pygit2.GitError: pass try: br = repo.lookup_branch(name) except pygit2.GitError: br = None if br is None: return _die(E_NO_BRANCH.format(name)) emdb[fork_name] = { 'status': 'starting', 'clone_of': name, } base_commit = repo[br.target] _create_experiment(fork_name, repo, config, base_commit) fork_snap_dir = osp.join(_expath(fork_name), 'run', 'snaps') os.makedirs(fork_snap_dir) def _link(*path_comps): orig_path = osp.join(_expath(name), *path_comps) os.symlink(orig_path, orig_path.replace(name, fork_name)) _link('run', 'opts.pkl') orig_snap_dir = fork_snap_dir.replace(fork_name, name) for snap_name in os.listdir(orig_snap_dir): _link('run', 'snaps', snap_name) def resume(args, config, prog_args): name = args.name repo = pygit2.Repository('.') with shelve.open('.em') as emdb: if name not in emdb: return _die(E_NO_EXP.format(name)) info = emdb[name] if 'pid' in info or info.get('status') == 'running': return _die(E_IS_RUNNING.format(name)) try: repo.lookup_branch(name) except pygit2.GitError: return _die(E_NO_EXP.format(name)) prog_args.append('--resume') if args.epoch: prog_args.append(args.epoch) return _run_job(name, config, args.gpu, prog_args, args.background) def _print_sorted(lines, tmpl=LI): print('\n'.join(map(tmpl.format, sorted(lines)))) def clean(args, _config, _extra_args): from fnmatch import fnmatch repo = pygit2.Repository('.') cleanup = _cleanup_snaps if args.snaps else _cleanup with shelve.open('.em', writeback=True) as emdb: matched = set() needs_force = set() for name in emdb: if name == EM_KEY: continue is_match = sum(fnmatch(name, patt) for patt in args.name) is_excluded = sum(fnmatch(name, patt) for patt in args.exclude) if not is_match or is_excluded: continue matched.add(name) info = emdb[name] if 'pid' in info or info.get('status') == 'running': needs_force.add(name) if not matched: return clean_noforce = matched - needs_force to_clean = clean_noforce if not args.force else matched if len(args.name) == 1 and args.name[0] in to_clean: cleanup(args.name[0], emdb, repo) return if not to_clean: return print(CLEAN_SNAP_PREAMBLE if args.snaps else CLEAN_PREAMBLE) _print_sorted(clean_noforce) if args.force: _print_sorted(needs_force, tmpl=LI_RUNNING) elif needs_force: print(CLEAN_NEEDS_FORCE) _print_sorted(needs_force) prompt = CLEAN_SNAPS_PROMPT if args.snaps else CLEAN_PROMPT cleanp = input(prompt.format(len(to_clean))) if cleanp.lower() != 'y': return for name in to_clean: try: cleanup(name, emdb, repo) except OSError: print(E_CANT_CLEAN.format(name)) def reset(args, _config, _extra_args): from fnmatch import fnmatch with shelve.open('.em', writeback=True) as emdb: def _reset(name): for state_item in ['pid', 'gpu']: emdb[name].pop(state_item, None) emdb[name]['status'] = 'reset' if len(args.name) == 1 and args.name[0] in emdb: _reset(args.name[0]) return to_reset = set() for name in emdb: is_match = sum(fnmatch(name, patt) for patt in args.name) is_excluded = sum(fnmatch(name, patt) for patt in args.exclude) if not is_match or is_excluded: continue to_reset.add(name) if not to_reset: return print(RESET_PREAMBLE) _print_sorted(to_reset) resetp = input(RESET_PROMPT.format(len(to_reset))) if resetp.lower() != 'y': return for name in to_reset: _reset(name) def list_experiments(args, _config, _extra_args): import subprocess if args.filter: filter_key, filter_value = args.filter.split('=') def _filt(stats): return filter_key in stats and stats[filter_key] == 
filter_value with shelve.open('.em') as emdb: if args.filter: names = {name for name, info in sorted(emdb.items()) if _filt(info)} else: names = emdb.keys() names -= {EM_KEY} if not names: return subprocess.run( ['column'], input='\n'.join(sorted(names)) + '\n', encoding='utf8') def show(args, _config, _extra_args): import pickle import pprint name = args.name with shelve.open('.em') as emdb: if name not in emdb or name == EM_KEY: return _die(E_NO_EXP.format(name)) for info_name, info_val in sorted(emdb[name].items()): if isinstance(info_val, datetime.date): info_val = info_val.ctime() print(f'{info_name}: {info_val}') if not args.opts: return opts_path = _expath(name, 'run', 'opts.pkl') with open(opts_path, 'rb') as f_opts: print('\noptions:') opts = pickle.load(f_opts) cols = shutil.get_terminal_size((80, 20)).columns pprint.pprint(vars(opts), indent=2, compact=True, width=cols) def _ps(pid): try: os.kill(pid, 0) except OSError: return False return True def ctl(args, _config, _extra_args): import signal name = args.name with shelve.open('.em') as emdb: if name not in emdb: return _die(E_NO_EXP.format(name)) pid = emdb[name].get('pid') if not pid: return _die(E_IS_NOT_RUNNING.format(name)) if not _ps(pid): return _die(E_OTHER_MACHINE.format(name)) cmd = args.cmd[0] if cmd == 'stop': os.kill(pid, signal.SIGINT) else: ctl_file = _expath(name, 'run', 'ctl') with open(ctl_file, 'w') as f_ctl: print(' '.join(args.cmd), file=f_ctl) def _get_br(repo, branch_name): br = None try: br = repo.lookup_branch(branch_name) except pygit2.GitError: pass return br
MIT License
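The `_ps` helper near the end of the listing above uses the classic "signal 0" liveness probe: sending signal 0 delivers nothing but still reports whether the PID exists. A minimal standalone sketch of the same idiom (the function name here is illustrative, not part of the tool above):

import os

def process_alive(pid):
    # Signal 0 performs existence/permission checks only; no signal is
    # actually delivered to the target process (POSIX semantics).
    try:
        os.kill(pid, 0)
    except OSError:
        return False
    return True

print(process_alive(os.getpid()))   # True: the current process clearly exists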
openstack/manila
manila/scheduler/filters/capacity.py
CapacityFilter.host_passes
python
def host_passes(self, host_state, filter_properties): share_size = filter_properties.get('size', 0) if host_state.free_capacity_gb is None: LOG.error("Free capacity not set: " "share node info collection broken.") return False free_space = host_state.free_capacity_gb total_space = host_state.total_capacity_gb if filter_properties.get('snapshot_id'): reserved = float(host_state.reserved_snapshot_percentage) / 100 else: reserved = float(host_state.reserved_percentage) / 100 if free_space == 'unknown': return True elif total_space == 'unknown': return reserved == 0 and share_size <= free_space total = float(total_space) if total <= 0: LOG.warning("Insufficient free space for share creation. " "Total capacity is %(total).2f on host %(host)s.", {"total": total, "host": host_state.host}) return False free = math.floor(free_space - total * reserved) msg_args = {"host": host_state.host, "requested": share_size, "available": free} LOG.debug("Space information for share creation " "on host %(host)s (requested / avail): " "%(requested)s/%(available)s", msg_args) share_type = filter_properties.get('share_type', {}) use_thin_logic = utils.use_thin_logic(share_type) thin_provisioning = utils.thin_provisioning( host_state.thin_provisioning) if (use_thin_logic and thin_provisioning and host_state.max_over_subscription_ratio >= 1): provisioned_ratio = ((host_state.provisioned_capacity_gb + share_size) / total) if provisioned_ratio > host_state.max_over_subscription_ratio: LOG.warning( "Insufficient free space for thin provisioning. " "The ratio of provisioned capacity over total capacity " "%(provisioned_ratio).2f would exceed the maximum over " "subscription ratio %(oversub_ratio).2f on host " "%(host)s.", {"provisioned_ratio": provisioned_ratio, "oversub_ratio": host_state.max_over_subscription_ratio, "host": host_state.host}) return False else: adjusted_free_virtual = ( free * host_state.max_over_subscription_ratio) return adjusted_free_virtual >= share_size elif (use_thin_logic and thin_provisioning and host_state.max_over_subscription_ratio < 1): LOG.error("Invalid max_over_subscription_ratio: %(ratio)s. " "Valid value should be >= 1.", {"ratio": host_state.max_over_subscription_ratio}) return False if free < share_size: LOG.warning("Insufficient free space for share creation " "on host %(host)s (requested / avail): " "%(requested)s/%(available)s", msg_args) return False return True
Return True if the host has sufficient capacity.
https://github.com/openstack/manila/blob/34d209484366cd921e052d37c5f9daef5e97af20/manila/scheduler/filters/capacity.py#L33-L129
import math from oslo_log import log from manila.scheduler.filters import base_host from manila.scheduler import utils LOG = log.getLogger(__name__) class CapacityFilter(base_host.BaseHostFilter):
Apache License 2.0
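A self-contained sketch of the capacity arithmetic that host_passes applies, with illustrative parameter names and the 'unknown' capacity and snapshot-reservation branches left out for brevity:

import math

def passes_capacity(share_size, free_gb, total_gb, reserved_pct,
                    thin=False, provisioned_gb=0.0, max_ratio=1.0):
    # Reserved space is a fraction of total capacity held back from new shares.
    if total_gb <= 0:
        return False
    reserved = reserved_pct / 100.0
    free = math.floor(free_gb - total_gb * reserved)
    if thin and max_ratio >= 1:
        # Thin provisioning: the provisioned-to-total ratio must stay within the
        # over-subscription limit, and the virtual free space must cover the share.
        if (provisioned_gb + share_size) / total_gb > max_ratio:
            return False
        return free * max_ratio >= share_size
    return free >= share_size

print(passes_capacity(10, free_gb=50, total_gb=100, reserved_pct=20))   # True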
morucci/repoxplorer
repoxplorer/index/projects.py
Projects._validate_projects
python
def _validate_projects(self, tids): _, issues = self._check_basic('projects', projects_schema, 'Project') if issues: return issues for d in self.data: projects = d.get('projects', {}) for pid, detail in projects.items(): for rid, repo in detail['repos'].items(): template = repo['template'] if template not in tids: issues.append("Project ID '%s' Repo ID '%s' " "references an unknown template %s" % ( pid, rid, template)) return issues
Validate self.data consistency for projects
https://github.com/morucci/repoxplorer/blob/9f7a19fdf264985fd4c144d2e28724c30c65951d/repoxplorer/index/projects.py#L579-L597
import copy import logging from datetime import datetime from elasticsearch.helpers import bulk from elasticsearch.helpers import BulkIndexError from elasticsearch.helpers import scan as scanner from pecan import conf from repoxplorer import index from repoxplorer.index import YAMLDefinition from repoxplorer.index import add_params from repoxplorer.index import date2epoch logger = logging.getLogger(__name__) project_templates_schema = r""" $schema: http://json-schema.org/draft-04/schema definitions: release: type: object additionalProperties: false required: - name - date properties: name: type: string date: type: string type: object properties: project-templates: type: object additionalProperties: false patternProperties: ^[a-zA-Z0-9_/\. \-\+]+$: type: object additionalProperties: false required: - uri - branches properties: uri: type: string gitweb: type: string branches: type: array items: type: string minItems: 1 tags: type: array items: type: string paths: type: array items: type: string parsers: type: array items: type: string releases: type: array items: $ref: "#/definitions/release" index-tags: type: boolean """ project_templates_example = """ templates: default: uri: https://github.com/%(name)s branches: - master - stable/mitaka - stable/newton - stable/ocata gitweb: https://github.com/%(name)s/commit/%%(sha)s parsers: - .*(blueprint) ([^ .]+).* releases: - name: 1.0 date: 2016-12-20 - name: 2.0 date: 2016-12-31 tags: - openstack - language:python - type:cloud paths: - project/tests/ index-tags: true """ projects_schema = r""" $schema: http://json-schema.org/draft-04/schema definitions: release: type: object additionalProperties: false required: - name - date properties: name: type: string date: type: string type: object properties: projects: type: object additionalProperties: false patternProperties: ^[a-zA-Z0-9_/\. \-\+]+$: type: object additionalProperties: false properties: description: type: string logo: type: string meta-ref: type: boolean bots-group: type: string releases: type: array items: $ref: "#/definitions/release" repos: type: object additionalProperties: false patternProperties: ^[a-zA-Z0-9_/\. 
\-\+]+$: type: object additionalProperties: false required: - template properties: template: type: string description: type: string paths: type: array items: type: string tags: type: array items: type: string forks: type: integer stars: type: integer watchers: type: integer branches: type: array items: type: string minItems: 1 """ projects_example = """ projects: Barbican: description: The Barbican project bots-group: openstack-ci-bots releases: - name: ocata date: 2017-02-22 repos: openstack/barbican: template: default openstack/python-barbicanclient: template: default description: The barbican client forks: 10 watchers: 20 stars: 30 tags: - client - language:python paths: - project/tests/ Swift: repos: openstack/swift: template: default branches: - dev openstack/python-swiftclient: template: default """ class EProjects(object): PROPERTIES = { "aname": {"type": "text"}, "name": {"type": "keyword"}, "description": {"type": "text"}, "logo": {"type": "binary"}, "meta-ref": {"type": "boolean"}, "bots-group": {"type": "keyword"}, "index-tags": {"type": "boolean"}, "project": {"type": "keyword"}, "releases": { "type": "nested", "properties": { "name": {"type": "keyword"}, "date": {"type": "keyword"}, } }, "refs": { "type": "nested", "properties": { "aname": {"type": "text"}, "name": {"type": "keyword"}, "description": {"type": "text"}, "forks": {"type": "integer"}, "watchers": {"type": "integer"}, "stars": {"type": "integer"}, "uri": {"type": "keyword"}, "gitweb": {"type": "keyword"}, "branch": {"type": "keyword"}, "tags": {"type": "keyword"}, "fullrid": {"type": "keyword"}, "shortrid": {"type": "keyword"}, "paths": {"type": "keyword"}, "parsers": {"type": "keyword"}, "index-tags": {"type": "boolean"}, "releases": { "type": "nested", "properties": { "name": {"type": "keyword"}, "date": {"type": "keyword"}, } } } } } def __init__(self, connector=None): self.es = connector.es self.ic = connector.ic self.index = connector.index self.dbname = 'projects' self.mapping = { self.dbname: { "properties": self.PROPERTIES, } } if not self.ic.exists_type(index=self.index, doc_type=self.dbname): kwargs = add_params(self.es) self.ic.put_mapping( index=self.index, doc_type=self.dbname, body=self.mapping, **kwargs) def manage_bulk_err(self, exc): errs = [e['create']['error'] for e in exc[1]] if not all([True for e in errs if e['type'] == 'document_already_exists_exception']): raise Exception( "Unable to create one or more doc: %s" % errs) def create(self, docs): def gen(): for pid, doc in docs: d = {} d['_index'] = self.index d['_type'] = self.dbname d['_op_type'] = 'create' d['_id'] = pid d['_source'] = doc yield d try: bulk(self.es, gen()) except BulkIndexError as exc: self.manage_bulk_err(exc) self.es.indices.refresh(index=self.index) def delete_all(self): def gen(docs): for doc in docs: d = {} d['_index'] = self.index d['_type'] = self.dbname d['_op_type'] = 'delete' d['_id'] = doc['_id'] yield d bulk(self.es, gen(self.get_all(source=False))) self.es.indices.refresh(index=self.index) def load(self, projects, rid2projects): self.delete_all() self.create(projects.items()) self.create(rid2projects.items()) def get_all(self, source=True, type=None): query = { '_source': source, 'query': { 'match_all': {} } } return scanner(self.es, query=query, index=self.index) def get_by_id(self, id, source=True): try: res = self.es.get(index=self.index, doc_type=self.dbname, _source=source, id=id) return res['_source'] except Exception as e: logger.error('Unable to get the doc. 
%s' % e) def exists(self, id): return self.es.exists( index=self.index, doc_type=self.dbname, id=id) def get_by_attr_match(self, attribute, value, source=True): params = {'index': self.index} body = { "query": { 'bool': { 'must': {'term': {attribute: value}}, } } } params['body'] = body params['_source'] = source params['size'] = 10000 res = self.es.search(**params) took = res['took'] hits = res['hits']['total'] docs = [r['_source'] for r in res['hits']['hits']] return took, hits, docs def get_by_nested_attr_match( self, attribute, values, source=True, inner_source=True, inner_hits_max=100): if not isinstance(values, list): values = (values,) params = {'index': self.index} body = { "query": { "bool": { "must": { "nested": { "path": "refs", "inner_hits": { "_source": inner_source, "size": inner_hits_max, }, "query": { "bool": { "should": [ {"term": {"refs.%s" % attribute: value}} for value in values ] } } } } } } } params['body'] = body params['_source'] = source params['size'] = 10000 res = self.es.search(**params) inner_hits = [r['inner_hits'] for r in res['hits']['hits']] took = res['took'] hits = res['hits']['total'] docs = [r['_source'] for r in res['hits']['hits']] return took, hits, docs, inner_hits def get_projects_by_fullrids(self, fullrids): body = {"ids": fullrids} try: res = self.es.mget(index=self.index, doc_type=self.dbname, _source=True, body=body) return res['docs'] except Exception as e: logger.error('Unable to get projects by fullrids. %s' % e) class Projects(YAMLDefinition): def __init__(self, db_path=None, db_default_file=None, db_cache_path=None, con=None, dump_yaml_in_index=None, vonly=False): self.db_path = db_path or conf.get('db_path') self.db_default_file = db_default_file or conf.get('db_default_file') self.db_cache_path = db_cache_path or conf.get('db_cache_path') if vonly: return self.eprojects = EProjects( connector=(con or index.Connector(index_suffix='projects'))) self.el_version = self.eprojects.es.info().get( 'version', {}).get('number', '') if dump_yaml_in_index: YAMLDefinition.__init__( self, self.db_path, self.db_default_file, self.db_cache_path) issues = self.validate() if issues: raise RuntimeError(issues) self._enrich_projects() projects, rid2projects = self._flatten_projects() self.eprojects.load(projects, rid2projects) def _merge(self): merged_templates = {} merged_projects = {} for d in self.data: templates = d.get('project-templates', {}) projects = d.get('projects', {}) merged_templates.update(copy.copy(templates)) for p, v in projects.items(): merged_projects.setdefault(p, copy.copy(v)) merged_projects[p]['repos'].update(copy.copy(v['repos'])) self.templates = {} self.projects = {} if self.default_data: self.templates = copy.copy( self.default_data.get('project-templates', {})) self.projects = copy.copy( self.default_data.get('projects', {})) self.templates.update(merged_templates) self.projects.update(merged_projects) def _enrich_projects(self): for detail in list(self.projects.values()): if 'meta-ref' not in detail: detail['meta-ref'] = False for rid, repo in list(detail['repos'].items()): tags = [] if 'tags' in repo and repo['tags']: tags = copy.copy(repo['tags']) branches = [] if 'branches' in repo: branches = copy.copy(repo['branches']) paths = [] if 'paths' in repo: paths = copy.copy(repo['paths']) if 'template' in repo: repo.update(copy.deepcopy( self.templates[repo['template']])) del repo['template'] for key in ('uri', 'gitweb'): if key in repo: repo[key] = repo[key] % {'name': rid} if 'tags' not in repo: repo['tags'] = [] 
repo['tags'].extend(tags) repo['tags'] = list(set(repo['tags'])) if branches: repo['branches'] = branches if paths: repo['paths'] = paths if 'parsers' not in repo: repo['parsers'] = [] if 'releases' not in repo: repo['releases'] = [] if 'index-tags' not in repo: repo['index-tags'] = True for release in repo['releases']: release['date'] = date2epoch(release['date']) def _flatten_projects(self): flatten = {} rid2projects = {} for pid, detail in self.projects.items(): flatten[pid] = { 'name': pid, 'aname': pid, 'meta-ref': detail.get('meta-ref'), 'refs': [], 'description': detail.get('description'), 'logo': detail.get('logo'), 'bots-group': detail.get('bots-group'), 'releases': detail.get('releases', []), } for release in flatten[pid]['releases']: release['date'] = date2epoch(release['date']) for rid, repo in detail['repos'].items(): for branch in repo['branches']: r = {} r.update(copy.deepcopy(repo)) r['name'] = rid r['aname'] = rid r['branch'] = branch del r['branches'] r['fullrid'] = "%s:%s:%s" % ( r['uri'], r['name'], r['branch']) r['shortrid'] = "%s:%s" % (r['uri'], r['name']) flatten[pid]['refs'].append(r) rid2projects.setdefault(r['fullrid'], {'project': []}) if pid not in rid2projects[r['fullrid']]['project']: rid2projects[r['fullrid']]['project'].append(pid) return flatten, rid2projects def _validate_templates(self): ids, issues = self._check_basic('project-templates', project_templates_schema, 'Project template') if issues: return ids, issues for d in self.data: templates = d.get('project-templates', {}) for tid, templates in templates.items(): if 'releases' in templates: for r in templates['releases']: try: datetime.strptime(r['date'], "%Y-%m-%d") except Exception: issues.append("Wrong date format %s defined " "in template %s" % (r['date'], tid)) return ids, issues
Apache License 2.0
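The cross-reference part of _validate_projects reduces to scanning every repo for a template id that is not among the known templates. A standalone sketch of just that check (schema validation omitted):

def find_unknown_templates(projects, template_ids):
    # Return one message per repo whose 'template' is not a known template id.
    issues = []
    for pid, detail in projects.items():
        for rid, repo in detail.get('repos', {}).items():
            template = repo.get('template')
            if template not in template_ids:
                issues.append("Project ID '%s' Repo ID '%s' references an "
                              "unknown template %s" % (pid, rid, template))
    return issues

projects = {'Swift': {'repos': {'openstack/swift': {'template': 'default'}}}}
print(find_unknown_templates(projects, {'default'}))   # []
print(find_unknown_templates(projects, {'custom'}))    # one issue reported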
cjolowicz/hypermodern-python
noxfile.py
mypy
python
def mypy(session: Session) -> None: args = session.posargs or locations install_with_constraints(session, "mypy") session.run("mypy", *args)
Type-check using mypy.
https://github.com/cjolowicz/hypermodern-python/blob/ed593b8399dd94e2fe6f618aca8ea5f43710b541/noxfile.py#L86-L90
import tempfile from typing import Any import nox from nox.sessions import Session package = "hypermodern_python" nox.options.sessions = "lint", "safety", "mypy", "pytype", "tests" locations = "src", "tests", "noxfile.py", "docs/conf.py" def install_with_constraints(session: Session, *args: str, **kwargs: Any) -> None: with tempfile.NamedTemporaryFile() as requirements: session.run( "poetry", "export", "--dev", "--format=requirements.txt", f"--output={requirements.name}", external=True, ) session.install(f"--constraint={requirements.name}", *args, **kwargs) @nox.session(python="3.8") def black(session: Session) -> None: args = session.posargs or locations install_with_constraints(session, "black") session.run("black", *args) @nox.session(python=["3.8", "3.7"]) def lint(session: Session) -> None: args = session.posargs or locations install_with_constraints( session, "flake8", "flake8-annotations", "flake8-bandit", "flake8-black", "flake8-bugbear", "flake8-docstrings", "flake8-import-order", "darglint", ) session.run("flake8", *args) @nox.session(python="3.8") def safety(session: Session) -> None: with tempfile.NamedTemporaryFile() as requirements: session.run( "poetry", "export", "--dev", "--format=requirements.txt", "--without-hashes", f"--output={requirements.name}", external=True, ) install_with_constraints(session, "safety") session.run("safety", "check", f"--file={requirements.name}", "--full-report") @nox.session(python=["3.8", "3.7"])
MIT License
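Every session in this noxfile follows the same shape: resolve posargs, install the tool, run it. As an illustration, a hypothetical pytype session (pytype appears in nox.options.sessions above but is not shown in the excerpt) could look like this; the real project would likely pin the tool with install_with_constraints rather than a bare session.install:

import nox
from nox.sessions import Session

locations = "src", "tests", "noxfile.py", "docs/conf.py"   # same paths as the noxfile above

@nox.session(python="3.7")
def pytype(session: Session) -> None:
    """Type-check using pytype (hypothetical companion session)."""
    args = session.posargs or locations
    session.install("pytype")        # simplified: no constraint pinning here
    session.run("pytype", *args)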
ramonhagenaars/jsons
jsons/serializers/default_timezone.py
default_timezone_serializer
python
def default_timezone_serializer(obj: timezone, **kwargs) -> dict: name = obj.tzname(None) offset = dump(obj.utcoffset(None), **kwargs) return { 'name': name, 'offset': offset }
Serialize the given timezone instance to a dict holding its name and UTC offset (the offset is dumped in turn, typically ending up as total seconds). :param obj: the timezone instance that is to be serialized. :param kwargs: passed on when dumping the offset. :return: ``timezone`` as a dict.
https://github.com/ramonhagenaars/jsons/blob/12594ebb13247a26a8511644e5ca324817075385/jsons/serializers/default_timezone.py#L6-L19
from datetime import timezone from jsons._dump_impl import dump
MIT License
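A small usage sketch of this serializer through the public jsons API; the exact offset representation comes from jsons' timedelta handling, so the value shown is an expectation rather than a guarantee:

from datetime import timedelta, timezone
import jsons

tz = timezone(timedelta(hours=5, minutes=30), 'IST')
print(jsons.dump(tz))
# expected: {'name': 'IST', 'offset': 19800.0} -- the offset is the dumped timedelta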
microsoft/azure-devops-python-api
azure-devops/azure/devops/v6_0/work_item_tracking_process/work_item_tracking_process_client.py
WorkItemTrackingProcessClient.create_new_process
python
def create_new_process(self, create_request): content = self._serialize.body(create_request, 'CreateProcessModel') response = self._send(http_method='POST', location_id='02cc6a73-5cfb-427d-8c8e-b49fb086e8af', version='6.0-preview.2', content=content) return self._deserialize('ProcessInfo', response)
CreateNewProcess. [Preview API] Creates a process. :param :class:`<CreateProcessModel> <azure.devops.v6_0.work_item_tracking_process.models.CreateProcessModel>` create_request: CreateProcessModel. :rtype: :class:`<ProcessInfo> <azure.devops.v6_0.work_item_tracking_process.models.ProcessInfo>`
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/v6_0/work_item_tracking_process/work_item_tracking_process_client.py#L642-L653
 from msrest import Serializer, Deserializer from ...client import Client from . import models class WorkItemTrackingProcessClient(Client): def __init__(self, base_url=None, creds=None): super(WorkItemTrackingProcessClient, self).__init__(base_url, creds) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) resource_area_identifier = '5264459e-e5e0-4bd8-b118-0985e68a4ec5' def create_process_behavior(self, behavior, process_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') content = self._serialize.body(behavior, 'ProcessBehaviorCreateRequest') response = self._send(http_method='POST', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, content=content) return self._deserialize('ProcessBehavior', response) def delete_process_behavior(self, process_id, behavior_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') self._send(http_method='DELETE', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values) def get_process_behavior(self, process_id, behavior_ref_name, expand=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') query_parameters = {} if expand is not None: query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') response = self._send(http_method='GET', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ProcessBehavior', response) def get_process_behaviors(self, process_id, expand=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') query_parameters = {} if expand is not None: query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') response = self._send(http_method='GET', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('[ProcessBehavior]', self._unwrap_collection(response)) def update_process_behavior(self, behavior_data, process_id, behavior_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') content = self._serialize.body(behavior_data, 'ProcessBehaviorUpdateRequest') response = self._send(http_method='PUT', location_id='d1800200-f184-4e75-a5f2-ad0b04b4373e', version='6.0-preview.2', route_values=route_values, content=content) return self._deserialize('ProcessBehavior', response) def create_control_in_group(self, control, process_id, wit_ref_name, group_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is 
not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='POST', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response) def move_control_to_group(self, control, process_id, wit_ref_name, group_id, control_id, remove_from_group_id=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') query_parameters = {} if remove_from_group_id is not None: query_parameters['removeFromGroupId'] = self._serialize.query('remove_from_group_id', remove_from_group_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='PUT', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters, content=content) return self._deserialize('Control', response) def remove_control_from_group(self, process_id, wit_ref_name, group_id, control_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') self._send(http_method='DELETE', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values) def update_control(self, control, process_id, wit_ref_name, group_id, control_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') if control_id is not None: route_values['controlId'] = self._serialize.url('control_id', control_id, 'str') content = self._serialize.body(control, 'Control') response = self._send(http_method='PATCH', location_id='1f59b363-a2d0-4b7e-9bc6-eb9f5f3f0e58', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Control', response) def add_field_to_work_item_type(self, field, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') content = self._serialize.body(field, 'AddProcessWorkItemTypeFieldRequest') response = self._send(http_method='POST', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values, content=content) return 
self._deserialize('ProcessWorkItemTypeField', response) def get_all_work_item_type_fields(self, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') response = self._send(http_method='GET', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values) return self._deserialize('[ProcessWorkItemTypeField]', self._unwrap_collection(response)) def get_work_item_type_field(self, process_id, wit_ref_name, field_ref_name, expand=None): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if field_ref_name is not None: route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str') query_parameters = {} if expand is not None: query_parameters['$expand'] = self._serialize.query('expand', expand, 'str') response = self._send(http_method='GET', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ProcessWorkItemTypeField', response) def remove_work_item_type_field(self, process_id, wit_ref_name, field_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if field_ref_name is not None: route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str') self._send(http_method='DELETE', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values) def update_work_item_type_field(self, field, process_id, wit_ref_name, field_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if field_ref_name is not None: route_values['fieldRefName'] = self._serialize.url('field_ref_name', field_ref_name, 'str') content = self._serialize.body(field, 'UpdateProcessWorkItemTypeFieldRequest') response = self._send(http_method='PATCH', location_id='bc0ad8dc-e3f3-46b0-b06c-5bf861793196', version='6.0-preview.2', route_values=route_values, content=content) return self._deserialize('ProcessWorkItemTypeField', response) def add_group(self, group, process_id, wit_ref_name, page_id, section_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') if section_id is not None: route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str') content = self._serialize.body(group, 'Group') response = self._send(http_method='POST', location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Group', response) def 
move_group_to_page(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_page_id, remove_from_section_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') if section_id is not None: route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') query_parameters = {} if remove_from_page_id is not None: query_parameters['removeFromPageId'] = self._serialize.query('remove_from_page_id', remove_from_page_id, 'str') if remove_from_section_id is not None: query_parameters['removeFromSectionId'] = self._serialize.query('remove_from_section_id', remove_from_section_id, 'str') content = self._serialize.body(group, 'Group') response = self._send(http_method='PUT', location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters, content=content) return self._deserialize('Group', response) def move_group_to_section(self, group, process_id, wit_ref_name, page_id, section_id, group_id, remove_from_section_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') if section_id is not None: route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') query_parameters = {} if remove_from_section_id is not None: query_parameters['removeFromSectionId'] = self._serialize.query('remove_from_section_id', remove_from_section_id, 'str') content = self._serialize.body(group, 'Group') response = self._send(http_method='PUT', location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters, content=content) return self._deserialize('Group', response) def remove_group(self, process_id, wit_ref_name, page_id, section_id, group_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') if section_id is not None: route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') self._send(http_method='DELETE', location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5', version='6.0-preview.1', route_values=route_values) def update_group(self, group, process_id, wit_ref_name, page_id, section_id, group_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: 
route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') if section_id is not None: route_values['sectionId'] = self._serialize.url('section_id', section_id, 'str') if group_id is not None: route_values['groupId'] = self._serialize.url('group_id', group_id, 'str') content = self._serialize.body(group, 'Group') response = self._send(http_method='PATCH', location_id='766e44e1-36a8-41d7-9050-c343ff02f7a5', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Group', response) def get_form_layout(self, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') response = self._send(http_method='GET', location_id='fa8646eb-43cd-4b71-9564-40106fd63e40', version='6.0-preview.1', route_values=route_values) return self._deserialize('FormLayout', response) def create_list(self, picklist): content = self._serialize.body(picklist, 'PickList') response = self._send(http_method='POST', location_id='01e15468-e27c-4e20-a974-bd957dcccebc', version='6.0-preview.1', content=content) return self._deserialize('PickList', response) def delete_list(self, list_id): route_values = {} if list_id is not None: route_values['listId'] = self._serialize.url('list_id', list_id, 'str') self._send(http_method='DELETE', location_id='01e15468-e27c-4e20-a974-bd957dcccebc', version='6.0-preview.1', route_values=route_values) def get_list(self, list_id): route_values = {} if list_id is not None: route_values['listId'] = self._serialize.url('list_id', list_id, 'str') response = self._send(http_method='GET', location_id='01e15468-e27c-4e20-a974-bd957dcccebc', version='6.0-preview.1', route_values=route_values) return self._deserialize('PickList', response) def get_lists_metadata(self): response = self._send(http_method='GET', location_id='01e15468-e27c-4e20-a974-bd957dcccebc', version='6.0-preview.1') return self._deserialize('[PickListMetadata]', self._unwrap_collection(response)) def update_list(self, picklist, list_id): route_values = {} if list_id is not None: route_values['listId'] = self._serialize.url('list_id', list_id, 'str') content = self._serialize.body(picklist, 'PickList') response = self._send(http_method='PUT', location_id='01e15468-e27c-4e20-a974-bd957dcccebc', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('PickList', response) def add_page(self, page, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') content = self._serialize.body(page, 'Page') response = self._send(http_method='POST', location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Page', response) def remove_page(self, process_id, wit_ref_name, page_id): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') if page_id is not None: route_values['pageId'] = self._serialize.url('page_id', page_id, 'str') self._send(http_method='DELETE', 
location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584', version='6.0-preview.1', route_values=route_values) def update_page(self, page, process_id, wit_ref_name): route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') if wit_ref_name is not None: route_values['witRefName'] = self._serialize.url('wit_ref_name', wit_ref_name, 'str') content = self._serialize.body(page, 'Page') response = self._send(http_method='PATCH', location_id='1cc7b29f-6697-4d9d-b0a1-2650d3e1d584', version='6.0-preview.1', route_values=route_values, content=content) return self._deserialize('Page', response)
MIT License
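A hedged usage sketch of create_new_process: the client is constructed directly with a personal access token, and the CreateProcessModel keyword arguments (name, description, parent_process_type_id) as well as the printed ProcessInfo attribute are assumed from the v6.0 models rather than confirmed by the excerpt; the URL, PAT and GUID are placeholders:

from msrest.authentication import BasicAuthentication
from azure.devops.v6_0.work_item_tracking_process import models
from azure.devops.v6_0.work_item_tracking_process.work_item_tracking_process_client import (
    WorkItemTrackingProcessClient,
)

creds = BasicAuthentication('', 'personal-access-token')            # placeholder PAT
client = WorkItemTrackingProcessClient('https://dev.azure.com/yourorg', creds)

create_request = models.CreateProcessModel(                         # field names assumed
    name='My Inherited Process',
    description='Example process created via the REST client',
    parent_process_type_id='00000000-0000-0000-0000-000000000000',  # parent system process GUID
)
process_info = client.create_new_process(create_request)
print(process_info.name)                                            # attribute name assumed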
ncgeib/pypret
pypret/material.py
BaseMaterial.n
python
def n(self, x, unit='wl'): return self._func(self._convert(x, unit))
The refractive index at frequency `x` specified in units `unit`.
https://github.com/ncgeib/pypret/blob/d47deb675640439df7c8b7c08d71f45ecea3c568/pypret/material.py#L69-L71
import numpy as np from .frequencies import convert from . import lib from . import io class BaseMaterial(io.IO): def __init__(self, coefficients, freq_range, scaling=1.0e6, check_bounds=True, name="", long_name=""): if len(freq_range) != 2: raise ValueError("Frequency range must specified with two elements.") self._coefficients = np.array(coefficients) self._range = np.array(freq_range) self._scaling = scaling self.check = check_bounds self.name = name self.long_name = long_name def _check(self, x): if not self.check: return minx, maxx = np.min(x), np.max(x) if (minx < self._range[0]) or (maxx > self._range[1]): raise ValueError('Wavelength array [%e, %e] outside of valid range ' 'of the Sellmeier equation [%e, %e].' % (minx, maxx, self._range[0], self._range[1])) def _convert(self, x, unit): if unit != 'wl': x = convert(x, unit, 'wl') self._check(x) if self._scaling != 1.0: x = x * self._scaling return x
MIT License
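A toy subclass to show how n() threads through _convert (range check plus metre-to-micrometre scaling) before reaching _func; this is illustrative only and assumes BaseMaterial can be subclassed directly, as pypret's built-in Sellmeier materials presumably are:

import numpy as np
from pypret.material import BaseMaterial   # the class shown above

class ConstantIndex(BaseMaterial):
    """Toy material with a wavelength-independent refractive index."""
    def _func(self, x):
        # x arrives range-checked and scaled (metres -> micrometres by default).
        return np.full_like(np.asarray(x, dtype=float), 1.45)

mat = ConstantIndex(coefficients=[], freq_range=(200e-9, 2e-6))
print(mat.n(800e-9))   # 1.45 at 800 nm (unit defaults to 'wl')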
hunch/hunch-gift-app
django/forms/widgets.py
ClearableFileInput.clear_checkbox_id
python
def clear_checkbox_id(self, name): return name + '_id'
Given the name of the clear checkbox input, return the HTML id for it.
https://github.com/hunch/hunch-gift-app/blob/8c7cad24cc0d9900deb4175e6b768c64a3d7adcf/django/forms/widgets.py#L308-L312
import django.utils.copycompat as copy from itertools import chain from django.conf import settings from django.utils.datastructures import MultiValueDict, MergeDict from django.utils.html import escape, conditional_escape from django.utils.translation import ugettext, ugettext_lazy from django.utils.encoding import StrAndUnicode, force_unicode from django.utils.safestring import mark_safe from django.utils import datetime_safe, formats import time import datetime from util import flatatt from urlparse import urljoin __all__ = ( 'Media', 'MediaDefiningClass', 'Widget', 'TextInput', 'PasswordInput', 'HiddenInput', 'MultipleHiddenInput', 'ClearableFileInput', 'FileInput', 'DateInput', 'DateTimeInput', 'TimeInput', 'Textarea', 'CheckboxInput', 'Select', 'NullBooleanSelect', 'SelectMultiple', 'RadioSelect', 'CheckboxSelectMultiple', 'MultiWidget', 'SplitDateTimeWidget', ) MEDIA_TYPES = ('css','js') class Media(StrAndUnicode): def __init__(self, media=None, **kwargs): if media: media_attrs = media.__dict__ else: media_attrs = kwargs self._css = {} self._js = [] for name in MEDIA_TYPES: getattr(self, 'add_' + name)(media_attrs.get(name, None)) def __unicode__(self): return self.render() def render(self): return mark_safe(u'\n'.join(chain(*[getattr(self, 'render_' + name)() for name in MEDIA_TYPES]))) def render_js(self): return [u'<script type="text/javascript" src="%s"></script>' % self.absolute_path(path) for path in self._js] def render_css(self): media = self._css.keys() media.sort() return chain(*[ [u'<link href="%s" type="text/css" media="%s" rel="stylesheet" />' % (self.absolute_path(path), medium) for path in self._css[medium]] for medium in media]) def absolute_path(self, path): if path.startswith(u'http://') or path.startswith(u'https://') or path.startswith(u'/'): return path return urljoin(settings.MEDIA_URL,path) def __getitem__(self, name): if name in MEDIA_TYPES: return Media(**{str(name): getattr(self, '_' + name)}) raise KeyError('Unknown media type "%s"' % name) def add_js(self, data): if data: for path in data: if path not in self._js: self._js.append(path) def add_css(self, data): if data: for medium, paths in data.items(): for path in paths: if not self._css.get(medium) or path not in self._css[medium]: self._css.setdefault(medium, []).append(path) def __add__(self, other): combined = Media() for name in MEDIA_TYPES: getattr(combined, 'add_' + name)(getattr(self, '_' + name, None)) getattr(combined, 'add_' + name)(getattr(other, '_' + name, None)) return combined def media_property(cls): def _media(self): if hasattr(super(cls, self), 'media'): base = super(cls, self).media else: base = Media() definition = getattr(cls, 'Media', None) if definition: extend = getattr(definition, 'extend', True) if extend: if extend == True: m = base else: m = Media() for medium in extend: m = m + base[medium] return m + Media(definition) else: return Media(definition) else: return base return property(_media) class MediaDefiningClass(type): def __new__(cls, name, bases, attrs): new_class = super(MediaDefiningClass, cls).__new__(cls, name, bases, attrs) if 'media' not in attrs: new_class.media = media_property(new_class) return new_class class Widget(object): __metaclass__ = MediaDefiningClass is_hidden = False needs_multipart_form = False is_localized = False is_required = False def __init__(self, attrs=None): if attrs is not None: self.attrs = attrs.copy() else: self.attrs = {} def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() memo[id(self)] = obj return obj 
def render(self, name, value, attrs=None): raise NotImplementedError def build_attrs(self, extra_attrs=None, **kwargs): attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs def value_from_datadict(self, data, files, name): return data.get(name, None) def _has_changed(self, initial, data): if data is None: data_value = u'' else: data_value = data if initial is None: initial_value = u'' else: initial_value = initial if force_unicode(initial_value) != force_unicode(data_value): return True return False def id_for_label(self, id_): return id_ id_for_label = classmethod(id_for_label) class Input(Widget): input_type = None def _format_value(self, value): if self.is_localized: return formats.localize_input(value) return value def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, type=self.input_type, name=name) if value != '': final_attrs['value'] = force_unicode(self._format_value(value)) return mark_safe(u'<input%s />' % flatatt(final_attrs)) class TextInput(Input): input_type = 'text' class PasswordInput(Input): input_type = 'password' def __init__(self, attrs=None, render_value=False): super(PasswordInput, self).__init__(attrs) self.render_value = render_value def render(self, name, value, attrs=None): if not self.render_value: value=None return super(PasswordInput, self).render(name, value, attrs) class HiddenInput(Input): input_type = 'hidden' is_hidden = True class MultipleHiddenInput(HiddenInput): def __init__(self, attrs=None, choices=()): super(MultipleHiddenInput, self).__init__(attrs) self.choices = choices def render(self, name, value, attrs=None, choices=()): if value is None: value = [] final_attrs = self.build_attrs(attrs, type=self.input_type, name=name) id_ = final_attrs.get('id', None) inputs = [] for i, v in enumerate(value): input_attrs = dict(value=force_unicode(v), **final_attrs) if id_: input_attrs['id'] = '%s_%s' % (id_, i) inputs.append(u'<input%s />' % flatatt(input_attrs)) return mark_safe(u'\n'.join(inputs)) def value_from_datadict(self, data, files, name): if isinstance(data, (MultiValueDict, MergeDict)): return data.getlist(name) return data.get(name, None) class FileInput(Input): input_type = 'file' needs_multipart_form = True def render(self, name, value, attrs=None): return super(FileInput, self).render(name, None, attrs=attrs) def value_from_datadict(self, data, files, name): return files.get(name, None) def _has_changed(self, initial, data): if data is None: return False return True FILE_INPUT_CONTRADICTION = object() class ClearableFileInput(FileInput): initial_text = ugettext_lazy('Currently') input_text = ugettext_lazy('Change') clear_checkbox_label = ugettext_lazy('Clear') template_with_initial = u'%(initial_text)s: %(initial)s %(clear_template)s<br />%(input_text)s: %(input)s' template_with_clear = u'%(clear)s <label for="%(clear_checkbox_id)s">%(clear_checkbox_label)s</label>' def clear_checkbox_name(self, name): return name + '-clear'
MIT License
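The two checkbox helpers are pure naming conventions: the clear checkbox for a file field is named '<field>-clear' and its HTML id is that name plus '_id'. A tiny sketch (assumes an importable Django environment):

from django.forms.widgets import ClearableFileInput

widget = ClearableFileInput()
checkbox_name = widget.clear_checkbox_name('avatar')    # 'avatar-clear'
checkbox_id = widget.clear_checkbox_id(checkbox_name)   # 'avatar-clear_id'
print(checkbox_name, checkbox_id)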
microsoft/azure-devops-python-api
azure-devops/azure/devops/v6_0/location/location_client.py
LocationClient.get_resource_areas_by_host
python
def get_resource_areas_by_host(self, host_id): query_parameters = {} if host_id is not None: query_parameters['hostId'] = self._serialize.query('host_id', host_id, 'str') response = self._send(http_method='GET', location_id='e81700f7-3be2-46de-8624-2eb35882fcaa', version='6.0-preview.1', query_parameters=query_parameters) return self._deserialize('[ResourceAreaInfo]', self._unwrap_collection(response))
GetResourceAreasByHost. [Preview API] :param str host_id: :rtype: [ResourceAreaInfo]
https://github.com/microsoft/azure-devops-python-api/blob/451cade4c475482792cbe9e522c1fee32393139e/azure-devops/azure/devops/v6_0/location/location_client.py#L110-L123
 from msrest import Serializer, Deserializer from ...client import Client from . import models class LocationClient(Client): def __init__(self, base_url=None, creds=None): super(LocationClient, self).__init__(base_url, creds) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) resource_area_identifier = None def get_connection_data(self, connect_options=None, last_change_id=None, last_change_id64=None): query_parameters = {} if connect_options is not None: query_parameters['connectOptions'] = self._serialize.query('connect_options', connect_options, 'str') if last_change_id is not None: query_parameters['lastChangeId'] = self._serialize.query('last_change_id', last_change_id, 'int') if last_change_id64 is not None: query_parameters['lastChangeId64'] = self._serialize.query('last_change_id64', last_change_id64, 'long') response = self._send(http_method='GET', location_id='00d9565f-ed9c-4a06-9a50-00e7896ccab4', version='6.0-preview.1', query_parameters=query_parameters) return self._deserialize('ConnectionData', response) def get_resource_area(self, area_id, enterprise_name=None, organization_name=None): route_values = {} if area_id is not None: route_values['areaId'] = self._serialize.url('area_id', area_id, 'str') query_parameters = {} if enterprise_name is not None: query_parameters['enterpriseName'] = self._serialize.query('enterprise_name', enterprise_name, 'str') if organization_name is not None: query_parameters['organizationName'] = self._serialize.query('organization_name', organization_name, 'str') response = self._send(http_method='GET', location_id='e81700f7-3be2-46de-8624-2eb35882fcaa', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ResourceAreaInfo', response) def get_resource_area_by_host(self, area_id, host_id): route_values = {} if area_id is not None: route_values['areaId'] = self._serialize.url('area_id', area_id, 'str') query_parameters = {} if host_id is not None: query_parameters['hostId'] = self._serialize.query('host_id', host_id, 'str') response = self._send(http_method='GET', location_id='e81700f7-3be2-46de-8624-2eb35882fcaa', version='6.0-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ResourceAreaInfo', response) def get_resource_areas(self, enterprise_name=None, organization_name=None): query_parameters = {} if enterprise_name is not None: query_parameters['enterpriseName'] = self._serialize.query('enterprise_name', enterprise_name, 'str') if organization_name is not None: query_parameters['organizationName'] = self._serialize.query('organization_name', organization_name, 'str') response = self._send(http_method='GET', location_id='e81700f7-3be2-46de-8624-2eb35882fcaa', version='6.0-preview.1', query_parameters=query_parameters) return self._deserialize('[ResourceAreaInfo]', self._unwrap_collection(response))
MIT License
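A hedged usage sketch of the location client above, scoping the resource-area lookup to one host; the organization URL, PAT and host GUID are placeholders, and the printed ResourceAreaInfo attributes are assumed from the model:

from msrest.authentication import BasicAuthentication
from azure.devops.v6_0.location.location_client import LocationClient

creds = BasicAuthentication('', 'personal-access-token')             # placeholder PAT
client = LocationClient(base_url='https://dev.azure.com/yourorg', creds=creds)

areas = client.get_resource_areas_by_host('00000000-0000-0000-0000-000000000000')
for area in areas:
    print(area.id, area.location_url)    # attribute names assumed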
elektronn/elektronn3
elektronn3/modules/lovasz_losses.py
flatten_probas
python
def flatten_probas(probas, labels, ignore=None): C = probas.shape[1] if probas.dim() == 4: probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C) elif probas.dim() == 5: probas = probas.permute(0, 2, 3, 4, 1).contiguous().view(-1, C) labels = labels.view(-1) if ignore is None: return probas, labels valid = (labels != ignore) vprobas = probas[valid.nonzero().squeeze()] vlabels = labels[valid] return vprobas, vlabels
Flattens predictions in the batch
https://github.com/elektronn/elektronn3/blob/19c751855dffc67b744cd43e757aa4a5bd577d9b/elektronn3/modules/lovasz_losses.py#L206-L221
from __future__ import print_function, division import torch from torch.autograd import Variable import torch.nn.functional as F import numpy as np try: from itertools import ifilterfalse except ImportError: from itertools import filterfalse eps = 0.0001 def lovasz_grad(gt_sorted): p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted).float().cumsum(0) jaccard = 1. - intersection / (union + eps) if p > 1: jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True): if not per_image: preds, labels = (preds,), (labels,) ious = [] for pred, label in zip(preds, labels): intersection = ((label == 1) & (pred == 1)).sum() union = ((label == 1) | ((pred == 1) & (label != ignore))).sum() if not union: iou = EMPTY else: iou = float(intersection) / (union + eps) ious.append(iou) iou = mean(ious) return 100 * iou def iou(preds, labels, C, EMPTY=1., ignore=None, per_image=False): if not per_image: preds, labels = (preds,), (labels,) ious = [] for pred, label in zip(preds, labels): iou = [] for i in range(C): if i != ignore: intersection = ((label == i) & (pred == i)).sum() union = ((label == i) | ((pred == i) & (label != ignore))).sum() if not union: iou.append(EMPTY) else: iou.append(float(intersection) / (union + eps)) ious.append(iou) ious = map(mean, zip(*ious)) return 100 * np.array(ious) def lovasz_hinge(logits, labels, per_image=True, ignore=None): if per_image: loss = mean(lovasz_hinge_flat(*flatten_binary_scores(log.unsqueeze(0), lab.unsqueeze(0), ignore)) for log, lab in zip(logits, labels)) else: loss = lovasz_hinge_flat(*flatten_binary_scores(logits, labels, ignore)) return loss def lovasz_hinge_flat(logits, labels): if len(labels) == 0: return logits.sum() * 0. signs = 2. * labels.float() - 1. errors = (1. 
- logits * Variable(signs)) errors_sorted, perm = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot(F.relu(errors_sorted), Variable(grad)) return loss def flatten_binary_scores(scores, labels, ignore=None): scores = scores.view(-1) labels = labels.view(-1) if ignore is None: return scores, labels valid = (labels != ignore) vscores = scores[valid] vlabels = labels[valid] return vscores, vlabels class StableBCELoss(torch.nn.modules.Module): def __init__(self): super(StableBCELoss, self).__init__() def forward(self, input, target): neg_abs = - input.abs() loss = input.clamp(min=0) - input * target + (1 + neg_abs.exp()).log() return loss.mean() def binary_xloss(logits, labels, ignore=None): logits, labels = flatten_binary_scores(logits, labels, ignore) loss = StableBCELoss()(logits, Variable(labels.float())) return loss def lovasz_softmax(probas, labels, only_present=False, per_image=False, ignore=None): if per_image: loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), only_present=only_present) for prob, lab in zip(probas, labels)) else: loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), only_present=only_present) return loss def lovasz_softmax_flat(probas, labels, only_present=False): C = probas.size(1) losses = [] for c in range(C): fg = (labels == c).float() if only_present and fg.sum() == 0: continue errors = (Variable(fg) - probas[:, c]).abs() errors_sorted, perm = torch.sort(errors, 0, descending=True) perm = perm.data fg_sorted = fg[perm] losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted)))) return mean(losses)
MIT License
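A quick shape check of what flatten_probas does to a 4-D segmentation batch with an ignore label, assuming the module above is importable:

import torch
from elektronn3.modules.lovasz_losses import flatten_probas

# Two images, three classes, 4x4 pixels; one pixel carries the ignore label.
probas = torch.softmax(torch.randn(2, 3, 4, 4), dim=1)
labels = torch.randint(0, 3, (2, 4, 4))
labels[0, 0, 0] = 255

vprobas, vlabels = flatten_probas(probas, labels, ignore=255)
print(vprobas.shape, vlabels.shape)   # torch.Size([31, 3]) torch.Size([31])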
openshift/origin-ci-tool
oct/ansible/openshift-ansible/callback_plugins/openshift_quick_installer.py
CallbackModule.v2_runner_item_on_skipped
python
def v2_runner_item_on_skipped(self, result): if C.DISPLAY_SKIPPED_HOSTS: msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item(result._result)) if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result: msg += " => %s" % self._dump_results(result._result) self._display.display(msg, color=C.COLOR_SKIP, log_only=True)
Print out task results when an item is skipped
https://github.com/openshift/origin-ci-tool/blob/95a472392910104fc805c4ab8af39791105c956d/oct/ansible/openshift-ansible/callback_plugins/openshift_quick_installer.py#L231-L237
from __future__ import (absolute_import, print_function) import imp import os import sys from ansible import constants as C from ansible.utils.color import colorize, hostcolor ANSIBLE_PATH = imp.find_module('ansible')[1] DEFAULT_PATH = os.path.join(ANSIBLE_PATH, 'plugins/callback/default.py') DEFAULT_MODULE = imp.load_source( 'ansible.plugins.callback.default', DEFAULT_PATH ) try: from ansible.plugins.callback import CallbackBase BASECLASS = CallbackBase except ImportError: BASECLASS = DEFAULT_MODULE.CallbackModule reload(sys) sys.setdefaultencoding('utf-8') class CallbackModule(DEFAULT_MODULE.CallbackModule): CALLBACK_VERSION = 2.2 CALLBACK_TYPE = 'stdout' CALLBACK_NAME = 'openshift_quick_installer' CALLBACK_NEEDS_WHITELIST = False plays_count = 0 plays_total_ran = 0 def banner(self, msg, color=None): msg = msg.strip() star_len = (79 - len(msg)) if star_len < 0: star_len = 3 stars = "*" * star_len self._display.display("\n%s %s" % (msg, stars), color=color, log_only=True) def v2_playbook_on_start(self, playbook): self.plays_count = len(playbook.get_plays()) self.plays_total_ran = 0 if self._display.verbosity > 1: from os.path import basename self.banner("PLAYBOOK: %s" % basename(playbook._file_name)) def v2_playbook_on_play_start(self, play): self.plays_total_ran += 1 print("") print("Play %s/%s (%s)" % (self.plays_total_ran, self.plays_count, play.get_name())) name = play.get_name().strip() if not name: msg = "PLAY" else: msg = "PLAY [%s]" % name self._play = play self.banner(msg) def v2_playbook_on_task_start(self, task, is_conditional): sys.stdout.write('.') args = '' if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT: args = ', '.join(('%s=%s' % a for a in task.args.items())) args = ' %s' % args self.banner("TASK [%s%s]" % (task.get_name().strip(), args)) if self._display.verbosity >= 2: path = task.get_path() if path: self._display.display("task path: %s" % path, color=C.COLOR_DEBUG, log_only=True) def v2_playbook_on_handler_task_start(self, task): sys.stdout.write('.') self.banner("RUNNING HANDLER [%s]" % task.get_name().strip()) def v2_playbook_on_cleanup_task_start(self, task): sys.stdout.write('.') self.banner("CLEANUP TASK [%s]" % task.get_name().strip()) def v2_playbook_on_include(self, included_file): msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts])) self._display.display(msg, color=C.COLOR_SKIP, log_only=True) def v2_runner_on_ok(self, result): delegated_vars = result._result.get('_ansible_delegated_vars', None) self._clean_results(result._result, result._task.action) if result._task.action in ('include', 'include_role'): return elif result._result.get('changed', False): if delegated_vars: msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "changed: [%s]" % result._host.get_name() color = C.COLOR_CHANGED else: if delegated_vars: msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg = "ok: [%s]" % result._host.get_name() color = C.COLOR_OK if result._task.loop and 'results' in result._result: self._process_items(result) else: if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result: msg += " => %s" % (self._dump_results(result._result),) self._display.display(msg, color=color, log_only=True) self._handle_warnings(result._result) def v2_runner_item_on_ok(self, result): delegated_vars = result._result.get('_ansible_delegated_vars', None) if result._task.action in 
('include', 'include_role'): return elif result._result.get('changed', False): msg = 'changed' color = C.COLOR_CHANGED else: msg = 'ok' color = C.COLOR_OK if delegated_vars: msg += ": [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host']) else: msg += ": [%s]" % result._host.get_name() msg += " => (item=%s)" % (self._get_item(result._result),) if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result: msg += " => %s" % self._dump_results(result._result) self._display.display(msg, color=color, log_only=True)
Apache License 2.0
jkwill87/teletype
teletype/io/common.py
hide_cursor
python
def hide_cursor():
    print(codes.CURSOR["hide"], end="")
    stdout.flush()
Hides the cursor indicator; remember to call show_cursor before exiting
https://github.com/jkwill87/teletype/blob/e7290ed53f8e631c798c438607c0b6211c4aaab4/teletype/io/common.py#L67-L71
from __future__ import print_function from re import sub from sys import stdout from teletype import codes try: input = raw_input except NameError: pass __all__ = [ "erase_lines", "erase_screen", "hide_cursor", "move_cursor", "show_cursor", "strip_format", "style_format", "style_print", "style_input", ] def erase_lines(n=1): for _ in range(n): print(codes.CURSOR["up"], end="") print(codes.CURSOR["eol"], end="") stdout.flush() def erase_screen(): print(codes.CURSOR["clear"], end="") stdout.flush() def move_cursor(cols=0, rows=0): if cols == 0 and rows == 0: return commands = "" commands += codes.CURSOR["up" if rows < 0 else "down"] * abs(rows) commands += codes.CURSOR["left" if cols < 0 else "right"] * abs(cols) if commands: print(commands, end="") stdout.flush() def show_cursor(): print(codes.CURSOR["show"], end="") stdout.flush()
MIT License
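A minimal usage sketch for the hide_cursor helper documented above. It assumes hide_cursor and show_cursor are importable from teletype.io (inferred from the teletype/io/common.py path); the sleep call is a hypothetical stand-in for real terminal work.

from time import sleep

from teletype.io import hide_cursor, show_cursor  # import path inferred from teletype/io/common.py

def do_work_without_cursor():
    hide_cursor()
    try:
        sleep(2)  # hypothetical stand-in for drawing output without a visible cursor
    finally:
        show_cursor()  # restore the cursor even if the work above raises

if __name__ == "__main__":
    do_work_without_cursor()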
threatconnect-inc/tcex
tcex/app_config_object/permutations.py
Permutations.db_insert_record
python
def db_insert_record(self, table_name, columns):
    bindings = ('?,' * len(columns)).strip(',')
    values = [None] * len(columns)
    try:
        sql = f"INSERT INTO {table_name} ({', '.join(columns)}) VALUES ({bindings})"
        cur = self.db_conn.cursor()
        cur.execute(sql, values)
    except sqlite3.OperationalError as e:
        raise RuntimeError(f'SQL insert failed - SQL: "{sql}", Error: "{e}"')
Insert records into DB. Args: table_name (str): The name of the table. columns (list): List of columns for insert statement.
https://github.com/threatconnect-inc/tcex/blob/dae37b73d8b33cf26360f6d25c6b305a68f2f0e2/tcex/app_config_object/permutations.py#L156-L170
import json import logging import os import random import sys try: import sqlite3 except ImportError: pass from .install_json import InstallJson from .layout_json import LayoutJson class Permutations: def __init__(self, logger=None): self.log = logger or logging.getLogger('permutations') self._db_conn = None self._input_names = None self._input_permutations = None self._output_permutations = None self.app_path = os.getcwd() self.ij = InstallJson() self.lj = LayoutJson() self.input_table = 'inputs' def _gen_permutations(self, index=0, args=None): if args is None: args = [] try: hidden = False if self.ij.runtime_level.lower() in [ 'playbook', 'triggerservice', 'webhooktriggerservice', ]: name = list(self.lj.parameters_names)[index] display = self.lj.params_dict.get(name, {}).get('display') hidden = self.lj.params_dict.get(name, {}).get('hidden', False) else: name = list(self.ij.params_dict.keys())[index] display = False input_type = self.ij.params_dict.get(name, {}).get('type') if input_type is None: self.handle_error(f'No value found in install.json for "{name}".') if ( self.ij.runtime_level.lower() == 'organization' or self.validate_layout_display(self.input_table, display) or hidden ): if input_type.lower() == 'boolean': for val in [True, False]: args.append({'name': name, 'value': val}) self.db_update_record(self.input_table, name, val) self._gen_permutations(index + 1, list(args)) args.pop() elif input_type.lower() == 'choice': valid_values = self.ij.expand_valid_values( self.ij.params_dict.get(name, {}).get('validValues', []) ) for val in valid_values: args.append({'name': name, 'value': val}) self.db_update_record(self.input_table, name, val) self._gen_permutations(index + 1, list(args)) args.pop() else: args.append({'name': name, 'value': None}) self._gen_permutations(index + 1, list(args)) else: self._gen_permutations(index + 1, list(args)) except IndexError: self._input_permutations.append(args) outputs = [] for output_data in self.ij.output_variables: name = output_data.get('name') if self.lj.outputs_dict.get(name) is not None: display = self.lj.outputs_dict.get(name, {}).get('display') valid = self.validate_layout_display(self.input_table, display) if display is None or not valid: continue outputs.append(output_data) self._output_permutations.append(outputs) @property def db_conn(self): if self._db_conn is None: try: self._db_conn = sqlite3.connect(':memory:') except sqlite3.Error as e: self.handle_error(e) return self._db_conn def db_create_table(self, table_name, columns): formatted_columns = '' for col in set(columns): formatted_columns += f""""{col.strip('"').strip("'")}" text, """ formatted_columns = formatted_columns.strip(', ') create_table_sql = f'CREATE TABLE IF NOT EXISTS {table_name} ({formatted_columns});' try: cr = self.db_conn.cursor() cr.execute(create_table_sql) except sqlite3.Error as e: self.handle_error(e) def db_drop_table(self, table_name): create_table_sql = f'DROP TABLE IF EXISTS {table_name};' try: cr = self.db_conn.cursor() cr.execute(create_table_sql) except sqlite3.Error as e: self.handle_error(e)
Apache License 2.0
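A stand-alone sketch of the insert pattern db_insert_record builds, using sqlite3 directly. The table and column names are hypothetical; the full Permutations class is not constructed here because it expects an App's install.json and layout.json on disk.

import sqlite3

columns = ["tc_action", "confidence", "owner"]   # hypothetical input names
conn = sqlite3.connect(":memory:")
conn.execute(f"CREATE TABLE inputs ({', '.join(c + ' text' for c in columns)})")

# Same binding/value construction as db_insert_record: one "?" per column,
# every value initialised to NULL so the row can be updated later.
bindings = ('?,' * len(columns)).strip(',')       # "?,?,?"
values = [None] * len(columns)
sql = f"INSERT INTO inputs ({', '.join(columns)}) VALUES ({bindings})"
conn.execute(sql, values)
conn.commit()

print(conn.execute("SELECT * FROM inputs").fetchall())   # [(None, None, None)]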
consensys/mythril
mythril/laser/smt/bool.py
is_false
python
def is_false(a: Bool) -> bool:
    return z3.is_false(a.raw)
Returns whether the provided bool can be simplified to false. :param a: :return:
https://github.com/consensys/mythril/blob/df1d4dd0ebbb623054f4708717664dc6e27f76b9/mythril/laser/smt/bool.py#L126-L132
from typing import Union, cast, List, Set import z3 from mythril.laser.smt.expression import Expression class Bool(Expression[z3.BoolRef]): @property def is_false(self) -> bool: self.simplify() return z3.is_false(self.raw) @property def is_true(self) -> bool: self.simplify() return z3.is_true(self.raw) @property def value(self) -> Union[bool, None]: self.simplify() if self.is_true: return True elif self.is_false: return False else: return None def __eq__(self, other: object) -> "Bool": if isinstance(other, Expression): return Bool(cast(z3.BoolRef, self.raw == other.raw), self.annotations.union(other.annotations)) return Bool(cast(z3.BoolRef, self.raw == other), self.annotations) def __ne__(self, other: object) -> "Bool": if isinstance(other, Expression): return Bool(cast(z3.BoolRef, self.raw != other.raw), self.annotations.union(other.annotations)) return Bool(cast(z3.BoolRef, self.raw != other), self.annotations) def __bool__(self) -> bool: if self.value is not None: return self.value else: return False def __hash__(self) -> int: return self.raw.__hash__() def And(*args: Union[Bool, bool]) -> Bool: annotations = set() args_list = [arg if isinstance(arg, Bool) else Bool(arg) for arg in args] for arg in args_list: annotations = annotations.union(arg.annotations) return Bool(z3.And([a.raw for a in args_list]), annotations) def Xor(a: Bool, b: Bool) -> Bool: union = a.annotations.union(b.annotations) return Bool(z3.Xor(a.raw, b.raw), union) def Or(*args: Union[Bool, bool]) -> Bool: args_list = [arg if isinstance(arg, Bool) else Bool(arg) for arg in args] annotations = set() for arg in args_list: annotations = annotations.union(arg.annotations) return Bool(z3.Or([a.raw for a in args_list]), annotations=annotations) def Not(a: Bool) -> Bool: return Bool(z3.Not(a.raw), a.annotations)
MIT License
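A small usage sketch for is_false above, assuming it is importable from mythril.laser.smt.bool alongside the Bool wrapper shown in the context, and that Bool can wrap a raw z3 expression directly, as the And/Or/Not helpers in the surrounding module do.

import z3
from mythril.laser.smt.bool import Bool, is_false  # module path taken from the record above

false_const = Bool(z3.BoolVal(False))   # wraps the literal z3 "false" constant
symbolic = Bool(z3.Bool("x"))           # wraps a free boolean symbol

print(is_false(false_const))  # True  - the raw node is the false constant
print(is_false(symbolic))     # False - a symbolic expression is not literally false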
andreafioraldi/angrdbg
angrdbg/page_8.py
SimDbgMemory.contains_no_backer
python
def contains_no_backer(self, addr):
    for i, p in self._pages.items():
        if i * self._page_size <= addr < (i + 1) * self._page_size:
            return addr - (i * self._page_size) in p.keys()
    return False
Tests if the address is contained in any page of paged memory, without considering memory backers. :param int addr: The address to test. :return: True if the address is included in one of the pages, False otherwise. :rtype: bool
https://github.com/andreafioraldi/angrdbg/blob/59e130bbd1c64fcb9af6fbc2feddfa781c0b3036/angrdbg/page_8.py#L426-L438
import cooldict import cffi import cle from angr.errors import SimMemoryError, SimSegfaultError, SimMemoryMissingError from angr import sim_options as options from angr.storage.memory_object import SimMemoryObject from claripy.ast.bv import BV _ffi = cffi.FFI() import logging l = logging.getLogger("angrdbg.page") from angr.storage import paged_memory import claripy from .context import load_project, get_memory_type, get_debugger, USE_CLE_MEMORY from .abstract_debugger import SEG_PROT_R, SEG_PROT_W, SEG_PROT_X class DbgPage(paged_memory.BasePage): def __init__(self, *args, **kwargs): storage = kwargs.pop("storage", None) self._sinkhole = kwargs.pop("sinkhole", None) super(DbgPage, self).__init__(*args, **kwargs) self._storage = [None] * self._page_size if storage is None else storage def keys(self): if self._sinkhole is not None: return range(self._page_addr, self._page_addr + self._page_size) else: return [ self._page_addr + i for i, v in enumerate( self._storage) if v is not None] def replace_mo(self, state, old_mo, new_mo): if self._sinkhole is old_mo: self._sinkhole = new_mo else: start, end = self._resolve_range(old_mo) for i in range(start, end): if self._storage[i - self._page_addr] is old_mo: self._storage[i - self._page_addr] = new_mo def store_overwrite(self, state, new_mo, start, end): if start == self._page_addr and end == self._page_addr + self._page_size: self._sinkhole = new_mo self._storage = [None] * self._page_size else: for i in range(start, end): self._storage[i - self._page_addr] = new_mo def store_underwrite(self, state, new_mo, start, end): if start == self._page_addr and end == self._page_addr + self._page_size: self._sinkhole = new_mo else: for i in range(start, end): if self._storage[i - self._page_addr] is None: self._storage[i - self._page_addr] = new_mo def load_mo(self, state, page_idx): mo = self._storage[page_idx - self._page_addr] if mo is None and hasattr(self, "from_dbg"): byte_val = get_debugger().get_byte(page_idx) mo = SimMemoryObject(claripy.BVV(byte_val, 8), page_idx) self._storage[page_idx - self._page_addr] = mo return mo def load_slice(self, state, start, end): items = [] if start > self._page_addr + self._page_size or end < self._page_addr: l.warning("Calling load_slice on the wrong page.") return items for addr in range(max(start, self._page_addr), min( end, self._page_addr + self._page_size)): i = addr - self._page_addr mo = self._storage[i] if mo is None and hasattr(self, "from_dbg"): byte_val = get_debugger().get_byte(addr) mo = SimMemoryObject(claripy.BVV(byte_val, 8), addr) self._storage[i] = mo if mo is not None and (not items or items[-1][1] is not mo): items.append((addr, mo)) return items def _copy_args(self): return {'storage': list(self._storage), 'sinkhole': self._sinkhole} def copy(self): c = DbgPage( self._page_addr, self._page_size, permissions=self.permissions, **self._copy_args()) if hasattr(self, "from_dbg"): setattr(c, "from_dbg", True) return c class SimDbgMemory(object): def __init__(self, memory_backer=None, permissions_backer=None, pages=None, initialized=None, name_mapping=None, hash_mapping=None, page_size=None, symbolic_addrs=None, check_permissions=False): self._cowed = set() self._memory_backer = { } if memory_backer is None else memory_backer self._permissions_backer = permissions_backer self._executable_pages = False if permissions_backer is None else permissions_backer[0] self._permission_map = { } if permissions_backer is None else permissions_backer[1] self._pages = { } if pages is None else pages 
self._initialized = set() if initialized is None else initialized self._page_size = 0x1000 if page_size is None else page_size self._symbolic_addrs = dict() if symbolic_addrs is None else symbolic_addrs self.state = None self._preapproved_stack = range(0) self._check_perms = check_permissions self._name_mapping = cooldict.BranchingDict() if name_mapping is None else name_mapping self._hash_mapping = cooldict.BranchingDict() if hash_mapping is None else hash_mapping self._updated_mappings = set() def __getstate__(self): return { '_memory_backer': self._memory_backer, '_permissions_backer': self._permissions_backer, '_executable_pages': self._executable_pages, '_permission_map': self._permission_map, '_pages': self._pages, '_initialized': self._initialized, '_page_size': self._page_size, 'state': None, '_name_mapping': self._name_mapping, '_hash_mapping': self._hash_mapping, '_symbolic_addrs': self._symbolic_addrs, '_preapproved_stack': self._preapproved_stack, '_check_perms': self._check_perms } def __setstate__(self, s): self._cowed = set() self.__dict__.update(s) def branch(self): new_name_mapping = self._name_mapping.branch() if options.REVERSE_MEMORY_NAME_MAP in self.state.options else self._name_mapping new_hash_mapping = self._hash_mapping.branch() if options.REVERSE_MEMORY_HASH_MAP in self.state.options else self._hash_mapping new_pages = dict(self._pages) self._cowed = set() m = SimDbgMemory(memory_backer=self._memory_backer, permissions_backer=self._permissions_backer, pages=new_pages, initialized=set(self._initialized), page_size=self._page_size, name_mapping=new_name_mapping, hash_mapping=new_hash_mapping, symbolic_addrs=dict(self._symbolic_addrs), check_permissions=self._check_perms) m._preapproved_stack = self._preapproved_stack return m def __getitem__(self, addr): page_num = addr // self._page_size page_idx = addr try: v = self._get_page(page_num).load_mo(self.state, page_idx) return v except KeyError: raise KeyError(addr) def __setitem__(self, addr, v): page_num = addr // self._page_size page_idx = addr self._get_page(page_num, write=True, create=True)[page_idx] = v self._update_mappings(addr, v.object) def __delitem__(self, addr): raise Exception("For performance reasons, deletion is not supported. 
Contact Yan if this needs to change.") @property def allow_segv(self): return self._check_perms and not self.state.scratch.priv and options.STRICT_PAGE_ACCESS in self.state.options @property def byte_width(self): return self.state.arch.byte_width if self.state is not None else 8 def load_objects(self, addr, num_bytes, ret_on_segv=False): result = [ ] end = addr + num_bytes for page_addr in self._containing_pages(addr, end): try: page = self._get_page(page_addr // self._page_size) except KeyError: if self.allow_segv: if ret_on_segv: break raise SimSegfaultError(addr, 'read-miss') else: continue if self.allow_segv and not page.concrete_permissions & DbgPage.PROT_READ: if ret_on_segv: break raise SimSegfaultError(addr, 'non-readable') result.extend(page.load_slice(self.state, addr, end)) return result def _create_page(self, page_num, permissions=None): return DbgPage( page_num*self._page_size, self._page_size, executable=self._executable_pages, permissions=permissions ) def _initialize_page(self, n, new_page): if n in self._initialized: return False self._initialized.add(n) new_page_addr = n * self._page_size initialized = False if self.state is not None: self.state.scratch.push_priv(True) project = load_project() debugger = get_debugger() seg = None try: seg = debugger.seg_by_addr(new_page_addr) except BaseException: pass try: if get_memory_type() == USE_CLE_MEMORY: if isinstance(self._memory_backer, cle.Clemory): for start, end in self._permission_map: if start <= new_page_addr < end: flags = self._permission_map[(start, end)] new_page.permissions = claripy.BVV(flags, 3) break for backer_addr, backer in self._memory_backer.backers(new_page_addr): if backer_addr >= new_page_addr + self._page_size: break relevant_region_start = max(new_page_addr, backer_addr) relevant_region_end = min(new_page_addr + self._page_size, backer_addr + len(backer)) slice_start = relevant_region_start - backer_addr slice_end = relevant_region_end - backer_addr if self.byte_width == 8: relevant_data = bytes(memoryview(backer)[slice_start:slice_end]) mo = SimMemoryObject( claripy.BVV(relevant_data), relevant_region_start, byte_width=self.byte_width) self._apply_object_to_page(new_page_addr, mo, page=new_page) else: for i, byte in enumerate(backer[slice_start:slice_end]): mo = SimMemoryObject(claripy.BVV(byte, self.byte_width), relevant_region_start + i, byte_width=self.byte_width) self._apply_object_to_page(new_page_addr, mo, page=new_page) initialized = True elif len(self._memory_backer) <= self._page_size: for i in self._memory_backer: if new_page_addr <= i <= new_page_addr + self._page_size: if isinstance(self._memory_backer[i], claripy.ast.Base): backer = self._memory_backer[i] elif isinstance(self._memory_backer[i], bytes): backer = claripy.BVV(self._memory_backer[i]) else: backer = claripy.BVV(self._memory_backer[i], self.byte_width) mo = SimMemoryObject(backer, i, byte_width=self.byte_width) self._apply_object_to_page(n*self._page_size, mo, page=new_page) initialized = True elif len(self._memory_backer) > self._page_size: for i in range(self._page_size): try: if isinstance(self._memory_backer[i], claripy.ast.Base): backer = self._memory_backer[i] elif isinstance(self._memory_backer[i], bytes): backer = claripy.BVV(self._memory_backer[i]) else: backer = claripy.BVV(self._memory_backer[i], self.byte_width) mo = SimMemoryObject(backer, new_page_addr+i, byte_width=self.byte_width) self._apply_object_to_page(n*self._page_size, mo, page=new_page) initialized = True except KeyError: pass except Exception as ee: 
print(ee) try: if seg is not None: perms = 0 if seg.perms & SEG_PROT_X: perms += DbgPage.PROT_EXEC if seg.perms & SEG_PROT_W: perms += DbgPage.PROT_WRITE if seg.perms & SEG_PROT_R: perms += DbgPage.PROT_READ new_page.permissions = claripy.BVV(perms, 3) initialized = True setattr(new_page, "from_dbg", True) except Exception as ee: import traceback traceback.print_exc() if self.state is not None: self.state.scratch.pop_priv() return initialized def _get_page(self, page_num, write=False, create=False, initialize=True): page_addr = page_num * self._page_size try: page = self._pages[page_num] except KeyError: if not (initialize or create or page_addr in self._preapproved_stack): raise page = self._create_page(page_num) self._symbolic_addrs[page_num] = set() if initialize: initialized = self._initialize_page(page_num, page) if not initialized and not create and page_addr not in self._preapproved_stack: raise self._pages[page_num] = page self._cowed.add(page_num) return page if write and page_num not in self._cowed: page = page.copy() self._symbolic_addrs[page_num] = set(self._symbolic_addrs[page_num]) self._cowed.add(page_num) self._pages[page_num] = page return page def __contains__(self, addr): try: return self.__getitem__(addr) is not None except KeyError: return False
BSD 2-Clause Simplified License
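A self-contained illustration of the page arithmetic contains_no_backer performs. The page dictionary below is a hypothetical, offset-keyed stand-in for SimDbgMemory._pages, since exercising the real class requires an attached debugger.

PAGE_SIZE = 0x1000

# Hypothetical stand-in for SimDbgMemory._pages: page index -> {offset in page: memory object}
pages = {0x400: {0x10: "memory object at 0x400010"}}

def contains_no_backer(addr):
    # Same lookup as the method above: find the page that covers addr, then ask
    # whether that offset is already populated; memory backers are ignored entirely.
    for i, p in pages.items():
        if i * PAGE_SIZE <= addr < (i + 1) * PAGE_SIZE:
            return addr - (i * PAGE_SIZE) in p.keys()
    return False

print(contains_no_backer(0x400010))  # True  - page 0x400 has an object at offset 0x10
print(contains_no_backer(0x400020))  # False - the page exists but the offset is empty
print(contains_no_backer(0x500000))  # False - no page covers this address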
helios-protocol/py-helios-node
hvm/utils/env.py
env_int
python
def env_int(name: str, required: bool=False, default: Union[Type[empty], int]=empty) -> int:
    value = get_env_value(name, required=required, default=default)
    if value is empty:
        raise ValueError(
            "`env_int` requires either a default value to be specified, or for "
            "the variable to be present in the environment"
        )
    return int(value)
Pulls an environment variable out of the environment and casts it to an integer. If the name is not present in the environment and no default is specified then a ``ValueError`` will be raised. Similarly, if the environment value is not castable to an integer, a ``ValueError`` will be raised. :param name: The name of the environment variable be pulled :type name: str :param required: Whether the environment variable is required. If ``True`` and the variable is not present, a ``KeyError`` is raised. :type required: bool :param default: The value to return if the environment variable is not present. (Providing a default alongside setting ``required=True`` will raise a ``ValueError``) :type default: bool
https://github.com/helios-protocol/py-helios-node/blob/691b378938f0a36bf8774dc1ee4e4370b6cf7c63/hvm/utils/env.py#L59-L84
import os from typing import ( Any, Type, Iterable, List, Union, TypeVar, Dict, Callable ) TRUE_VALUES = set(( True, 'True', 'true', )) class empty(object): pass def get_env_value(name: str, required: bool=False, default: Any=empty) -> str: if required and default is not empty: raise ValueError("Using `default` with `required=True` is invalid") elif required: try: value = os.environ[name] except KeyError: raise KeyError( "Must set environment variable {0}".format(name) ) else: value = os.environ.get(name, default) return value
MIT License
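A short usage sketch for env_int above; the environment variable names are hypothetical, and the import path is taken from the record.

import os

from hvm.utils.env import env_int  # import path taken from the record above

os.environ["HELIOS_MAX_PEERS"] = "25"   # hypothetical variable name

print(env_int("HELIOS_MAX_PEERS"))              # 25
print(env_int("HELIOS_SYNC_LIMIT", default=10)) # 10 - falls back to the default
# env_int("HELIOS_SYNC_LIMIT") with no default raises ValueError,
# and env_int("HELIOS_SYNC_LIMIT", required=True) raises KeyError instead.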
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/page_setup.py
PageSetup.page_starting_number
python
def page_starting_number(self, page_starting_number):
    self._page_starting_number = page_starting_number
Sets the page_starting_number of this PageSetup. Gets or sets the starting page number of the section. # noqa: E501 :param page_starting_number: The page_starting_number of this PageSetup. # noqa: E501 :type: int
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/page_setup.py#L699-L707
import pprint import re import datetime import six import json class PageSetup(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'link': 'WordsApiLink', 'bidi': 'bool', 'border_always_in_front': 'bool', 'border_applies_to': 'str', 'border_distance_from': 'str', 'bottom_margin': 'float', 'different_first_page_header_footer': 'bool', 'first_page_tray': 'int', 'footer_distance': 'float', 'gutter': 'float', 'header_distance': 'float', 'left_margin': 'float', 'line_number_count_by': 'int', 'line_number_distance_from_text': 'float', 'line_number_restart_mode': 'str', 'line_starting_number': 'int', 'orientation': 'str', 'other_pages_tray': 'int', 'page_height': 'float', 'page_number_style': 'str', 'page_starting_number': 'int', 'page_width': 'float', 'paper_size': 'str', 'restart_page_numbering': 'bool', 'right_margin': 'float', 'rtl_gutter': 'bool', 'section_start': 'str', 'suppress_endnotes': 'bool', 'top_margin': 'float', 'vertical_alignment': 'str' } attribute_map = { 'link': 'Link', 'bidi': 'Bidi', 'border_always_in_front': 'BorderAlwaysInFront', 'border_applies_to': 'BorderAppliesTo', 'border_distance_from': 'BorderDistanceFrom', 'bottom_margin': 'BottomMargin', 'different_first_page_header_footer': 'DifferentFirstPageHeaderFooter', 'first_page_tray': 'FirstPageTray', 'footer_distance': 'FooterDistance', 'gutter': 'Gutter', 'header_distance': 'HeaderDistance', 'left_margin': 'LeftMargin', 'line_number_count_by': 'LineNumberCountBy', 'line_number_distance_from_text': 'LineNumberDistanceFromText', 'line_number_restart_mode': 'LineNumberRestartMode', 'line_starting_number': 'LineStartingNumber', 'orientation': 'Orientation', 'other_pages_tray': 'OtherPagesTray', 'page_height': 'PageHeight', 'page_number_style': 'PageNumberStyle', 'page_starting_number': 'PageStartingNumber', 'page_width': 'PageWidth', 'paper_size': 'PaperSize', 'restart_page_numbering': 'RestartPageNumbering', 'right_margin': 'RightMargin', 'rtl_gutter': 'RtlGutter', 'section_start': 'SectionStart', 'suppress_endnotes': 'SuppressEndnotes', 'top_margin': 'TopMargin', 'vertical_alignment': 'VerticalAlignment' } def __init__(self, link=None, bidi=None, border_always_in_front=None, border_applies_to=None, border_distance_from=None, bottom_margin=None, different_first_page_header_footer=None, first_page_tray=None, footer_distance=None, gutter=None, header_distance=None, left_margin=None, line_number_count_by=None, line_number_distance_from_text=None, line_number_restart_mode=None, line_starting_number=None, orientation=None, other_pages_tray=None, page_height=None, page_number_style=None, page_starting_number=None, page_width=None, paper_size=None, restart_page_numbering=None, right_margin=None, rtl_gutter=None, section_start=None, suppress_endnotes=None, top_margin=None, vertical_alignment=None): self._link = None self._bidi = None self._border_always_in_front = None self._border_applies_to = None self._border_distance_from = None self._bottom_margin = None self._different_first_page_header_footer = None self._first_page_tray = None self._footer_distance = None self._gutter = None self._header_distance = None self._left_margin = None self._line_number_count_by = None self._line_number_distance_from_text = None self._line_number_restart_mode = None self._line_starting_number = None self._orientation = None self._other_pages_tray = None self._page_height = None 
self._page_number_style = None self._page_starting_number = None self._page_width = None self._paper_size = None self._restart_page_numbering = None self._right_margin = None self._rtl_gutter = None self._section_start = None self._suppress_endnotes = None self._top_margin = None self._vertical_alignment = None self.discriminator = None if link is not None: self.link = link if bidi is not None: self.bidi = bidi if border_always_in_front is not None: self.border_always_in_front = border_always_in_front if border_applies_to is not None: self.border_applies_to = border_applies_to if border_distance_from is not None: self.border_distance_from = border_distance_from if bottom_margin is not None: self.bottom_margin = bottom_margin if different_first_page_header_footer is not None: self.different_first_page_header_footer = different_first_page_header_footer if first_page_tray is not None: self.first_page_tray = first_page_tray if footer_distance is not None: self.footer_distance = footer_distance if gutter is not None: self.gutter = gutter if header_distance is not None: self.header_distance = header_distance if left_margin is not None: self.left_margin = left_margin if line_number_count_by is not None: self.line_number_count_by = line_number_count_by if line_number_distance_from_text is not None: self.line_number_distance_from_text = line_number_distance_from_text if line_number_restart_mode is not None: self.line_number_restart_mode = line_number_restart_mode if line_starting_number is not None: self.line_starting_number = line_starting_number if orientation is not None: self.orientation = orientation if other_pages_tray is not None: self.other_pages_tray = other_pages_tray if page_height is not None: self.page_height = page_height if page_number_style is not None: self.page_number_style = page_number_style if page_starting_number is not None: self.page_starting_number = page_starting_number if page_width is not None: self.page_width = page_width if paper_size is not None: self.paper_size = paper_size if restart_page_numbering is not None: self.restart_page_numbering = restart_page_numbering if right_margin is not None: self.right_margin = right_margin if rtl_gutter is not None: self.rtl_gutter = rtl_gutter if section_start is not None: self.section_start = section_start if suppress_endnotes is not None: self.suppress_endnotes = suppress_endnotes if top_margin is not None: self.top_margin = top_margin if vertical_alignment is not None: self.vertical_alignment = vertical_alignment @property def link(self): return self._link @link.setter def link(self, link): self._link = link @property def bidi(self): return self._bidi @bidi.setter def bidi(self, bidi): self._bidi = bidi @property def border_always_in_front(self): return self._border_always_in_front @border_always_in_front.setter def border_always_in_front(self, border_always_in_front): self._border_always_in_front = border_always_in_front @property def border_applies_to(self): return self._border_applies_to @border_applies_to.setter def border_applies_to(self, border_applies_to): allowed_values = ["AllPages", "FirstPage", "OtherPages"] if not border_applies_to.isdigit(): if border_applies_to not in allowed_values: raise ValueError( "Invalid value for `border_applies_to` ({0}), must be one of {1}" .format(border_applies_to, allowed_values)) self._border_applies_to = border_applies_to else: self._border_applies_to = allowed_values[int(border_applies_to) if six.PY3 else long(border_applies_to)] @property def border_distance_from(self): return 
self._border_distance_from @border_distance_from.setter def border_distance_from(self, border_distance_from): allowed_values = ["Text", "PageEdge"] if not border_distance_from.isdigit(): if border_distance_from not in allowed_values: raise ValueError( "Invalid value for `border_distance_from` ({0}), must be one of {1}" .format(border_distance_from, allowed_values)) self._border_distance_from = border_distance_from else: self._border_distance_from = allowed_values[int(border_distance_from) if six.PY3 else long(border_distance_from)] @property def bottom_margin(self): return self._bottom_margin @bottom_margin.setter def bottom_margin(self, bottom_margin): self._bottom_margin = bottom_margin @property def different_first_page_header_footer(self): return self._different_first_page_header_footer @different_first_page_header_footer.setter def different_first_page_header_footer(self, different_first_page_header_footer): self._different_first_page_header_footer = different_first_page_header_footer @property def first_page_tray(self): return self._first_page_tray @first_page_tray.setter def first_page_tray(self, first_page_tray): self._first_page_tray = first_page_tray @property def footer_distance(self): return self._footer_distance @footer_distance.setter def footer_distance(self, footer_distance): self._footer_distance = footer_distance @property def gutter(self): return self._gutter @gutter.setter def gutter(self, gutter): self._gutter = gutter @property def header_distance(self): return self._header_distance @header_distance.setter def header_distance(self, header_distance): self._header_distance = header_distance @property def left_margin(self): return self._left_margin @left_margin.setter def left_margin(self, left_margin): self._left_margin = left_margin @property def line_number_count_by(self): return self._line_number_count_by @line_number_count_by.setter def line_number_count_by(self, line_number_count_by): self._line_number_count_by = line_number_count_by @property def line_number_distance_from_text(self): return self._line_number_distance_from_text @line_number_distance_from_text.setter def line_number_distance_from_text(self, line_number_distance_from_text): self._line_number_distance_from_text = line_number_distance_from_text @property def line_number_restart_mode(self): return self._line_number_restart_mode @line_number_restart_mode.setter def line_number_restart_mode(self, line_number_restart_mode): allowed_values = ["RestartPage", "RestartSection", "Continuous"] if not line_number_restart_mode.isdigit(): if line_number_restart_mode not in allowed_values: raise ValueError( "Invalid value for `line_number_restart_mode` ({0}), must be one of {1}" .format(line_number_restart_mode, allowed_values)) self._line_number_restart_mode = line_number_restart_mode else: self._line_number_restart_mode = allowed_values[int(line_number_restart_mode) if six.PY3 else long(line_number_restart_mode)] @property def line_starting_number(self): return self._line_starting_number @line_starting_number.setter def line_starting_number(self, line_starting_number): self._line_starting_number = line_starting_number @property def orientation(self): return self._orientation @orientation.setter def orientation(self, orientation): allowed_values = ["Portrait", "Landscape"] if not orientation.isdigit(): if orientation not in allowed_values: raise ValueError( "Invalid value for `orientation` ({0}), must be one of {1}" .format(orientation, allowed_values)) self._orientation = orientation else: self._orientation = 
allowed_values[int(orientation) if six.PY3 else long(orientation)] @property def other_pages_tray(self): return self._other_pages_tray @other_pages_tray.setter def other_pages_tray(self, other_pages_tray): self._other_pages_tray = other_pages_tray @property def page_height(self): return self._page_height @page_height.setter def page_height(self, page_height): self._page_height = page_height @property def page_number_style(self): return self._page_number_style @page_number_style.setter def page_number_style(self, page_number_style): allowed_values = ["Arabic", "UppercaseRoman", "LowercaseRoman", "UppercaseLetter", "LowercaseLetter", "Ordinal", "Number", "OrdinalText", "Hex", "ChicagoManual", "Kanji", "KanjiDigit", "AiueoHalfWidth", "IrohaHalfWidth", "ArabicFullWidth", "ArabicHalfWidth", "KanjiTraditional", "KanjiTraditional2", "NumberInCircle", "DecimalFullWidth", "Aiueo", "Iroha", "LeadingZero", "Bullet", "Ganada", "Chosung", "GB1", "GB2", "GB3", "GB4", "Zodiac1", "Zodiac2", "Zodiac3", "TradChinNum1", "TradChinNum2", "TradChinNum3", "TradChinNum4", "SimpChinNum1", "SimpChinNum2", "SimpChinNum3", "SimpChinNum4", "HanjaRead", "HanjaReadDigit", "Hangul", "Hanja", "Hebrew1", "Arabic1", "Hebrew2", "Arabic2", "HindiLetter1", "HindiLetter2", "HindiArabic", "HindiCardinalText", "ThaiLetter", "ThaiArabic", "ThaiCardinalText", "VietCardinalText", "NumberInDash", "LowercaseRussian", "UppercaseRussian", "None", "Custom"] if not page_number_style.isdigit(): if page_number_style not in allowed_values: raise ValueError( "Invalid value for `page_number_style` ({0}), must be one of {1}" .format(page_number_style, allowed_values)) self._page_number_style = page_number_style else: self._page_number_style = allowed_values[int(page_number_style) if six.PY3 else long(page_number_style)] @property def page_starting_number(self): return self._page_starting_number @page_starting_number.setter
MIT License
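A minimal sketch of driving the setter above on a PageSetup model. The property values are hypothetical; restart_page_numbering is another field declared in the same generated model and is assumed to expose the usual property setter.

from asposewordscloud.models.page_setup import PageSetup  # module path from the record above

setup = PageSetup()
setup.page_starting_number = 5       # stored as-is by the setter shown above
setup.restart_page_numbering = True  # companion flag from the same model

print(setup.page_starting_number)    # 5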
basho/riak-python-client
riak/security.py
SecurityCreds.username
python
def username(self):
    return self._username
Riak Username :rtype: str
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/security.py#L135-L141
import ssl import warnings from riak import RiakError from riak.util import str_to_long if hasattr(ssl, 'SSLContext'): USE_STDLIB_SSL = True else: USE_STDLIB_SSL = False if not USE_STDLIB_SSL: import OpenSSL.SSL from OpenSSL import crypto OPENSSL_VERSION_101G = 268439679 if hasattr(ssl, 'OPENSSL_VERSION_NUMBER'): sslver = ssl.OPENSSL_VERSION_NUMBER tls_12 = hasattr(ssl, 'PROTOCOL_TLSv1_2') if sslver < OPENSSL_VERSION_101G or not tls_12: verstring = ssl.OPENSSL_VERSION msg = "{0} (>= 1.0.1g required), TLS 1.2 support: {1}" .format(verstring, tls_12) warnings.warn(msg, UserWarning) if hasattr(ssl, 'PROTOCOL_TLSv1_2'): DEFAULT_TLS_VERSION = ssl.PROTOCOL_TLSv1_2 elif hasattr(ssl, 'PROTOCOL_TLSv1_1'): DEFAULT_TLS_VERSION = ssl.PROTOCOL_TLSv1_1 elif hasattr(ssl, 'PROTOCOL_TLSv1'): DEFAULT_TLS_VERSION = ssl.PROTOCOL_TLSv1 else: DEFAULT_TLS_VERSION = ssl.PROTOCOL_SSLv23 else: sslver = OpenSSL.SSL.OPENSSL_VERSION_NUMBER tls_12 = hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD') if (sslver < OPENSSL_VERSION_101G) or tls_12: verstring = OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION) msg = "{0} (>= 1.0.1g required), TLS 1.2 support: {1}" .format(verstring, tls_12) warnings.warn(msg, UserWarning) if hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_2_METHOD elif hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_1_METHOD elif hasattr(OpenSSL.SSL, 'TLSv1_METHOD'): DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_METHOD else: DEFAULT_TLS_VERSION = OpenSSL.SSL.SSLv23_METHOD class SecurityError(RiakError): def __init__(self, message="Security error"): super(SecurityError, self).__init__(message) class SecurityCreds: def __init__(self, username=None, password=None, pkey_file=None, pkey=None, cert_file=None, cert=None, cacert_file=None, cacert=None, crl_file=None, crl=None, ciphers=None, ssl_version=DEFAULT_TLS_VERSION): self._username = username self._password = password self._pkey_file = pkey_file self._pkey = pkey self._cert_file = cert_file self._cert = cert self._cacert_file = cacert_file self._cacert = cacert self._crl_file = crl_file self._crl = crl self._ciphers = ciphers self._ssl_version = ssl_version @property
Apache License 2.0
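A brief usage sketch of the username property above; the credential values and certificate path are hypothetical.

from riak.security import SecurityCreds  # module path from the record above

creds = SecurityCreds(username="riakuser",             # hypothetical credentials
                      password="riakpass",
                      cacert_file="/etc/riak/ca.crt")  # hypothetical CA bundle path

print(creds.username)  # "riakuser" - read-only property backed by _username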
wangheda/youtube-8m
youtube-8m-wangheda/inference-pre-ensemble.py
get_input_data_tensors
python
def get_input_data_tensors(reader, data_pattern, batch_size, num_readers=1):
    with tf.name_scope("input"):
        files = gfile.Glob(data_pattern)
        files.sort()
        if not files:
            raise IOError("Unable to find input files. data_pattern='" +
                          data_pattern + "'")
        logging.info("number of input files: " + str(len(files)))
        filename_queue = tf.train.string_input_producer(
            files, num_epochs=1, shuffle=False)
        examples_and_labels = reader.prepare_reader(filename_queue)
        video_id_batch, video_batch, unused_labels, num_frames_batch = (
            tf.train.batch(examples_and_labels,
                           batch_size=batch_size,
                           capacity=batch_size * 8,
                           allow_smaller_final_batch=True,
                           enqueue_many=True))
        return video_id_batch, video_batch, unused_labels, num_frames_batch
Creates the section of the graph which reads the input data. Args: reader: A class which parses the input data. data_pattern: A 'glob' style path to the data files. batch_size: How many examples to process at a time. num_readers: How many I/O threads to use. Returns: A tuple containing the features tensor, labels tensor, and optionally a tensor containing the number of frames per video. The exact dimensions depend on the reader being used. Raises: IOError: If no files matching the given pattern were found.
https://github.com/wangheda/youtube-8m/blob/07e54b387ee027cb58b0c14f5eb7c88cfa516d58/youtube-8m-wangheda/inference-pre-ensemble.py#L96-L129
import gc import os import time import eval_util import losses import frame_level_models import video_level_models import data_augmentation import feature_transform import readers import utils import numpy import numpy as np import tensorflow as tf from tensorflow import app from tensorflow import flags from tensorflow import gfile from tensorflow import logging FLAGS = flags.FLAGS if __name__ == '__main__': flags.DEFINE_string("train_dir", "/tmp/yt8m_model/", "The directory to load the model files from.") flags.DEFINE_string("model_checkpoint_path", None, "The file path to load the model from.") flags.DEFINE_string("output_dir", "", "The file to save the predictions to.") flags.DEFINE_string( "input_data_pattern", "", "File glob defining the evaluation dataset in tensorflow.SequenceExample " "format. The SequenceExamples are expected to have an 'rgb' byte array " "sequence feature as well as a 'labels' int64 context feature.") flags.DEFINE_string( "distill_data_pattern", None, "File glob defining the distillation data pattern") flags.DEFINE_bool( "frame_features", False, "If set, then --eval_data_pattern must be frame-level features. " "Otherwise, --eval_data_pattern must be aggregated video-level " "features. The model must also be set appropriately (i.e. to read 3D " "batches VS 4D batches.") flags.DEFINE_integer( "batch_size", 8192, "How many examples to process per batch.") flags.DEFINE_string("feature_names", "mean_rgb", "Name of the feature " "to use for training.") flags.DEFINE_string("feature_sizes", "1024", "Length of the feature vectors.") flags.DEFINE_integer("file_size", 4096, "Number of frames per batch for DBoF.") flags.DEFINE_string( "model", "YouShouldSpecifyAModel", "Which architecture to use for the model. Models are defined " "in models.py.") flags.DEFINE_integer("num_readers", 1, "How many threads to use for reading input files.") flags.DEFINE_integer("top_k", 20, "How many predictions to output per video.") flags.DEFINE_bool( "dropout", False, "Whether to consider dropout") flags.DEFINE_float("keep_prob", 1.0, "probability to keep output (used in dropout, keep it unchanged in validationg and test)") flags.DEFINE_float("noise_level", 0.0, "standard deviation of noise (added to hidden nodes)") def find_class_by_name(name, modules): modules = [getattr(module, name, None) for module in modules] return next(a for a in modules if a)
Apache License 2.0
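A hedged sketch of wiring get_input_data_tensors into a TF1-style evaluation loop, assuming the function definition above is available in the same script. The reader class, feature settings, and data pattern are assumptions based on the surrounding repository, not values taken from this record.

import tensorflow as tf

import readers  # from the same repository

reader = readers.YT8MAggregatedFeatureReader(           # assumed reader class from readers.py
    feature_names=["mean_rgb"], feature_sizes=[1024])

video_id, video, labels, num_frames = get_input_data_tensors(
    reader, "/data/yt8m/validate*.tfrecord", batch_size=1024)  # hypothetical data pattern

with tf.Session() as sess:
    # string_input_producer(num_epochs=1) keeps its epoch counter in local variables
    sess.run(tf.local_variables_initializer())
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        ids, feats = sess.run([video_id, video])
        print(ids.shape, feats.shape)
    finally:
        coord.request_stop()
        coord.join(threads)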
pelioniot/mbed-cloud-sdk-python
src/mbed_cloud/_backends/device_directory/models/device_eq_neq_filter.py
DeviceEqNeqFilter.device_class
python
def device_class(self):
    return self._device_class
Gets the device_class of this DeviceEqNeqFilter. :return: The device_class of this DeviceEqNeqFilter. :rtype: str
https://github.com/pelioniot/mbed-cloud-sdk-python/blob/71dc67fc2a8d1aff31e35ec781fb328e6a60639c/src/mbed_cloud/_backends/device_directory/models/device_eq_neq_filter.py#L368-L375
from pprint import pformat from six import iteritems import re class DeviceEqNeqFilter(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'account_id': 'str', 'auto_update': 'bool', 'bootstrap_expiration_date': 'datetime', 'bootstrapped_timestamp': 'datetime', 'ca_id': 'str', 'connector_expiration_date': 'datetime', 'created_at': 'datetime', 'custom_attributes': 'dict(str, str)', 'deployed_state': 'str', 'deployment': 'str', 'description': 'str', 'device_class': 'str', 'device_execution_mode': 'int', 'device_key': 'str', 'endpoint_name': 'str', 'endpoint_type': 'str', 'enrolment_list_timestamp': 'datetime', 'etag': 'datetime', 'firmware_checksum': 'str', 'host_gateway': 'str', 'id': 'str', 'manifest': 'str', 'manifest_timestamp': 'datetime', 'mechanism': 'str', 'mechanism_url': 'str', 'name': 'str', 'serial_number': 'str', 'state': 'str', 'updated_at': 'datetime', 'vendor_id': 'str' } attribute_map = { 'account_id': 'account_id', 'auto_update': 'auto_update', 'bootstrap_expiration_date': 'bootstrap_expiration_date', 'bootstrapped_timestamp': 'bootstrapped_timestamp', 'ca_id': 'ca_id', 'connector_expiration_date': 'connector_expiration_date', 'created_at': 'created_at', 'custom_attributes': 'custom_attributes', 'deployed_state': 'deployed_state', 'deployment': 'deployment', 'description': 'description', 'device_class': 'device_class', 'device_execution_mode': 'device_execution_mode', 'device_key': 'device_key', 'endpoint_name': 'endpoint_name', 'endpoint_type': 'endpoint_type', 'enrolment_list_timestamp': 'enrolment_list_timestamp', 'etag': 'etag', 'firmware_checksum': 'firmware_checksum', 'host_gateway': 'host_gateway', 'id': 'id', 'manifest': 'manifest', 'manifest_timestamp': 'manifest_timestamp', 'mechanism': 'mechanism', 'mechanism_url': 'mechanism_url', 'name': 'name', 'serial_number': 'serial_number', 'state': 'state', 'updated_at': 'updated_at', 'vendor_id': 'vendor_id' } def __init__(self, account_id=None, auto_update=None, bootstrap_expiration_date=None, bootstrapped_timestamp=None, ca_id=None, connector_expiration_date=None, created_at=None, custom_attributes=None, deployed_state=None, deployment=None, description=None, device_class=None, device_execution_mode=None, device_key=None, endpoint_name=None, endpoint_type=None, enrolment_list_timestamp=None, etag=None, firmware_checksum=None, host_gateway=None, id=None, manifest=None, manifest_timestamp=None, mechanism=None, mechanism_url=None, name=None, serial_number=None, state=None, updated_at=None, vendor_id=None): self._account_id = account_id self._auto_update = auto_update self._bootstrap_expiration_date = bootstrap_expiration_date self._bootstrapped_timestamp = bootstrapped_timestamp self._ca_id = ca_id self._connector_expiration_date = connector_expiration_date self._created_at = created_at self._custom_attributes = custom_attributes self._deployed_state = deployed_state self._deployment = deployment self._description = description self._device_class = device_class self._device_execution_mode = device_execution_mode self._device_key = device_key self._endpoint_name = endpoint_name self._endpoint_type = endpoint_type self._enrolment_list_timestamp = enrolment_list_timestamp self._etag = etag self._firmware_checksum = firmware_checksum self._host_gateway = host_gateway self._id = id self._manifest = manifest self._manifest_timestamp = manifest_timestamp 
self._mechanism = mechanism self._mechanism_url = mechanism_url self._name = name self._serial_number = serial_number self._state = state self._updated_at = updated_at self._vendor_id = vendor_id self.discriminator = None @property def account_id(self): return self._account_id @account_id.setter def account_id(self, account_id): self._account_id = account_id @property def auto_update(self): return self._auto_update @auto_update.setter def auto_update(self, auto_update): self._auto_update = auto_update @property def bootstrap_expiration_date(self): return self._bootstrap_expiration_date @bootstrap_expiration_date.setter def bootstrap_expiration_date(self, bootstrap_expiration_date): self._bootstrap_expiration_date = bootstrap_expiration_date @property def bootstrapped_timestamp(self): return self._bootstrapped_timestamp @bootstrapped_timestamp.setter def bootstrapped_timestamp(self, bootstrapped_timestamp): self._bootstrapped_timestamp = bootstrapped_timestamp @property def ca_id(self): return self._ca_id @ca_id.setter def ca_id(self, ca_id): self._ca_id = ca_id @property def connector_expiration_date(self): return self._connector_expiration_date @connector_expiration_date.setter def connector_expiration_date(self, connector_expiration_date): self._connector_expiration_date = connector_expiration_date @property def created_at(self): return self._created_at @created_at.setter def created_at(self, created_at): self._created_at = created_at @property def custom_attributes(self): return self._custom_attributes @custom_attributes.setter def custom_attributes(self, custom_attributes): self._custom_attributes = custom_attributes @property def deployed_state(self): return self._deployed_state @deployed_state.setter def deployed_state(self, deployed_state): self._deployed_state = deployed_state @property def deployment(self): return self._deployment @deployment.setter def deployment(self, deployment): self._deployment = deployment @property def description(self): return self._description @description.setter def description(self, description): self._description = description @property
Apache License 2.0
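A minimal usage sketch for the device_class getter above; the filter value is hypothetical and the import path follows the record's URL.

from mbed_cloud._backends.device_directory.models.device_eq_neq_filter import DeviceEqNeqFilter

flt = DeviceEqNeqFilter(device_class="wearable")  # hypothetical device class value

print(flt.device_class)  # "wearable" - read from the private _device_class attribute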
alexpof/opycleid
opycleid/q_categoryaction.py
CategoryQAction.get_objects
python
def get_objects(self):
    return list(sorted(self.objects.items()))
Returns the objects in the category action. Parameters ---------- None Returns ------- A list of pairs (x,y), where: - x is the name of the object - y is the corresponding instance of CatObject
https://github.com/alexpof/opycleid/blob/5abffca1a52c0d221dcb46162cb0678f265204b7/opycleid/q_categoryaction.py#L791-L804
import numpy as np import itertools import time from .categoryaction import CatObject class MultQ(object): def __init__(self,x): if x<0 or x>1: raise Exception("Real number should be comprised between 0 and 1") self.x = x @staticmethod def Unit(): return MultQ(1.0) @staticmethod def Zero(): return MultQ(0.0) def __mul__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid MultQ") return self.__class__(self.x*rhs.x) def __add__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid MultQ") return self.__class__(max([self.x,rhs.x])) def __eq__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid MultQ") return self.x==rhs.x def __lt__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid MultQ") return self.x<rhs.x def __le__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid MultQ") return self.x<=rhs.x def __str__(self): return str(self.x) def __repr__(self): return "MultQ({})".format(self.x) class IntvQ(object): def __init__(self,x): if x<0 or x>1: raise Exception("Real number should be comprised between 0 and 1") self.x = x @staticmethod def Unit(): return IntvQ(1.0) @staticmethod def Zero(): return IntvQ(0.0) def __mul__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid IntvQ") return self.__class__(min([self.x,rhs.x])) def __add__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid IntvQ") return self.__class__(max([self.x,rhs.x])) def __eq__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid IntvQ") return self.x==rhs.x def __lt__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid IntvQ") return self.x<rhs.x def __le__(self,rhs): if not isinstance(rhs,self.__class__): raise Exception("RHS is not a valid IntvQ") return self.x<=rhs.x def __str__(self): return str(self.x) def __repr__(self): return "IntvQ({})".format(self.x) class Lin3Q(IntvQ): def __init__(self,x): if not (x==0 or x==0.5 or x==1): raise Exception("The possibles values are 0, 1/2, and 1") super().__init__(x) @staticmethod def Unit(): return Lin3Q(1.0) @staticmethod def Zero(): return Lin3Q(0.0) def __str__(self): return str(self.x) def __repr__(self): return "Lin3Q({})".format(self.x) class QMorphism(object): def __init__(self,name,source,target,qtype=None,mapping=None): if not isinstance(source,CatObject): raise Exception("Source is not a valid CatObject class\n") if not isinstance(target,CatObject): raise Exception("Target is not a valid CatObject class\n") if qtype is None: raise Exception("Type of quantale should be specified") self.name = name self.source = source self.target = target self.qtype = qtype if mapping is not None: if isinstance(mapping,np.ndarray)==False: self.set_mapping(mapping) else: self.set_mapping_matrix(mapping) def set_name(self,name): if not len(name): raise Exception("The specified morphism name is empty") self.name = name def set_to_identity(self): if not (self.source==self.target): raise Exception("Source and target should be identical") card_source = self.source.get_cardinality() M = np.empty((card_source,card_source),dtype=self.qtype) for i in range(card_source): for j in range(card_source): if i==j: M[i,j] = self.qtype.Unit() else: M[i,j] = self.qtype.Zero() self.matrix = M def set_mapping(self,mapping): card_source = self.source.get_cardinality() card_target = self.target.get_cardinality() 
self.matrix = np.empty((card_target,card_source),dtype=self.qtype) for i in range(card_source): for j in range(card_target): self.matrix[j,i] = self.qtype.Zero() for elem,images in sorted(mapping.items()): id_elem = self.source.get_idx_by_name(elem) for image,value in images: id_image = self.target.get_idx_by_name(image) self.matrix[id_image,id_elem] = self.qtype(value) def set_mapping_matrix(self,matrix): self.matrix = matrix def get_mapping(self): dest_cardinality,source_cardinality = self.matrix.shape d={} for i in range(source_cardinality): l=[] for j in range(dest_cardinality): v = self.matrix[j,i] l.append((self.target.get_name_by_idx(j),v.x)) d[self.source.get_name_by_idx(i)]=l return d def get_mapping_matrix(self): return self.matrix def copy(self): U = QMorphism(self.name,self.source,self.target,qtype=self.qtype) U.set_mapping_matrix(self.get_mapping_matrix()) return U def _is_lefttotal(self): return np.all(np.sum(self.matrix,axis=0)>self.qtype.Zero()) def __str__(self): descr = self.name+":"+self.source.name+"->"+self.target.name+"\n\n" for s,t in sorted(self.get_mapping().items()): descr += " "*(len(self.name)+1) descr += s+"->"+(",".join([(x[0],str(x[1])) for x in t]))+"\n" return descr def __call__(self,elem): idx_elem = self.source.get_idx_by_name(elem) return [(self.target.get_name_by_idx(j),v.x) for j,v in enumerate(self.matrix[:,idx_elem]) if v!=self.qtype.Zero()] def __pow__(self,int_power): if not self.target==self.source: raise Exception("Morphism should be an endomorphism") U = self.copy() U.set_to_identity() for i in range(int_power): U = self*U U.set_name(self.name+"^"+str(int_power)) return U def __mul__(self,morphism): if not isinstance(morphism,QMorphism): raise Exception("RHS is not a valid QMorphism class\n") if not self.qtype==morphism.qtype: raise Exception("QMorphisms use different quantales") if not morphism.target==self.source: return None new_morphism = QMorphism(self.name+morphism.name,morphism.source,self.target,qtype=self.qtype) new_morphism.set_mapping_matrix((self.matrix.dot(morphism.matrix))) return new_morphism def __eq__(self,morphism): if not isinstance(morphism,QMorphism): raise Exception("RHS is not a valid QMorphism class\n") if not self.qtype==morphism.qtype: raise Exception("QMorphisms use different quantales") if self is None or morphism is None: return False return (self.source == morphism.source) and (self.target == morphism.target) and (np.array_equal(self.matrix,morphism.matrix)) def __le__(self, morphism): if not isinstance(morphism,QMorphism): raise Exception("RHS is not a valid CatMorphism class\n") if not self.qtype==morphism.qtype: raise Exception("QMorphisms use different quantales") if self is None or morphism is None: return False if not (self.source == morphism.source) and (self.target == morphism.target): raise Exception("Morphisms should have the same domain and codomain") return np.all(self.matrix<=morphism.matrix) def __lt__(self, morphism): if not isinstance(morphism,QMorphism): raise Exception("RHS is not a valid CatMorphism class\n") if not self.qtype==morphism.qtype: raise Exception("QMorphisms use different quantales") if not (self.source == morphism.source) and (self.target == morphism.target): raise Exception("Morphisms should have the same domain and codomain") if self is None or morphism is None: return False return np.all(self.matrix<morphism.matrix) class CategoryQAction(object): def __init__(self,qtype=None,objects=None,generators=None,generate=True): if qtype is None: raise Exception("Type of quantale should be 
specified") self.qtype=qtype self.objects={} self.generators={} self.morphisms={} self.equivalences=[] if objects is not None: self.set_objects(objects) if generators is not None: self.set_generators(generators) if generate==True: self.generate_category() def set_objects(self,list_objects): self.objects={} self.generators={} self.morphisms={} self.equivalences=[] ob_names = [catobject.name for catobject in list_objects] if not len(ob_names)==len(np.unique(ob_names)): raise Exception("Objects should have distinct names") for catobject in list_objects: self.objects[catobject.name] = catobject
BSD 3-Clause New or Revised License
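A usage sketch for get_objects above. The element names are hypothetical, and the CatObject constructor signature (a name plus a list of element names) is assumed from the rest of the opycleid package rather than shown in this record.

from opycleid.categoryaction import CatObject
from opycleid.q_categoryaction import CategoryQAction, IntvQ

# Assumed CatObject signature: CatObject(name, list_of_element_names)
X = CatObject("X", ["a", "b", "c"])
Y = CatObject("Y", ["d", "e"])

# generate=False skips generate_category since no generators are supplied here
cat = CategoryQAction(qtype=IntvQ, objects=[X, Y], generate=False)

for name, obj in cat.get_objects():   # (name, CatObject) pairs, sorted by name
    print(name, obj)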
demisto/demisto-py
demisto_client/demisto_api/models/automation_script.py
AutomationScript.roles
python
def roles(self):
    return self._roles
Gets the roles of this AutomationScript. # noqa: E501 The role assigned to this investigation # noqa: E501 :return: The roles of this AutomationScript. # noqa: E501 :rtype: list[str]
https://github.com/demisto/demisto-py/blob/95d29e07693d27c133f7fe6ef9da13e4b6dbf542/demisto_client/demisto_api/models/automation_script.py#L670-L678
import pprint import re import six from demisto_client.demisto_api.models.argument import Argument from demisto_client.demisto_api.models.duration import Duration from demisto_client.demisto_api.models.important import Important from demisto_client.demisto_api.models.output import Output from demisto_client.demisto_api.models.script_sub_type import ScriptSubType from demisto_client.demisto_api.models.script_target import ScriptTarget from demisto_client.demisto_api.models.script_type import ScriptType class AutomationScript(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'arguments': 'list[Argument]', 'comment': 'str', 'commit_message': 'str', 'context_keys': 'list[str]', 'depends_on': 'dict(str, list[str])', 'deprecated': 'bool', 'docker_image': 'str', 'enabled': 'bool', 'has_role': 'bool', 'hidden': 'bool', 'id': 'str', 'important': 'list[Important]', 'locked': 'bool', 'modified': 'datetime', 'name': 'str', 'outputs': 'list[Output]', 'prev_name': 'str', 'previous_roles': 'list[str]', 'pswd': 'str', 'raw_tags': 'list[str]', 'roles': 'list[str]', 'run_as': 'str', 'run_once': 'bool', 'script': 'str', 'script_target': 'ScriptTarget', 'searchable_name': 'str', 'sensitive': 'bool', 'should_commit': 'bool', 'sort_values': 'list[str]', 'subtype': 'ScriptSubType', 'system': 'bool', 'tags': 'list[str]', 'timeout': 'Duration', 'type': 'ScriptType', 'user': 'str', 'vc_should_ignore': 'bool', 'version': 'int', 'visual_script': 'str' } attribute_map = { 'arguments': 'arguments', 'comment': 'comment', 'commit_message': 'commitMessage', 'context_keys': 'contextKeys', 'depends_on': 'dependsOn', 'deprecated': 'deprecated', 'docker_image': 'dockerImage', 'enabled': 'enabled', 'has_role': 'hasRole', 'hidden': 'hidden', 'id': 'id', 'important': 'important', 'locked': 'locked', 'modified': 'modified', 'name': 'name', 'outputs': 'outputs', 'prev_name': 'prevName', 'previous_roles': 'previousRoles', 'pswd': 'pswd', 'raw_tags': 'rawTags', 'roles': 'roles', 'run_as': 'runAs', 'run_once': 'runOnce', 'script': 'script', 'script_target': 'scriptTarget', 'searchable_name': 'searchableName', 'sensitive': 'sensitive', 'should_commit': 'shouldCommit', 'sort_values': 'sortValues', 'subtype': 'subtype', 'system': 'system', 'tags': 'tags', 'timeout': 'timeout', 'type': 'type', 'user': 'user', 'vc_should_ignore': 'vcShouldIgnore', 'version': 'version', 'visual_script': 'visualScript' } def __init__(self, arguments=None, comment=None, commit_message=None, context_keys=None, depends_on=None, deprecated=None, docker_image=None, enabled=None, has_role=None, hidden=None, id=None, important=None, locked=None, modified=None, name=None, outputs=None, prev_name=None, previous_roles=None, pswd=None, raw_tags=None, roles=None, run_as=None, run_once=None, script=None, script_target=None, searchable_name=None, sensitive=None, should_commit=None, sort_values=None, subtype=None, system=None, tags=None, timeout=None, type=None, user=None, vc_should_ignore=None, version=None, visual_script=None): self._arguments = None self._comment = None self._commit_message = None self._context_keys = None self._depends_on = None self._deprecated = None self._docker_image = None self._enabled = None self._has_role = None self._hidden = None self._id = None self._important = None self._locked = None self._modified = None self._name = None self._outputs = None self._prev_name = None 
self._previous_roles = None self._pswd = None self._raw_tags = None self._roles = None self._run_as = None self._run_once = None self._script = None self._script_target = None self._searchable_name = None self._sensitive = None self._should_commit = None self._sort_values = None self._subtype = None self._system = None self._tags = None self._timeout = None self._type = None self._user = None self._vc_should_ignore = None self._version = None self._visual_script = None self.discriminator = None if arguments is not None: self.arguments = arguments if comment is not None: self.comment = comment if commit_message is not None: self.commit_message = commit_message if context_keys is not None: self.context_keys = context_keys if depends_on is not None: self.depends_on = depends_on if deprecated is not None: self.deprecated = deprecated if docker_image is not None: self.docker_image = docker_image if enabled is not None: self.enabled = enabled if has_role is not None: self.has_role = has_role if hidden is not None: self.hidden = hidden if id is not None: self.id = id if important is not None: self.important = important if locked is not None: self.locked = locked if modified is not None: self.modified = modified if name is not None: self.name = name if outputs is not None: self.outputs = outputs if prev_name is not None: self.prev_name = prev_name if previous_roles is not None: self.previous_roles = previous_roles if pswd is not None: self.pswd = pswd if raw_tags is not None: self.raw_tags = raw_tags if roles is not None: self.roles = roles if run_as is not None: self.run_as = run_as if run_once is not None: self.run_once = run_once if script is not None: self.script = script if script_target is not None: self.script_target = script_target if searchable_name is not None: self.searchable_name = searchable_name if sensitive is not None: self.sensitive = sensitive if should_commit is not None: self.should_commit = should_commit if sort_values is not None: self.sort_values = sort_values if subtype is not None: self.subtype = subtype if system is not None: self.system = system if tags is not None: self.tags = tags if timeout is not None: self.timeout = timeout if type is not None: self.type = type if user is not None: self.user = user if vc_should_ignore is not None: self.vc_should_ignore = vc_should_ignore if version is not None: self.version = version if visual_script is not None: self.visual_script = visual_script @property def arguments(self): return self._arguments @arguments.setter def arguments(self, arguments): self._arguments = arguments @property def comment(self): return self._comment @comment.setter def comment(self, comment): self._comment = comment @property def commit_message(self): return self._commit_message @commit_message.setter def commit_message(self, commit_message): self._commit_message = commit_message @property def context_keys(self): return self._context_keys @context_keys.setter def context_keys(self, context_keys): self._context_keys = context_keys @property def depends_on(self): return self._depends_on @depends_on.setter def depends_on(self, depends_on): self._depends_on = depends_on @property def deprecated(self): return self._deprecated @deprecated.setter def deprecated(self, deprecated): self._deprecated = deprecated @property def docker_image(self): return self._docker_image @docker_image.setter def docker_image(self, docker_image): self._docker_image = docker_image @property def enabled(self): return self._enabled @enabled.setter def enabled(self, enabled): 
self._enabled = enabled @property def has_role(self): return self._has_role @has_role.setter def has_role(self, has_role): self._has_role = has_role @property def hidden(self): return self._hidden @hidden.setter def hidden(self, hidden): self._hidden = hidden @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def important(self): return self._important @important.setter def important(self, important): self._important = important @property def locked(self): return self._locked @locked.setter def locked(self, locked): self._locked = locked @property def modified(self): return self._modified @modified.setter def modified(self, modified): self._modified = modified @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def outputs(self): return self._outputs @outputs.setter def outputs(self, outputs): self._outputs = outputs @property def prev_name(self): return self._prev_name @prev_name.setter def prev_name(self, prev_name): self._prev_name = prev_name @property def previous_roles(self): return self._previous_roles @previous_roles.setter def previous_roles(self, previous_roles): self._previous_roles = previous_roles @property def pswd(self): return self._pswd @pswd.setter def pswd(self, pswd): self._pswd = pswd @property def raw_tags(self): return self._raw_tags @raw_tags.setter def raw_tags(self, raw_tags): self._raw_tags = raw_tags @property
Apache License 2.0
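A hypothetical usage sketch for the AutomationScript model shown in the context above: the generated class is a plain swagger container whose constructor accepts each field as a keyword argument and whose properties simply store the values. The field values below are illustrative, not taken from the source.

from demisto_client.demisto_api.models.automation_script import AutomationScript

# Build a minimal script object; unset fields remain None.
script = AutomationScript(
    name='SayHello',
    comment='Example automation',
    script="demisto.results('hello')",
    tags=['example'],
)
print(script.name)   # -> 'SayHello'
print(script.tags)   # -> ['example']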
qecsim/qecsim
src/qecsim/cli.py
run
python
def run(code, error_model, decoder, error_probabilities, max_failures, max_runs, output, random_seed):
    code.validate()
    logger.info('RUN STARTING: code={}, error_model={}, decoder={}, error_probabilities={}, max_failures={}, '
                'max_runs={}, random_seed={}.'
                .format(code, error_model, decoder, error_probabilities, max_failures, max_runs, random_seed))
    data = []
    for error_probability in error_probabilities:
        runs_data = app.run(code, error_model, decoder, error_probability,
                            max_runs=max_runs, max_failures=max_failures, random_seed=random_seed)
        data.append(runs_data)
    logger.info('RUN COMPLETE: data={}'.format(data))
    _write_data(output, data)
Simulate quantum error correction.

Arguments:

\b
CODE                    Stabilizer code in format name(<args>)
#CODE_PARAMETERS#

\b
ERROR_MODEL             Error model in format name(<args>)
#ERROR_MODEL_PARAMETERS#

\b
DECODER                 Decoder in format name(<args>)
#DECODER_PARAMETERS#

\b
ERROR_PROBABILITY...    One or more probabilities as FLOAT in [0.0, 1.0]

Examples:

qecsim run -r10 "five_qubit" "generic.depolarizing" "generic.naive" 0.1

qecsim run -f5 -r50 -s13 "steane" "generic.phase_flip" "generic.naive" 0.1

qecsim run -r20 "planar(7,7)" "generic.bit_flip" "planar.mps(6)" 0.101 0.102 0.103

qecsim run -r10 "color666(7)" "generic.bit_flip" "color666.mps(16)" 0.09 0.10

qecsim run -o"data.json" -f9 "toric(3,3)" "generic.bit_flip" "toric.mwpm" 0.1
https://github.com/qecsim/qecsim/blob/24d6b8a320b292461b66b68fe4fba40c9ddc2257/src/qecsim/cli.py#L215-L265
import ast import inspect import json import logging import re import click import pkg_resources import qecsim from qecsim import app from qecsim import util from qecsim.model import ATTR_CLI_DESCRIPTION logger = logging.getLogger(__name__) class _ConstructorParamType(click.ParamType): name = 'constructor' def __init__(self, constructors): self._constructors = constructors def get_metavar(self, param): return '[{}]'.format('|'.join(sorted(self._constructors.keys()))) def get_missing_message(self, param): return '(choose from {})'.format(', '.join(sorted(self._constructors.keys()))) def convert(self, value, param, ctx): constructor_match = re.fullmatch(r''' # match 'toric(3,3)' as {'constructor_name': 'toric', 'constructor_args': '3,3'} (?P<constructor_name>[\w.]+) # capture constructor_name, e.g. 'toric' (?:\(\s* # skip opening parenthesis and leading whitespace (?P<constructor_args>.*?) # capture constructor_args, e.g. '3,3' ,?\s*\))? # skip trailing comma, trailing whitespace and closing parenthesis ''', value, re.VERBOSE) if constructor_match is None: self.fail('{} (format as name(<args>))'.format(value), param, ctx) constructor_name = constructor_match.group('constructor_name') if constructor_name in self._constructors.keys(): constructor = self._constructors[constructor_name] else: self.fail('{} (choose from {})'.format(value, ', '.join(sorted(self._constructors.keys()))), param, ctx) constructor_args = constructor_match.group('constructor_args') if constructor_args: try: arguments = ast.literal_eval(constructor_args + ',') except Exception as ex: self.fail('{} (failed to parse arguments "{}")'.format(value, ex), param, ctx) else: arguments = tuple() try: return constructor(*arguments) except Exception as ex: self.fail('{} (failed to construct "{}")'.format(value, ex), param, ctx) def __repr__(self): return '{}({!r})'.format(type(self).__name__, self._constructors) def _model_argument(model_type): def _decorator(func): entry_point_id = 'qecsim.cli.{}.{}s'.format(func.__name__, model_type) entry_points = sorted(pkg_resources.iter_entry_points(entry_point_id), key=lambda ep: ep.name) constructors = {ep.name: ep.load() for ep in entry_points} func = click.argument(model_type, type=_ConstructorParamType(constructors), metavar=model_type.upper())(func) model_definition_list = [(name, getattr(cls, ATTR_CLI_DESCRIPTION, '')) for name, cls in constructors.items()] formatter = click.HelpFormatter() formatter.indent() if model_definition_list: formatter.write_dl(model_definition_list) model_doc_placeholder = '#{}_PARAMETERS#'.format(model_type.upper()) func.__doc__ = inspect.getdoc(func).replace(model_doc_placeholder, formatter.getvalue()) return func return _decorator def _validate_error_probability(ctx, param, value): if not (0 <= value <= 1): raise click.BadParameter('{} is not in [0.0, 1.0]'.format(value), ctx, param) return value def _validate_error_probabilities(ctx, param, value): for v in value: _validate_error_probability(ctx, param, v) return value def _validate_measurement_error_probability(ctx, param, value): if not (value is None or (0 <= value <= 1)): raise click.BadParameter('{} is not in [0.0, 1.0]'.format(value), ctx, param) return value @click.group() @click.version_option(version=qecsim.__version__, prog_name='qecsim') def cli(): util.init_logging() @cli.command() @_model_argument('code') @_model_argument('error_model') @_model_argument('decoder') @click.argument('error_probabilities', required=True, nargs=-1, type=float, metavar='ERROR_PROBABILITY...', 
callback=_validate_error_probabilities) @click.option('--max-failures', '-f', type=click.IntRange(min=1), metavar='INT', help='Maximum number of failures for each probability.') @click.option('--max-runs', '-r', type=click.IntRange(min=1), metavar='INT', help='Maximum number of runs for each probability. [default: 1 if max-failures unspecified]') @click.option('--output', '-o', default='-', type=click.Path(allow_dash=True), metavar='FILENAME', help='Output file. (Writes to log if file exists).') @click.option('--random-seed', '-s', type=click.IntRange(min=0), metavar='INT', help='Random seed for qubit error generation. (Re-applied for each probability).')
BSD 3-Clause New or Revised License
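A hedged sketch of driving the `run` command above programmatically with click's test runner, mirroring the first CLI example in the docstring; it assumes `cli` is importable from `qecsim.cli` as defined in the context, and that the default output '-' writes the results to the runner's captured output.

from click.testing import CliRunner

from qecsim.cli import cli

runner = CliRunner()
# Equivalent of: qecsim run -r10 "five_qubit" "generic.depolarizing" "generic.naive" 0.1
result = runner.invoke(
    cli,
    ['run', '-r10', 'five_qubit', 'generic.depolarizing', 'generic.naive', '0.1'],
)
print(result.exit_code)  # 0 on success
print(result.output)     # simulation data for the default output '-'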
docusign/docusign-python-client
docusign_esign/models/currency.py
Currency.anchor_units
python
def anchor_units(self, anchor_units):
    self._anchor_units = anchor_units
Sets the anchor_units of this Currency.

Specifies units of the X and Y offset. Units could be pixels, millimeters, centimeters, or inches.  # noqa: E501

:param anchor_units: The anchor_units of this Currency.  # noqa: E501
:type: str
https://github.com/docusign/docusign-python-client/blob/c6aeafff0d046fa6c10a398be83ba9e24b05d4ea/docusign_esign/models/currency.py#L806-L815
import pprint import re import six from docusign_esign.client.configuration import Configuration class Currency(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'anchor_allow_white_space_in_characters': 'str', 'anchor_allow_white_space_in_characters_metadata': 'PropertyMetadata', 'anchor_case_sensitive': 'str', 'anchor_case_sensitive_metadata': 'PropertyMetadata', 'anchor_horizontal_alignment': 'str', 'anchor_horizontal_alignment_metadata': 'PropertyMetadata', 'anchor_ignore_if_not_present': 'str', 'anchor_ignore_if_not_present_metadata': 'PropertyMetadata', 'anchor_match_whole_word': 'str', 'anchor_match_whole_word_metadata': 'PropertyMetadata', 'anchor_string': 'str', 'anchor_string_metadata': 'PropertyMetadata', 'anchor_tab_processor_version': 'str', 'anchor_tab_processor_version_metadata': 'PropertyMetadata', 'anchor_units': 'str', 'anchor_units_metadata': 'PropertyMetadata', 'anchor_x_offset': 'str', 'anchor_x_offset_metadata': 'PropertyMetadata', 'anchor_y_offset': 'str', 'anchor_y_offset_metadata': 'PropertyMetadata', 'bold': 'str', 'bold_metadata': 'PropertyMetadata', 'conceal_value_on_document': 'str', 'conceal_value_on_document_metadata': 'PropertyMetadata', 'conditional_parent_label': 'str', 'conditional_parent_label_metadata': 'PropertyMetadata', 'conditional_parent_value': 'str', 'conditional_parent_value_metadata': 'PropertyMetadata', 'custom_tab_id': 'str', 'custom_tab_id_metadata': 'PropertyMetadata', 'disable_auto_size': 'str', 'disable_auto_size_metadata': 'PropertyMetadata', 'document_id': 'str', 'document_id_metadata': 'PropertyMetadata', 'error_details': 'ErrorDetails', 'font': 'str', 'font_color': 'str', 'font_color_metadata': 'PropertyMetadata', 'font_metadata': 'PropertyMetadata', 'font_size': 'str', 'font_size_metadata': 'PropertyMetadata', 'form_order': 'str', 'form_order_metadata': 'PropertyMetadata', 'form_page_label': 'str', 'form_page_label_metadata': 'PropertyMetadata', 'form_page_number': 'str', 'form_page_number_metadata': 'PropertyMetadata', 'height': 'str', 'height_metadata': 'PropertyMetadata', 'italic': 'str', 'italic_metadata': 'PropertyMetadata', 'locale_policy': 'LocalePolicyTab', 'locked': 'str', 'locked_metadata': 'PropertyMetadata', 'max_length': 'str', 'max_length_metadata': 'PropertyMetadata', 'merge_field': 'MergeField', 'merge_field_xml': 'str', 'name': 'str', 'name_metadata': 'PropertyMetadata', 'numerical_value': 'str', 'original_value': 'str', 'original_value_metadata': 'PropertyMetadata', 'page_number': 'str', 'page_number_metadata': 'PropertyMetadata', 'recipient_id': 'str', 'recipient_id_guid': 'str', 'recipient_id_guid_metadata': 'PropertyMetadata', 'recipient_id_metadata': 'PropertyMetadata', 'require_all': 'str', 'require_all_metadata': 'PropertyMetadata', 'required': 'str', 'required_metadata': 'PropertyMetadata', 'require_initial_on_shared_change': 'str', 'require_initial_on_shared_change_metadata': 'PropertyMetadata', 'sender_required': 'str', 'sender_required_metadata': 'PropertyMetadata', 'shared': 'str', 'shared_metadata': 'PropertyMetadata', 'smart_contract_information': 'SmartContractInformation', 'source': 'str', 'status': 'str', 'status_metadata': 'PropertyMetadata', 'tab_group_labels': 'list[str]', 'tab_group_labels_metadata': 'PropertyMetadata', 'tab_id': 'str', 'tab_id_metadata': 'PropertyMetadata', 'tab_label': 'str', 'tab_label_metadata': 
'PropertyMetadata', 'tab_order': 'str', 'tab_order_metadata': 'PropertyMetadata', 'tab_type': 'str', 'tab_type_metadata': 'PropertyMetadata', 'template_locked': 'str', 'template_locked_metadata': 'PropertyMetadata', 'template_required': 'str', 'template_required_metadata': 'PropertyMetadata', 'tooltip': 'str', 'tool_tip_metadata': 'PropertyMetadata', 'underline': 'str', 'underline_metadata': 'PropertyMetadata', 'value': 'str', 'value_metadata': 'PropertyMetadata', 'width': 'str', 'width_metadata': 'PropertyMetadata', 'x_position': 'str', 'x_position_metadata': 'PropertyMetadata', 'y_position': 'str', 'y_position_metadata': 'PropertyMetadata' } attribute_map = { 'anchor_allow_white_space_in_characters': 'anchorAllowWhiteSpaceInCharacters', 'anchor_allow_white_space_in_characters_metadata': 'anchorAllowWhiteSpaceInCharactersMetadata', 'anchor_case_sensitive': 'anchorCaseSensitive', 'anchor_case_sensitive_metadata': 'anchorCaseSensitiveMetadata', 'anchor_horizontal_alignment': 'anchorHorizontalAlignment', 'anchor_horizontal_alignment_metadata': 'anchorHorizontalAlignmentMetadata', 'anchor_ignore_if_not_present': 'anchorIgnoreIfNotPresent', 'anchor_ignore_if_not_present_metadata': 'anchorIgnoreIfNotPresentMetadata', 'anchor_match_whole_word': 'anchorMatchWholeWord', 'anchor_match_whole_word_metadata': 'anchorMatchWholeWordMetadata', 'anchor_string': 'anchorString', 'anchor_string_metadata': 'anchorStringMetadata', 'anchor_tab_processor_version': 'anchorTabProcessorVersion', 'anchor_tab_processor_version_metadata': 'anchorTabProcessorVersionMetadata', 'anchor_units': 'anchorUnits', 'anchor_units_metadata': 'anchorUnitsMetadata', 'anchor_x_offset': 'anchorXOffset', 'anchor_x_offset_metadata': 'anchorXOffsetMetadata', 'anchor_y_offset': 'anchorYOffset', 'anchor_y_offset_metadata': 'anchorYOffsetMetadata', 'bold': 'bold', 'bold_metadata': 'boldMetadata', 'conceal_value_on_document': 'concealValueOnDocument', 'conceal_value_on_document_metadata': 'concealValueOnDocumentMetadata', 'conditional_parent_label': 'conditionalParentLabel', 'conditional_parent_label_metadata': 'conditionalParentLabelMetadata', 'conditional_parent_value': 'conditionalParentValue', 'conditional_parent_value_metadata': 'conditionalParentValueMetadata', 'custom_tab_id': 'customTabId', 'custom_tab_id_metadata': 'customTabIdMetadata', 'disable_auto_size': 'disableAutoSize', 'disable_auto_size_metadata': 'disableAutoSizeMetadata', 'document_id': 'documentId', 'document_id_metadata': 'documentIdMetadata', 'error_details': 'errorDetails', 'font': 'font', 'font_color': 'fontColor', 'font_color_metadata': 'fontColorMetadata', 'font_metadata': 'fontMetadata', 'font_size': 'fontSize', 'font_size_metadata': 'fontSizeMetadata', 'form_order': 'formOrder', 'form_order_metadata': 'formOrderMetadata', 'form_page_label': 'formPageLabel', 'form_page_label_metadata': 'formPageLabelMetadata', 'form_page_number': 'formPageNumber', 'form_page_number_metadata': 'formPageNumberMetadata', 'height': 'height', 'height_metadata': 'heightMetadata', 'italic': 'italic', 'italic_metadata': 'italicMetadata', 'locale_policy': 'localePolicy', 'locked': 'locked', 'locked_metadata': 'lockedMetadata', 'max_length': 'maxLength', 'max_length_metadata': 'maxLengthMetadata', 'merge_field': 'mergeField', 'merge_field_xml': 'mergeFieldXml', 'name': 'name', 'name_metadata': 'nameMetadata', 'numerical_value': 'numericalValue', 'original_value': 'originalValue', 'original_value_metadata': 'originalValueMetadata', 'page_number': 'pageNumber', 'page_number_metadata': 
'pageNumberMetadata', 'recipient_id': 'recipientId', 'recipient_id_guid': 'recipientIdGuid', 'recipient_id_guid_metadata': 'recipientIdGuidMetadata', 'recipient_id_metadata': 'recipientIdMetadata', 'require_all': 'requireAll', 'require_all_metadata': 'requireAllMetadata', 'required': 'required', 'required_metadata': 'requiredMetadata', 'require_initial_on_shared_change': 'requireInitialOnSharedChange', 'require_initial_on_shared_change_metadata': 'requireInitialOnSharedChangeMetadata', 'sender_required': 'senderRequired', 'sender_required_metadata': 'senderRequiredMetadata', 'shared': 'shared', 'shared_metadata': 'sharedMetadata', 'smart_contract_information': 'smartContractInformation', 'source': 'source', 'status': 'status', 'status_metadata': 'statusMetadata', 'tab_group_labels': 'tabGroupLabels', 'tab_group_labels_metadata': 'tabGroupLabelsMetadata', 'tab_id': 'tabId', 'tab_id_metadata': 'tabIdMetadata', 'tab_label': 'tabLabel', 'tab_label_metadata': 'tabLabelMetadata', 'tab_order': 'tabOrder', 'tab_order_metadata': 'tabOrderMetadata', 'tab_type': 'tabType', 'tab_type_metadata': 'tabTypeMetadata', 'template_locked': 'templateLocked', 'template_locked_metadata': 'templateLockedMetadata', 'template_required': 'templateRequired', 'template_required_metadata': 'templateRequiredMetadata', 'tooltip': 'tooltip', 'tool_tip_metadata': 'toolTipMetadata', 'underline': 'underline', 'underline_metadata': 'underlineMetadata', 'value': 'value', 'value_metadata': 'valueMetadata', 'width': 'width', 'width_metadata': 'widthMetadata', 'x_position': 'xPosition', 'x_position_metadata': 'xPositionMetadata', 'y_position': 'yPosition', 'y_position_metadata': 'yPositionMetadata' } def __init__(self, _configuration=None, **kwargs): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._anchor_allow_white_space_in_characters = None self._anchor_allow_white_space_in_characters_metadata = None self._anchor_case_sensitive = None self._anchor_case_sensitive_metadata = None self._anchor_horizontal_alignment = None self._anchor_horizontal_alignment_metadata = None self._anchor_ignore_if_not_present = None self._anchor_ignore_if_not_present_metadata = None self._anchor_match_whole_word = None self._anchor_match_whole_word_metadata = None self._anchor_string = None self._anchor_string_metadata = None self._anchor_tab_processor_version = None self._anchor_tab_processor_version_metadata = None self._anchor_units = None self._anchor_units_metadata = None self._anchor_x_offset = None self._anchor_x_offset_metadata = None self._anchor_y_offset = None self._anchor_y_offset_metadata = None self._bold = None self._bold_metadata = None self._conceal_value_on_document = None self._conceal_value_on_document_metadata = None self._conditional_parent_label = None self._conditional_parent_label_metadata = None self._conditional_parent_value = None self._conditional_parent_value_metadata = None self._custom_tab_id = None self._custom_tab_id_metadata = None self._disable_auto_size = None self._disable_auto_size_metadata = None self._document_id = None self._document_id_metadata = None self._error_details = None self._font = None self._font_color = None self._font_color_metadata = None self._font_metadata = None self._font_size = None self._font_size_metadata = None self._form_order = None self._form_order_metadata = None self._form_page_label = None self._form_page_label_metadata = None self._form_page_number = None self._form_page_number_metadata = None self._height = None 
self._height_metadata = None self._italic = None self._italic_metadata = None self._locale_policy = None self._locked = None self._locked_metadata = None self._max_length = None self._max_length_metadata = None self._merge_field = None self._merge_field_xml = None self._name = None self._name_metadata = None self._numerical_value = None self._original_value = None self._original_value_metadata = None self._page_number = None self._page_number_metadata = None self._recipient_id = None self._recipient_id_guid = None self._recipient_id_guid_metadata = None self._recipient_id_metadata = None self._require_all = None self._require_all_metadata = None self._required = None self._required_metadata = None self._require_initial_on_shared_change = None self._require_initial_on_shared_change_metadata = None self._sender_required = None self._sender_required_metadata = None self._shared = None self._shared_metadata = None self._smart_contract_information = None self._source = None self._status = None self._status_metadata = None self._tab_group_labels = None self._tab_group_labels_metadata = None self._tab_id = None self._tab_id_metadata = None self._tab_label = None self._tab_label_metadata = None self._tab_order = None self._tab_order_metadata = None self._tab_type = None self._tab_type_metadata = None self._template_locked = None self._template_locked_metadata = None self._template_required = None self._template_required_metadata = None self._tooltip = None self._tool_tip_metadata = None self._underline = None self._underline_metadata = None self._value = None self._value_metadata = None self._width = None self._width_metadata = None self._x_position = None self._x_position_metadata = None self._y_position = None self._y_position_metadata = None self.discriminator = None setattr(self, "_{}".format('anchor_allow_white_space_in_characters'), kwargs.get('anchor_allow_white_space_in_characters', None)) setattr(self, "_{}".format('anchor_allow_white_space_in_characters_metadata'), kwargs.get('anchor_allow_white_space_in_characters_metadata', None)) setattr(self, "_{}".format('anchor_case_sensitive'), kwargs.get('anchor_case_sensitive', None)) setattr(self, "_{}".format('anchor_case_sensitive_metadata'), kwargs.get('anchor_case_sensitive_metadata', None)) setattr(self, "_{}".format('anchor_horizontal_alignment'), kwargs.get('anchor_horizontal_alignment', None)) setattr(self, "_{}".format('anchor_horizontal_alignment_metadata'), kwargs.get('anchor_horizontal_alignment_metadata', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present'), kwargs.get('anchor_ignore_if_not_present', None)) setattr(self, "_{}".format('anchor_ignore_if_not_present_metadata'), kwargs.get('anchor_ignore_if_not_present_metadata', None)) setattr(self, "_{}".format('anchor_match_whole_word'), kwargs.get('anchor_match_whole_word', None)) setattr(self, "_{}".format('anchor_match_whole_word_metadata'), kwargs.get('anchor_match_whole_word_metadata', None)) setattr(self, "_{}".format('anchor_string'), kwargs.get('anchor_string', None)) setattr(self, "_{}".format('anchor_string_metadata'), kwargs.get('anchor_string_metadata', None)) setattr(self, "_{}".format('anchor_tab_processor_version'), kwargs.get('anchor_tab_processor_version', None)) setattr(self, "_{}".format('anchor_tab_processor_version_metadata'), kwargs.get('anchor_tab_processor_version_metadata', None)) setattr(self, "_{}".format('anchor_units'), kwargs.get('anchor_units', None)) setattr(self, "_{}".format('anchor_units_metadata'), kwargs.get('anchor_units_metadata', 
None)) setattr(self, "_{}".format('anchor_x_offset'), kwargs.get('anchor_x_offset', None)) setattr(self, "_{}".format('anchor_x_offset_metadata'), kwargs.get('anchor_x_offset_metadata', None)) setattr(self, "_{}".format('anchor_y_offset'), kwargs.get('anchor_y_offset', None)) setattr(self, "_{}".format('anchor_y_offset_metadata'), kwargs.get('anchor_y_offset_metadata', None)) setattr(self, "_{}".format('bold'), kwargs.get('bold', None)) setattr(self, "_{}".format('bold_metadata'), kwargs.get('bold_metadata', None)) setattr(self, "_{}".format('conceal_value_on_document'), kwargs.get('conceal_value_on_document', None)) setattr(self, "_{}".format('conceal_value_on_document_metadata'), kwargs.get('conceal_value_on_document_metadata', None)) setattr(self, "_{}".format('conditional_parent_label'), kwargs.get('conditional_parent_label', None)) setattr(self, "_{}".format('conditional_parent_label_metadata'), kwargs.get('conditional_parent_label_metadata', None)) setattr(self, "_{}".format('conditional_parent_value'), kwargs.get('conditional_parent_value', None)) setattr(self, "_{}".format('conditional_parent_value_metadata'), kwargs.get('conditional_parent_value_metadata', None)) setattr(self, "_{}".format('custom_tab_id'), kwargs.get('custom_tab_id', None)) setattr(self, "_{}".format('custom_tab_id_metadata'), kwargs.get('custom_tab_id_metadata', None)) setattr(self, "_{}".format('disable_auto_size'), kwargs.get('disable_auto_size', None)) setattr(self, "_{}".format('disable_auto_size_metadata'), kwargs.get('disable_auto_size_metadata', None)) setattr(self, "_{}".format('document_id'), kwargs.get('document_id', None)) setattr(self, "_{}".format('document_id_metadata'), kwargs.get('document_id_metadata', None)) setattr(self, "_{}".format('error_details'), kwargs.get('error_details', None)) setattr(self, "_{}".format('font'), kwargs.get('font', None)) setattr(self, "_{}".format('font_color'), kwargs.get('font_color', None)) setattr(self, "_{}".format('font_color_metadata'), kwargs.get('font_color_metadata', None)) setattr(self, "_{}".format('font_metadata'), kwargs.get('font_metadata', None)) setattr(self, "_{}".format('font_size'), kwargs.get('font_size', None)) setattr(self, "_{}".format('font_size_metadata'), kwargs.get('font_size_metadata', None)) setattr(self, "_{}".format('form_order'), kwargs.get('form_order', None)) setattr(self, "_{}".format('form_order_metadata'), kwargs.get('form_order_metadata', None)) setattr(self, "_{}".format('form_page_label'), kwargs.get('form_page_label', None)) setattr(self, "_{}".format('form_page_label_metadata'), kwargs.get('form_page_label_metadata', None)) setattr(self, "_{}".format('form_page_number'), kwargs.get('form_page_number', None)) setattr(self, "_{}".format('form_page_number_metadata'), kwargs.get('form_page_number_metadata', None)) setattr(self, "_{}".format('height'), kwargs.get('height', None)) setattr(self, "_{}".format('height_metadata'), kwargs.get('height_metadata', None)) setattr(self, "_{}".format('italic'), kwargs.get('italic', None)) setattr(self, "_{}".format('italic_metadata'), kwargs.get('italic_metadata', None)) setattr(self, "_{}".format('locale_policy'), kwargs.get('locale_policy', None)) setattr(self, "_{}".format('locked'), kwargs.get('locked', None)) setattr(self, "_{}".format('locked_metadata'), kwargs.get('locked_metadata', None)) setattr(self, "_{}".format('max_length'), kwargs.get('max_length', None)) setattr(self, "_{}".format('max_length_metadata'), kwargs.get('max_length_metadata', None)) setattr(self, 
"_{}".format('merge_field'), kwargs.get('merge_field', None)) setattr(self, "_{}".format('merge_field_xml'), kwargs.get('merge_field_xml', None)) setattr(self, "_{}".format('name'), kwargs.get('name', None)) setattr(self, "_{}".format('name_metadata'), kwargs.get('name_metadata', None)) setattr(self, "_{}".format('numerical_value'), kwargs.get('numerical_value', None)) setattr(self, "_{}".format('original_value'), kwargs.get('original_value', None)) setattr(self, "_{}".format('original_value_metadata'), kwargs.get('original_value_metadata', None)) setattr(self, "_{}".format('page_number'), kwargs.get('page_number', None)) setattr(self, "_{}".format('page_number_metadata'), kwargs.get('page_number_metadata', None)) setattr(self, "_{}".format('recipient_id'), kwargs.get('recipient_id', None)) setattr(self, "_{}".format('recipient_id_guid'), kwargs.get('recipient_id_guid', None)) setattr(self, "_{}".format('recipient_id_guid_metadata'), kwargs.get('recipient_id_guid_metadata', None)) setattr(self, "_{}".format('recipient_id_metadata'), kwargs.get('recipient_id_metadata', None)) setattr(self, "_{}".format('require_all'), kwargs.get('require_all', None)) setattr(self, "_{}".format('require_all_metadata'), kwargs.get('require_all_metadata', None)) setattr(self, "_{}".format('required'), kwargs.get('required', None)) setattr(self, "_{}".format('required_metadata'), kwargs.get('required_metadata', None)) setattr(self, "_{}".format('require_initial_on_shared_change'), kwargs.get('require_initial_on_shared_change', None)) setattr(self, "_{}".format('require_initial_on_shared_change_metadata'), kwargs.get('require_initial_on_shared_change_metadata', None)) setattr(self, "_{}".format('sender_required'), kwargs.get('sender_required', None)) setattr(self, "_{}".format('sender_required_metadata'), kwargs.get('sender_required_metadata', None)) setattr(self, "_{}".format('shared'), kwargs.get('shared', None)) setattr(self, "_{}".format('shared_metadata'), kwargs.get('shared_metadata', None)) setattr(self, "_{}".format('smart_contract_information'), kwargs.get('smart_contract_information', None)) setattr(self, "_{}".format('source'), kwargs.get('source', None)) setattr(self, "_{}".format('status'), kwargs.get('status', None)) setattr(self, "_{}".format('status_metadata'), kwargs.get('status_metadata', None)) setattr(self, "_{}".format('tab_group_labels'), kwargs.get('tab_group_labels', None)) setattr(self, "_{}".format('tab_group_labels_metadata'), kwargs.get('tab_group_labels_metadata', None)) setattr(self, "_{}".format('tab_id'), kwargs.get('tab_id', None)) setattr(self, "_{}".format('tab_id_metadata'), kwargs.get('tab_id_metadata', None)) setattr(self, "_{}".format('tab_label'), kwargs.get('tab_label', None)) setattr(self, "_{}".format('tab_label_metadata'), kwargs.get('tab_label_metadata', None)) setattr(self, "_{}".format('tab_order'), kwargs.get('tab_order', None)) setattr(self, "_{}".format('tab_order_metadata'), kwargs.get('tab_order_metadata', None)) setattr(self, "_{}".format('tab_type'), kwargs.get('tab_type', None)) setattr(self, "_{}".format('tab_type_metadata'), kwargs.get('tab_type_metadata', None)) setattr(self, "_{}".format('template_locked'), kwargs.get('template_locked', None)) setattr(self, "_{}".format('template_locked_metadata'), kwargs.get('template_locked_metadata', None)) setattr(self, "_{}".format('template_required'), kwargs.get('template_required', None)) setattr(self, "_{}".format('template_required_metadata'), kwargs.get('template_required_metadata', None)) setattr(self, 
"_{}".format('tooltip'), kwargs.get('tooltip', None)) setattr(self, "_{}".format('tool_tip_metadata'), kwargs.get('tool_tip_metadata', None)) setattr(self, "_{}".format('underline'), kwargs.get('underline', None)) setattr(self, "_{}".format('underline_metadata'), kwargs.get('underline_metadata', None)) setattr(self, "_{}".format('value'), kwargs.get('value', None)) setattr(self, "_{}".format('value_metadata'), kwargs.get('value_metadata', None)) setattr(self, "_{}".format('width'), kwargs.get('width', None)) setattr(self, "_{}".format('width_metadata'), kwargs.get('width_metadata', None)) setattr(self, "_{}".format('x_position'), kwargs.get('x_position', None)) setattr(self, "_{}".format('x_position_metadata'), kwargs.get('x_position_metadata', None)) setattr(self, "_{}".format('y_position'), kwargs.get('y_position', None)) setattr(self, "_{}".format('y_position_metadata'), kwargs.get('y_position_metadata', None)) @property def anchor_allow_white_space_in_characters(self): return self._anchor_allow_white_space_in_characters @anchor_allow_white_space_in_characters.setter def anchor_allow_white_space_in_characters(self, anchor_allow_white_space_in_characters): self._anchor_allow_white_space_in_characters = anchor_allow_white_space_in_characters @property def anchor_allow_white_space_in_characters_metadata(self): return self._anchor_allow_white_space_in_characters_metadata @anchor_allow_white_space_in_characters_metadata.setter def anchor_allow_white_space_in_characters_metadata(self, anchor_allow_white_space_in_characters_metadata): self._anchor_allow_white_space_in_characters_metadata = anchor_allow_white_space_in_characters_metadata @property def anchor_case_sensitive(self): return self._anchor_case_sensitive @anchor_case_sensitive.setter def anchor_case_sensitive(self, anchor_case_sensitive): self._anchor_case_sensitive = anchor_case_sensitive @property def anchor_case_sensitive_metadata(self): return self._anchor_case_sensitive_metadata @anchor_case_sensitive_metadata.setter def anchor_case_sensitive_metadata(self, anchor_case_sensitive_metadata): self._anchor_case_sensitive_metadata = anchor_case_sensitive_metadata @property def anchor_horizontal_alignment(self): return self._anchor_horizontal_alignment @anchor_horizontal_alignment.setter def anchor_horizontal_alignment(self, anchor_horizontal_alignment): self._anchor_horizontal_alignment = anchor_horizontal_alignment @property def anchor_horizontal_alignment_metadata(self): return self._anchor_horizontal_alignment_metadata @anchor_horizontal_alignment_metadata.setter def anchor_horizontal_alignment_metadata(self, anchor_horizontal_alignment_metadata): self._anchor_horizontal_alignment_metadata = anchor_horizontal_alignment_metadata @property def anchor_ignore_if_not_present(self): return self._anchor_ignore_if_not_present @anchor_ignore_if_not_present.setter def anchor_ignore_if_not_present(self, anchor_ignore_if_not_present): self._anchor_ignore_if_not_present = anchor_ignore_if_not_present @property def anchor_ignore_if_not_present_metadata(self): return self._anchor_ignore_if_not_present_metadata @anchor_ignore_if_not_present_metadata.setter def anchor_ignore_if_not_present_metadata(self, anchor_ignore_if_not_present_metadata): self._anchor_ignore_if_not_present_metadata = anchor_ignore_if_not_present_metadata @property def anchor_match_whole_word(self): return self._anchor_match_whole_word @anchor_match_whole_word.setter def anchor_match_whole_word(self, anchor_match_whole_word): self._anchor_match_whole_word = 
anchor_match_whole_word @property def anchor_match_whole_word_metadata(self): return self._anchor_match_whole_word_metadata @anchor_match_whole_word_metadata.setter def anchor_match_whole_word_metadata(self, anchor_match_whole_word_metadata): self._anchor_match_whole_word_metadata = anchor_match_whole_word_metadata @property def anchor_string(self): return self._anchor_string @anchor_string.setter def anchor_string(self, anchor_string): self._anchor_string = anchor_string @property def anchor_string_metadata(self): return self._anchor_string_metadata @anchor_string_metadata.setter def anchor_string_metadata(self, anchor_string_metadata): self._anchor_string_metadata = anchor_string_metadata @property def anchor_tab_processor_version(self): return self._anchor_tab_processor_version @anchor_tab_processor_version.setter def anchor_tab_processor_version(self, anchor_tab_processor_version): self._anchor_tab_processor_version = anchor_tab_processor_version @property def anchor_tab_processor_version_metadata(self): return self._anchor_tab_processor_version_metadata @anchor_tab_processor_version_metadata.setter def anchor_tab_processor_version_metadata(self, anchor_tab_processor_version_metadata): self._anchor_tab_processor_version_metadata = anchor_tab_processor_version_metadata @property def anchor_units(self): return self._anchor_units @anchor_units.setter
MIT License
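A hypothetical usage sketch for the anchor_units property above: Currency's constructor forwards keyword arguments straight into the corresponding private attributes, and the setter simply stores whatever string it is given. The tab values are illustrative.

from docusign_esign.models.currency import Currency

tab = Currency(
    anchor_string='Total:',
    anchor_units='pixels',
    anchor_x_offset='10',
    anchor_y_offset='0',
)
tab.anchor_units = 'inches'     # plain assignment, no validation in the setter
print(tab.anchor_units)         # -> 'inches'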
ninthdevilhaunster/arknightsautohelper
vendor/penguin_client/penguin_client/models/event_period.py
EventPeriod.label_i18n
python
def label_i18n(self):
    return self._label_i18n
Gets the label_i18n of this EventPeriod.  # noqa: E501

:return: The label_i18n of this EventPeriod.  # noqa: E501
:rtype: dict(str, str)
https://github.com/ninthdevilhaunster/arknightsautohelper/blob/d24b4e22a73b333c1acc152556566efad4e94c04/vendor/penguin_client/penguin_client/models/event_period.py#L134-L141
import pprint import re import six class EventPeriod(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'conditions': 'ExistConditions', 'end': 'int', 'existence': 'dict(str, Existence)', 'label_i18n': 'dict(str, str)', 'start': 'int' } attribute_map = { 'conditions': 'conditions', 'end': 'end', 'existence': 'existence', 'label_i18n': 'label_i18n', 'start': 'start' } def __init__(self, conditions=None, end=None, existence=None, label_i18n=None, start=None): self._conditions = None self._end = None self._existence = None self._label_i18n = None self._start = None self.discriminator = None if conditions is not None: self.conditions = conditions if end is not None: self.end = end if existence is not None: self.existence = existence if label_i18n is not None: self.label_i18n = label_i18n if start is not None: self.start = start @property def conditions(self): return self._conditions @conditions.setter def conditions(self, conditions): self._conditions = conditions @property def end(self): return self._end @end.setter def end(self, end): self._end = end @property def existence(self): return self._existence @existence.setter def existence(self, existence): self._existence = existence @property
MIT License
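A hypothetical sketch for the label_i18n property above: it is a plain dict(str, str) field, so a localized event period can be constructed and read back directly. Timestamps and labels are placeholders.

from penguin_client.models.event_period import EventPeriod

period = EventPeriod(
    start=1556668800000,                      # placeholder epoch milliseconds
    end=1557878400000,
    label_i18n={'en': 'Sample Event', 'ja': 'Sample Event (JA)'},
)
print(period.label_i18n.get('en'))            # -> 'Sample Event'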
openstack/tempest
tempest/lib/services/identity/v3/oauth_token_client.py
OAUTHTokenClient.create_access_token
python
def create_access_token(self, consumer_key, consumer_secret, request_key,
                        request_secret, oauth_verifier):
    endpoint = 'OS-OAUTH1/access_token'
    oauth_params = self._generate_params_with_signature(
        consumer_key,
        self.base_url + '/' + endpoint,
        client_secret=consumer_secret,
        resource_owner_key=request_key,
        resource_owner_secret=request_secret,
        verifier=oauth_verifier,
        http_method='POST')
    headers = self._generate_oauth_header(oauth_params)
    resp, body = self.post(endpoint, body=None, headers=headers)
    self.expected_success(201, resp.status)
    if not isinstance(body, str):
        body = body.decode('utf-8')
    body = dict(item.split("=") for item in body.split("&"))
    return rest_client.ResponseBody(resp, body)
Create access token.

For more information, please refer to the official API reference:
https://docs.openstack.org/api-ref/identity/v3-ext/#create-access-token
https://github.com/openstack/tempest/blob/d458bf329739ae7b7652d329e6415ad6ba54e490/tempest/lib/services/identity/v3/oauth_token_client.py#L154-L176
import binascii import hashlib import hmac import random import time from urllib import parse as urlparse from oslo_serialization import jsonutils as json from tempest.lib.common import rest_client class OAUTHTokenClient(rest_client.RestClient): api_version = "v3" def _escape(self, s): safe = b'~' s = s.encode('utf-8') if isinstance(s, str) else s s = urlparse.quote(s, safe) if isinstance(s, bytes): s = s.decode('utf-8') return s def _generate_params_with_signature(self, client_key, uri, client_secret=None, resource_owner_key=None, resource_owner_secret=None, callback_uri=None, verifier=None, http_method='GET'): timestamp = str(int(time.time())) nonce = str(random.getrandbits(64)) + timestamp oauth_params = [ ('oauth_nonce', nonce), ('oauth_timestamp', timestamp), ('oauth_version', '1.0'), ('oauth_signature_method', 'HMAC-SHA1'), ('oauth_consumer_key', client_key), ] if resource_owner_key: oauth_params.append(('oauth_token', resource_owner_key)) if callback_uri: oauth_params.append(('oauth_callback', callback_uri)) if verifier: oauth_params.append(('oauth_verifier', verifier)) key_values = [(self._escape(k), self._escape(v)) for k, v in oauth_params] key_values.sort() parameter_parts = ['{0}={1}'.format(k, v) for k, v in key_values] normalized_params = '&'.join(parameter_parts) scheme, netloc, path, params, _, _ = urlparse.urlparse(uri) scheme = scheme.lower() netloc = netloc.lower() path = path.replace('//', '/') normalized_uri = urlparse.urlunparse((scheme, netloc, path, params, '', '')) base_string = self._escape(http_method.upper()) base_string += '&' base_string += self._escape(normalized_uri) base_string += '&' base_string += self._escape(normalized_params) key = self._escape(client_secret or '') key += '&' key += self._escape(resource_owner_secret or '') key_utf8 = key.encode('utf-8') text_utf8 = base_string.encode('utf-8') signature = hmac.new(key_utf8, text_utf8, hashlib.sha1) sig = binascii.b2a_base64(signature.digest())[:-1].decode('utf-8') oauth_params.append(('oauth_signature', sig)) return oauth_params def _generate_oauth_header(self, oauth_params): authorization_header = {} authorization_header_parameters_parts = [] for oauth_parameter_name, value in oauth_params: escaped_name = self._escape(oauth_parameter_name) escaped_value = self._escape(value) part = '{0}="{1}"'.format(escaped_name, escaped_value) authorization_header_parameters_parts.append(part) authorization_header_parameters = ', '.join( authorization_header_parameters_parts) oauth_string = 'OAuth %s' % authorization_header_parameters authorization_header['Authorization'] = oauth_string return authorization_header def create_request_token(self, consumer_key, consumer_secret, project_id): endpoint = 'OS-OAUTH1/request_token' headers = {'Requested-Project-Id': project_id} oauth_params = self._generate_params_with_signature( consumer_key, self.base_url + '/' + endpoint, client_secret=consumer_secret, callback_uri='oob', http_method='POST') oauth_header = self._generate_oauth_header(oauth_params) headers.update(oauth_header) resp, body = self.post(endpoint, body=None, headers=headers) self.expected_success(201, resp.status) if not isinstance(body, str): body = body.decode('utf-8') body = dict(item.split("=") for item in body.split("&")) return rest_client.ResponseBody(resp, body) def authorize_request_token(self, request_token_id, role_ids): roles = [{'id': role_id} for role_id in role_ids] body = {'roles': roles} post_body = json.dumps(body) resp, body = self.put("OS-OAUTH1/authorize/%s" % request_token_id, post_body) 
self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
Apache License 2.0
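A hedged usage sketch for create_access_token above; `client` is assumed to be an already-configured OAUTHTokenClient, and all credential strings are placeholders obtained from earlier request-token and authorization steps.

access = client.create_access_token(
    consumer_key='consumer-key',
    consumer_secret='consumer-secret',
    request_key='request-token-key',
    request_secret='request-token-secret',
    oauth_verifier='verifier-from-authorization',
)
# The body is the parsed form-encoded response; per OAuth 1.0 it normally
# carries oauth_token and oauth_token_secret entries.
print(access)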
decentfox/aioh2
aioh2/protocol.py
H2Protocol.initial_window_size
python
def initial_window_size(self):
    return self._conn.local_settings.initial_window_size
Local initial window size (in octets) for stream-level flow control.

Setting a larger value may cause the inbound buffer to grow and allow more data to be received. Setting a
smaller value does not shrink the buffer immediately, but may keep the peer from sending enough data to
overflow the buffer for a while. However, it is still up to the peer whether to respect this setting or not.
https://github.com/decentfox/aioh2/blob/2f9b76161e99e32317083cd2ebd17ce2ed3e41ab/aioh2/protocol.py#L678-L688
import struct from collections import deque from logging import getLogger import asyncio import priority from h2 import events from h2 import settings from h2.config import H2Configuration from h2.connection import H2Connection from h2.exceptions import NoSuchStreamError, StreamClosedError, ProtocolError from . import exceptions, async_task __all__ = ['H2Protocol'] logger = getLogger(__package__) @asyncio.coroutine def _wait_for_events(*events_): while not all([event.is_set() for event in events_]): yield from asyncio.wait([event.wait() for event in events_]) class _StreamEndedException(Exception): def __init__(self, bufs=None): if bufs is None: bufs = [] self.bufs = bufs class CallableEvent(asyncio.Event): def __init__(self, func, *, loop=None): super().__init__(loop=loop) self._func = func @asyncio.coroutine def wait(self): while not self._func(): self.clear() yield from super().wait() def sync(self): if self._func(): self.set() else: self.clear() def is_set(self): self.sync() return super().is_set() class H2Stream: def __init__(self, stream_id, window_getter, loop=None): if loop is None: loop = asyncio.get_event_loop() self._stream_id = stream_id self._window_getter = window_getter self._wlock = asyncio.Lock(loop=loop) self._window_open = CallableEvent(self._is_window_open, loop=loop) self._rlock = asyncio.Lock(loop=loop) self._buffers = deque() self._buffer_size = 0 self._buffer_ready = asyncio.Event(loop=loop) self._response = asyncio.Future(loop=loop) self._trailers = asyncio.Future(loop=loop) self._eof_received = False self._closed = False @property def id(self): return self._stream_id @property def window_open(self): return self._window_open @property def rlock(self): return self._rlock @property def wlock(self): return self._wlock @property def buffer_size(self): return self._buffer_size @property def response(self): return self._response @property def trailers(self): return self._trailers def _is_window_open(self): try: window = self._window_getter(self._stream_id) except NoSuchStreamError: self._closed = True return True else: return window > 0 def feed_data(self, data): if data: self._buffers.append(data) self._buffer_size += len(data) self._buffer_ready.set() def feed_eof(self): self._eof_received = True self._buffer_ready.set() self.feed_trailers({}) def feed_response(self, headers): self._response.set_result(headers) def feed_trailers(self, headers): if not self._trailers.done(): self._trailers.set_result(headers) @asyncio.coroutine def read_frame(self): yield from self._buffer_ready.wait() rv = b'' if self._buffers: rv = self._buffers.popleft() self._buffer_size -= len(rv) if not self._buffers: if self._eof_received: raise _StreamEndedException([rv]) else: self._buffer_ready.clear() return rv @asyncio.coroutine def read_all(self): yield from self._buffer_ready.wait() rv = [] rv.extend(self._buffers) self._buffers.clear() self._buffer_size = 0 if self._eof_received: raise _StreamEndedException(rv) else: self._buffer_ready.clear() return rv @asyncio.coroutine def read(self, n): yield from self._buffer_ready.wait() rv = [] count = 0 while n > count and self._buffers: buf = self._buffers.popleft() count += len(buf) if n < count: rv.append(buf[:n - count]) self._buffers.appendleft(buf[n - count:]) count = n else: rv.append(buf) self._buffer_size -= count if not self._buffers: if self._eof_received: raise _StreamEndedException(rv) else: self._buffer_ready.clear() return rv, count class H2Protocol(asyncio.Protocol): def __init__(self, client_side: bool, *, loop=None, 
concurrency=8, functional_timeout=2): if loop is None: loop = asyncio.get_event_loop() self._loop = loop config = H2Configuration(client_side=client_side, header_encoding='utf-8') self._conn = H2Connection(config=config) self._transport = None self._streams = {} self._inbound_requests = asyncio.Queue(concurrency, loop=loop) self._priority = priority.PriorityTree() self._priority_events = {} self._handler = None self._is_resumed = False self._resumed = CallableEvent(lambda: self._is_resumed, loop=loop) self._stream_creatable = CallableEvent(self._is_stream_creatable, loop=loop) self._last_active = 0 self._ping_index = -1 self._ping_time = 0 self._rtt = None self._functional_timeout = functional_timeout self._functional = CallableEvent(self._is_functional, loop=loop) self._event_handlers = { events.RequestReceived: self._request_received, events.ResponseReceived: self._response_received, events.TrailersReceived: self._trailers_received, events.DataReceived: self._data_received, events.WindowUpdated: self._window_updated, events.RemoteSettingsChanged: self._remote_settings_changed, events.PingAcknowledged: self._ping_acknowledged, events.StreamEnded: self._stream_ended, events.StreamReset: self._stream_reset, events.PushedStreamReceived: self._pushed_stream_received, events.SettingsAcknowledged: self._settings_acknowledged, events.PriorityUpdated: self._priority_updated, events.ConnectionTerminated: self._connection_terminated, } def connection_made(self, transport): self._transport = transport self._conn.initiate_connection() self._conn.update_settings({ settings.SettingCodes.MAX_CONCURRENT_STREAMS: self._inbound_requests.maxsize}) self._flush() self._stream_creatable.sync() self.resume_writing() self._last_active = self._loop.time() self._functional.sync() def connection_lost(self, exc): self._conn = None self._transport = None self.pause_writing() if self._handler: self._handler.cancel() def pause_writing(self): self._is_resumed = False self._resumed.sync() def resume_writing(self): self._is_resumed = True self._resumed.sync() def data_received(self, data): self._last_active = self._loop.time() self._functional.sync() events_ = self._conn.receive_data(data) self._flush() for event in events_: self._event_received(event) def eof_received(self): self._conn.close_connection() self._flush() def _event_received(self, event): self._event_handlers[type(event)](event) def _request_received(self, event: events.RequestReceived): self._inbound_requests.put_nowait((0, event.stream_id, event.headers)) self._priority.insert_stream(event.stream_id) self._priority.block(event.stream_id) def _response_received(self, event: events.ResponseReceived): self._get_stream(event.stream_id).feed_response(event.headers) def _trailers_received(self, event: events.TrailersReceived): self._get_stream(event.stream_id).feed_trailers(event.headers) def _data_received(self, event: events.DataReceived): self._get_stream(event.stream_id).feed_data(event.data) if self._conn.inbound_flow_control_window < 1073741823: self._conn.increment_flow_control_window( 2 ** 31 - 1 - self._conn.inbound_flow_control_window) self._flush() def _window_updated(self, event: events.WindowUpdated): if event.stream_id: self._get_stream(event.stream_id).window_open.sync() else: for stream in list(self._streams.values()): stream.window_open.sync() def _remote_settings_changed(self, event: events.RemoteSettingsChanged): if settings.SettingCodes.INITIAL_WINDOW_SIZE in event.changed_settings: for stream in list(self._streams.values()): 
stream.window_open.sync() if settings.SettingCodes.MAX_CONCURRENT_STREAMS in event.changed_settings: self._stream_creatable.sync() def _ping_acknowledged(self, event: events.PingAcknowledged): if struct.unpack('Q', event.ping_data) == (self._ping_index,): self._rtt = self._loop.time() - self._ping_time def _stream_ended(self, event: events.StreamEnded): self._get_stream(event.stream_id).feed_eof() self._stream_creatable.sync() def _stream_reset(self, event: events.StreamReset): self._get_stream(event.stream_id).window_open.set() self._stream_creatable.sync() def _pushed_stream_received(self, event: events.PushedStreamReceived): pass def _settings_acknowledged(self, event: events.SettingsAcknowledged): pass def _priority_updated(self, event: events.PriorityUpdated): self._priority.reprioritize( event.stream_id, event.depends_on, event.weight, event.exclusive) def _connection_terminated(self, event: events.ConnectionTerminated): logger.warning('Remote peer sent GOAWAY [ERR: %s], disconnect now.', event.error_code) self._transport.close() def _get_stream(self, stream_id): stream = self._streams.get(stream_id) if stream is None: stream = self._streams[stream_id] = H2Stream( stream_id, self._conn.local_flow_control_window, loop=self._loop) return stream def _flush(self): self._transport.write(self._conn.data_to_send()) def _is_stream_creatable(self): return (self._conn.open_outbound_streams < self._conn.remote_settings.max_concurrent_streams) def _flow_control(self, stream_id): delta = (self._conn.local_settings.initial_window_size - self._get_stream(stream_id).buffer_size - self._conn.remote_flow_control_window(stream_id)) if delta > 0: self._conn.increment_flow_control_window(delta, stream_id) self._flush() def _is_functional(self): return self._last_active + self._functional_timeout > self._loop.time() def _priority_step(self): try: for stream_id in self._priority: fut = self._priority_events.pop(stream_id, None) if fut is not None: fut.set_result(None) break except Exception: if self._priority_events: self._priority_events.popitem()[1].set_result(None) def set_handler(self, handler): if self._handler: raise Exception('Handler was already set') if handler: self._handler = async_task(handler, loop=self._loop) def close_connection(self): self._transport.close() @asyncio.coroutine def start_request(self, headers, *, end_stream=False): yield from _wait_for_events(self._resumed, self._stream_creatable) stream_id = self._conn.get_next_available_stream_id() self._priority.insert_stream(stream_id) self._priority.block(stream_id) self._conn.send_headers(stream_id, headers, end_stream=end_stream) self._flush() return stream_id @asyncio.coroutine def start_response(self, stream_id, headers, *, end_stream=False): yield from self._resumed.wait() self._conn.send_headers(stream_id, headers, end_stream=end_stream) self._flush() @asyncio.coroutine def send_data(self, stream_id, data, *, end_stream=False): try: with (yield from self._get_stream(stream_id).wlock): while True: yield from _wait_for_events( self._resumed, self._get_stream(stream_id).window_open) self._priority.unblock(stream_id) waiter = asyncio.Future() if not self._priority_events: self._loop.call_soon(self._priority_step) self._priority_events[stream_id] = waiter try: yield from waiter data_size = len(data) size = min( data_size, self._conn.local_flow_control_window(stream_id), self._conn.max_outbound_frame_size) if data_size == 0 or size == data_size: self._conn.send_data(stream_id, data, end_stream=end_stream) self._flush() break elif size > 
0: self._conn.send_data(stream_id, data[:size]) data = data[size:] self._flush() finally: self._priority_events.pop(stream_id, None) self._priority.block(stream_id) if self._priority_events: self._loop.call_soon(self._priority_step) except ProtocolError: raise exceptions.SendException(data) @asyncio.coroutine def send_trailers(self, stream_id, headers): with (yield from self._get_stream(stream_id).wlock): yield from self._resumed.wait() self._conn.send_headers(stream_id, headers, end_stream=True) self._flush() @asyncio.coroutine def end_stream(self, stream_id): with (yield from self._get_stream(stream_id).wlock): yield from self._resumed.wait() self._conn.end_stream(stream_id) self._flush() @asyncio.coroutine def recv_request(self): rv = yield from self._inbound_requests.get() return rv[1:] @asyncio.coroutine def recv_response(self, stream_id): return (yield from self._get_stream(stream_id).response) @asyncio.coroutine def recv_trailers(self, stream_id): return (yield from self._get_stream(stream_id).trailers) @asyncio.coroutine def read_stream(self, stream_id, size=None): rv = [] try: with (yield from self._get_stream(stream_id).rlock): if size is None: rv.append(( yield from self._get_stream(stream_id).read_frame())) self._flow_control(stream_id) elif size < 0: while True: rv.extend(( yield from self._get_stream(stream_id).read_all())) self._flow_control(stream_id) else: while size > 0: bufs, count = yield from self._get_stream( stream_id).read(size) rv.extend(bufs) size -= count self._flow_control(stream_id) except StreamClosedError: pass except _StreamEndedException as e: try: self._flow_control(stream_id) except StreamClosedError: pass rv.extend(e.bufs) return b''.join(rv) def update_settings(self, new_settings): self._conn.update_settings(new_settings) self._flush() @asyncio.coroutine def wait_functional(self): while not self._is_functional(): self._rtt = None self._ping_index += 1 self._ping_time = self._loop.time() self._conn.ping(struct.pack('Q', self._ping_index)) self._flush() try: yield from asyncio.wait_for(self._functional.wait(), self._functional_timeout) except asyncio.TimeoutError: pass return self._rtt def reprioritize(self, stream_id, depends_on=None, weight=16, exclusive=False): self._priority.reprioritize(stream_id, depends_on, weight, exclusive) @property def functional_timeout(self): return self._functional_timeout @functional_timeout.setter def functional_timeout(self, val): self._functional_timeout = val self._functional.sync() @property
BSD 3-Clause New or Revised License
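A minimal plain-Python sketch of the chunking rule inside the send_data() coroutine above (names and numbers are illustrative; the real loop also re-reads the connection's flow-control window and waits on window_open between passes):

def chunk_for_window(data, window, max_frame):
    # Yield the slices a single send pass would emit: never more than the
    # flow-control window or the maximum outbound frame size at a time.
    while data:
        size = min(len(data), window, max_frame)
        if size == 0:
            break  # window exhausted; the real code waits for a WINDOW_UPDATE
        yield data[:size]
        data = data[size:]

print([len(c) for c in chunk_for_window(b'x' * 50000, window=65535, max_frame=16384)])
# -> [16384, 16384, 16384, 848]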
jamesturk/scrapelib
scrapelib/__init__.py
CachingSession.should_cache_response
python
def should_cache_response(self, response: Response) -> bool: return response.status_code == 200
Check if a given Response object should be cached. Default behavior is to only cache responses with a 200 status code.
https://github.com/jamesturk/scrapelib/blob/a4332f2afb670e8742a7712bf58ffb03b0c12952/scrapelib/__init__.py#L329-L335
import logging import os import tempfile import time from urllib.request import urlopen as urllib_urlopen from urllib.error import URLError from typing import ( Any, Callable, Container, IO, Mapping, MutableMapping, Optional, Text, Tuple, Union, cast, ) import requests from .cache import ( CacheStorageBase, FileCache, SQLiteCache, MemoryCache, CacheResponse, ) from ._types import ( _Data, PreparedRequest, RequestsCookieJar, _HooksInput, _AuthType, Response, ) __version__ = "2.0.6" _user_agent = " ".join(("scrapelib", __version__, requests.utils.default_user_agent())) _log = logging.getLogger("scrapelib") _log.addHandler(logging.NullHandler()) class HTTPMethodUnavailableError(requests.RequestException): def __init__(self, message: str, method: str): super().__init__(message) self.method = method class HTTPError(requests.HTTPError): def __init__(self, response: Response, body: dict = None): message = "%s while retrieving %s" % (response.status_code, response.url) super().__init__(message) self.response = response self.body = body or self.response.text class FTPError(requests.HTTPError): def __init__(self, url: str): message = "error while retrieving %s" % url super().__init__(message) class RetrySession(requests.Session): def __init__(self) -> None: super().__init__() self._retry_attempts = 0 self.retry_wait_seconds: float = 10 @property def retry_attempts(self) -> int: return self._retry_attempts @retry_attempts.setter def retry_attempts(self, value: int) -> None: self._retry_attempts = max(value, 0) def accept_response(self, response: Response, **kwargs: dict) -> bool: return response.status_code < 400 def request( self, method: str, url: Union[str, bytes, Text], params: Union[None, bytes, MutableMapping[Text, Text]] = None, data: _Data = None, headers: Optional[MutableMapping[Text, Text]] = None, cookies: Union[None, RequestsCookieJar, MutableMapping[Text, Text]] = None, files: Optional[MutableMapping[Text, IO[Any]]] = None, auth: _AuthType = None, timeout: Union[None, float, Tuple[float, float], Tuple[float, None]] = None, allow_redirects: Optional[bool] = True, proxies: Optional[MutableMapping[Text, Text]] = None, hooks: Optional[_HooksInput] = None, stream: Optional[bool] = None, verify: Union[None, bool, Text] = True, cert: Union[Text, Tuple[Text, Text], None] = None, json: Optional[Any] = None, retry_on_404: bool = False, ) -> Response: tries = 0 exception_raised = None while tries <= self.retry_attempts: exception_raised = None try: resp = super().request( method, url, params=params, data=data, headers=headers, cookies=cookies, files=files, auth=auth, timeout=timeout, allow_redirects=allow_redirects, proxies=proxies, hooks=hooks, stream=stream, verify=verify, cert=cert, json=json, ) if self.accept_response(resp) or ( resp.status_code == 404 and not retry_on_404 ): break except Exception as e: if isinstance(e, requests.exceptions.SSLError): raise exception_raised = e tries += 1 if tries <= self.retry_attempts: wait = self.retry_wait_seconds * (2 ** (tries - 1)) _log.debug("sleeping for %s seconds before retry" % wait) if exception_raised: _log.warning( "got %s sleeping for %s seconds before retry", exception_raised, wait, ) else: _log.warning("sleeping for %s seconds before retry", wait) time.sleep(wait) if exception_raised: raise exception_raised return resp class ThrottledSession(RetrySession): _last_request: float _throttled: bool = False def _throttle(self) -> None: now = time.time() diff = self._request_frequency - (now - self._last_request) if diff > 0: _log.debug("sleeping for 
%fs" % diff) time.sleep(diff) self._last_request = time.time() else: self._last_request = now @property def requests_per_minute(self) -> int: return self._requests_per_minute @requests_per_minute.setter def requests_per_minute(self, value: int) -> None: if value > 0: self._throttled = True self._requests_per_minute = value self._request_frequency = 60.0 / value self._last_request = 0 else: self._throttled = False self._requests_per_minute = 0 self._request_frequency = 0.0 self._last_request = 0 def request( self, method: str, url: Union[str, bytes, Text], params: Union[None, bytes, MutableMapping[Text, Text]] = None, data: _Data = None, headers: Optional[MutableMapping[Text, Text]] = None, cookies: Union[None, RequestsCookieJar, MutableMapping[Text, Text]] = None, files: Optional[MutableMapping[Text, IO[Any]]] = None, auth: _AuthType = None, timeout: Union[None, float, Tuple[float, float], Tuple[float, None]] = None, allow_redirects: Optional[bool] = True, proxies: Optional[MutableMapping[Text, Text]] = None, hooks: Optional[_HooksInput] = None, stream: Optional[bool] = None, verify: Union[None, bool, Text] = True, cert: Union[Text, Tuple[Text, Text], None] = None, json: Optional[Any] = None, retry_on_404: bool = False, ) -> Response: if self._throttled: self._throttle() return super().request( method, url, params=params, data=data, headers=headers, cookies=cookies, files=files, auth=auth, timeout=timeout, allow_redirects=allow_redirects, proxies=proxies, hooks=hooks, stream=stream, verify=verify, cert=cert, json=json, retry_on_404=retry_on_404, ) class DummyObject(object): _original_response: "DummyObject" msg: "DummyObject" def getheaders(self, name: str) -> str: return "" def get_all(self, name: str, default: str) -> str: return default _dummy = DummyObject() _dummy._original_response = DummyObject() _dummy._original_response.msg = DummyObject() class FTPAdapter(requests.adapters.BaseAdapter): def send( self, request: PreparedRequest, stream: bool = False, timeout: Union[None, float, Tuple[float, float], Tuple[float, None]] = None, verify: Union[bool, str] = False, cert: Union[None, Union[bytes, Text], Container[Union[bytes, Text]]] = None, proxies: Optional[Mapping[str, str]] = None, ) -> Response: if request.method != "GET": raise HTTPMethodUnavailableError( "FTP requests do not support method '%s'" % request.method, cast(str, request.method), ) try: if isinstance(timeout, tuple): timeout_float = timeout[0] else: timeout_float = cast(float, timeout) real_resp = urllib_urlopen(cast(str, request.url), timeout=timeout_float) resp = requests.Response() resp.status_code = 200 resp.url = cast(str, request.url) resp.headers = requests.structures.CaseInsensitiveDict() resp._content = real_resp.read() resp.raw = _dummy return resp except URLError: raise FTPError(cast(str, request.url)) class CachingSession(ThrottledSession): def __init__(self, cache_storage: Optional[CacheStorageBase] = None) -> None: super().__init__() self.cache_storage = cache_storage self.cache_write_only = False def key_for_request( self, method: str, url: Union[str, bytes], params: Union[None, bytes, MutableMapping[Text, Text]] = None, ) -> Optional[str]: if method != "get": return None return requests.Request(url=url, params=params).prepare().url
BSD 2-Clause Simplified License
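A hedged usage sketch for should_cache_response(): subclass CachingSession to also cache redirects and 404s. FileCache is re-exported by scrapelib/__init__.py per the imports above; the cache-directory path is a placeholder, and the example assumes the remainder of CachingSession.request() (truncated above) wires the cache in as in the released library.

from scrapelib import CachingSession, FileCache

class PermissiveCachingSession(CachingSession):
    # Cache 3xx redirects and 404s as well, not only 200 responses.
    def should_cache_response(self, response):
        return response.status_code in (200, 301, 302, 404)

session = PermissiveCachingSession(cache_storage=FileCache('./scrapelib-cache'))
session.cache_write_only = False      # read hits back out of the cache too
response = session.request('get', 'https://example.com/page')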
exopy/exopy
exopy/tasks/tasks/task_interface.py
InterfaceableInterfaceMixin.preferences_from_members
python
def preferences_from_members(self): prefs = super(InterfaceableInterfaceMixin, self).preferences_from_members() if self.interface: i_prefs = self.interface.preferences_from_members() prefs['interface'] = i_prefs return prefs
Update the values stored in the preference system.
https://github.com/exopy/exopy/blob/aeda9bcfad2d2f76903c7ad2800ea2110ff689b2/exopy/tasks/tasks/task_interface.py#L235-L246
from atom.api import (Atom, ForwardTyped, Typed, Str, Dict, Property, Constant) from ...utils.traceback import format_exc from ...utils.atom_util import HasPrefAtom, tagged_members from .base_tasks import BaseTask from . import validators DEP_TYPE = 'exopy.tasks.interface' class InterfaceableMixin(Atom): interface = ForwardTyped(lambda: BaseInterface) def check(self, *args, **kwargs): test = True traceback = {} err_path = self.get_error_path() if not self.interface and not hasattr(self, 'i_perform'): traceback[err_path + '-interface'] = 'Missing interface' return False, traceback if self.interface: i_test, i_traceback = self.interface.check(*args, **kwargs) traceback.update(i_traceback) test &= i_test res = super(InterfaceableMixin, self).check(*args, **kwargs) test &= res[0] traceback.update(res[1]) return test, traceback def prepare(self): super(InterfaceableMixin, self).prepare() if self.interface: self.interface.prepare() def perform(self, *args, **kwargs): if self.interface: return self.interface.perform(*args, **kwargs) else: return self.i_perform(*args, **kwargs) def traverse(self, depth=-1): it = super(InterfaceableMixin, self).traverse(depth) yield next(it) interface = self.interface if interface: if depth == 0: yield interface else: for i in interface.traverse(depth - 1): yield i for c in it: yield c @classmethod def build_from_config(cls, config, dependencies): new = super(InterfaceableMixin, cls).build_from_config(config, dependencies) if 'interface' in config: iclass = config['interface'].pop('interface_id') inter_class = dependencies[DEP_TYPE][iclass] new.interface = inter_class.build_from_config(config['interface'], dependencies) return new def get_error_path(self): raise NotImplementedError() class InterfaceableTaskMixin(InterfaceableMixin): def register_preferences(self): super(InterfaceableTaskMixin, self).register_preferences() if self.interface: prefs = self.interface.preferences_from_members() self.preferences['interface'] = prefs if len(self.preferences.sections) > 1: ind = self.preferences.sections.index('interface') del self.preferences.sections[ind] self.preferences.sections.insert(0, 'interface') def update_preferences_from_members(self): super(InterfaceableTaskMixin, self).update_preferences_from_members() if self.interface: prefs = self.interface.preferences_from_members() self.preferences['interface'] = prefs if len(self.preferences.sections) > 1: ind = self.preferences.sections.index('interface') del self.preferences.sections[ind] self.preferences.sections.insert(0, 'interface') def get_error_path(self): return self.path + '/' + self.name def _post_setattr_interface(self, old, new): new_entries = dict(self.database_entries) if old: inter = old inter.task = None for entry in inter.database_entries: new_entries.pop(entry, None) if new: inter = new inter.task = self if isinstance(inter, InterfaceableInterfaceMixin): inter._post_setattr_interface(None, inter.interface) for entry, value in inter.database_entries.items(): new_entries[entry] = value self.database_entries = new_entries class InterfaceableInterfaceMixin(InterfaceableMixin): def get_error_path(self): try: return self.parent.get_error_path() + '/' + type(self).__name__ except AttributeError: return '/'.join((self.task.path, self.task.name, type(self).__name__))
BSD 3-Clause New or Revised License
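An illustrative sketch of the dictionary this method is expected to produce when an interface is attached (all keys and values below are invented for the example): the interface's own prefs are nested under 'interface', and build_from_config() shown above later pops 'interface_id' from that sub-dict to look the interface class up in the dependencies.

prefs = {
    'task_id': 'exopy.DemoTask',                          # hypothetical member of the host task
    'interface': {
        'interface_id': 'exopy.DemoTask:DemoInterface',   # consumed by build_from_config()
        'gain': '2.0',                                    # hypothetical member of the interface
    },
}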
calebbell/thermo
thermo/eos_mix_methods.py
a_alpha_and_derivatives_quadratic_terms
python
def a_alpha_and_derivatives_quadratic_terms(a_alphas, a_alpha_roots, da_alpha_dTs, d2a_alpha_dT2s, T, zs, kijs, a_alpha_j_rows=None, da_alpha_dT_j_rows=None): N = len(a_alphas) a_alpha = da_alpha_dT = d2a_alpha_dT2 = 0.0 if a_alpha_j_rows is None: a_alpha_j_rows = [0.0]*N if da_alpha_dT_j_rows is None: da_alpha_dT_j_rows = [0.0]*N for i in range(N): kijs_i = kijs[i] a_alpha_i_root_i = a_alpha_roots[i] a_alphai = a_alphas[i] da_alpha_dT_i = da_alpha_dTs[i] d2a_alpha_dT2_i = d2a_alpha_dT2s[i] workingd1 = workings2 = 0.0 for j in range(i): v0 = a_alpha_i_root_i*a_alpha_roots[j] a_alpha_ijs_ij = (1. - kijs_i[j])*v0 t200 = a_alpha_ijs_ij*zs[i] a_alpha_j_rows[j] += t200 a_alpha_j_rows[i] += zs[j]*a_alpha_ijs_ij t200 *= zs[j] a_alpha += t200 + t200 a_alphaj = a_alphas[j] da_alpha_dT_j = da_alpha_dTs[j] zi_zj = zs[i]*zs[j] x1 = a_alphai*da_alpha_dT_j x2 = a_alphaj*da_alpha_dT_i x1_x2 = x1 + x2 kij_m1 = kijs_i[j] - 1.0 v0_inv = 1.0/v0 v1 = kij_m1*v0_inv da_alpha_dT_ij = x1_x2*v1 da_alpha_dT_j_rows[j] += zs[i]*da_alpha_dT_ij da_alpha_dT_j_rows[i] += zs[j]*da_alpha_dT_ij da_alpha_dT_ij *= zi_zj x0 = a_alphai*a_alphaj d2a_alpha_dT2_ij = v0_inv*v0_inv*v1*( (x0*( -0.5*(a_alphai*d2a_alpha_dT2s[j] + a_alphaj*d2a_alpha_dT2_i) - da_alpha_dT_i*da_alpha_dT_j) +.25*x1_x2*x1_x2)) d2a_alpha_dT2_ij *= zi_zj workingd1 += da_alpha_dT_ij workings2 += d2a_alpha_dT2_ij t200 = a_alphas[i]*zs[i] a_alpha_j_rows[i] += t200 a_alpha += t200*zs[i] zi_zj = zs[i]*zs[i] da_alpha_dT_ij = -da_alpha_dT_i - da_alpha_dT_i da_alpha_dT_j_rows[i] += zs[i]*da_alpha_dT_ij da_alpha_dT_ij *= zi_zj da_alpha_dT -= 0.5*(da_alpha_dT_ij + (workingd1 + workingd1)) d2a_alpha_dT2 += d2a_alpha_dT2_i*zi_zj + (workings2 + workings2) for i in range(N): da_alpha_dT_j_rows[i] *= -0.5 return a_alpha, da_alpha_dT, d2a_alpha_dT2, a_alpha_j_rows, da_alpha_dT_j_rows
r'''Calculates the `a_alpha` term, and its first two temperature derivatives, for an equation of state along with the vector quantities needed to compute the fugacitie and temperature derivatives of fugacities of the mixture. This routine is efficient in both numba and PyPy. .. math:: a \alpha = \sum_i \sum_j z_i z_j {(a\alpha)}_{ij} .. math:: \frac{\partial (a\alpha)}{\partial T} = \sum_i \sum_j z_i z_j \frac{\partial (a\alpha)_{ij}}{\partial T} .. math:: \frac{\partial^2 (a\alpha)}{\partial T^2} = \sum_i \sum_j z_i z_j \frac{\partial^2 (a\alpha)_{ij}}{\partial T^2} .. math:: (a\alpha)_{ij} = (1-k_{ij})\sqrt{(a\alpha)_{i}(a\alpha)_{j}} .. math:: \frac{\partial (a\alpha)_{ij}}{\partial T} = \frac{\sqrt{\operatorname{a\alpha_{i}}{\left(T \right)} \operatorname{a\alpha_{j}} {\left(T \right)}} \left(1 - k_{ij}\right) \left(\frac{\operatorname{a\alpha_{i}} {\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)}}{2} + \frac{\operatorname{a\alpha_{j}}{\left(T \right)} \frac{d}{d T} \operatorname{ a\alpha_{i}}{\left(T \right)}}{2}\right)}{\operatorname{a\alpha_{i}}{\left(T \right)} \operatorname{a\alpha_{j}}{\left(T \right)}} .. math:: \frac{\partial^2 (a\alpha)_{ij}}{\partial T^2} = - \frac{\sqrt{\operatorname{a\alpha_{i}}{\left(T \right)} \operatorname{a\alpha_{j}} {\left(T \right)}} \left(k_{ij} - 1\right) \left(\frac{\left(\operatorname{ a\alpha_{i}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)} + \operatorname{a\alpha_{j}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{i}} {\left(T \right)}\right)^{2}}{4 \operatorname{a\alpha_{i}}{\left(T \right)} \operatorname{a\alpha_{j}}{\left(T \right)}} - \frac{\left(\operatorname{a\alpha_{i}} {\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)} + \operatorname{a\alpha_{j}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{i}}{\left(T \right)}\right) \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)}}{2 \operatorname{a\alpha_{j}} {\left(T \right)}} - \frac{\left(\operatorname{a\alpha_{i}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)} + \operatorname{a\alpha_{j}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{i}}{\left(T \right)}\right) \frac{d}{d T} \operatorname{a\alpha_{i}}{\left(T \right)}}{2 \operatorname{a\alpha_{i}} {\left(T \right)}} + \frac{\operatorname{a\alpha_{i}}{\left(T \right)} \frac{d^{2}}{d T^{2}} \operatorname{a\alpha_{j}}{\left(T \right)}}{2} + \frac{\operatorname{a\alpha_{j}}{\left(T \right)} \frac{d^{2}}{d T^{2}} \operatorname{a\alpha_{i}}{\left(T \right)}}{2} + \frac{d}{d T} \operatorname{a\alpha_{i}}{\left(T \right)} \frac{d}{d T} \operatorname{a\alpha_{j}}{\left(T \right)}\right)} {\operatorname{a\alpha_{i}}{\left(T \right)} \operatorname{a\alpha_{j}} {\left(T \right)}} The secondary values are as follows: .. math:: \sum_i y_i(a\alpha)_{ij} .. 
math:: \sum_i y_i \frac{\partial (a\alpha)_{ij}}{\partial T} Parameters ---------- a_alphas : list[float] EOS attractive terms, [J^2/mol^2/Pa] a_alpha_roots : list[float] Square roots of `a_alphas`; provided for speed [J/mol/Pa^0.5] da_alpha_dTs : list[float] Temperature derivative of coefficient calculated by EOS-specific method, [J^2/mol^2/Pa/K] d2a_alpha_dT2s : list[float] Second temperature derivative of coefficient calculated by EOS-specific method, [J^2/mol^2/Pa/K**2] T : float Temperature, not used, [K] zs : list[float] Mole fractions of each species kijs : list[list[float]] Constant kijs, [-] Returns ------- a_alpha : float EOS attractive term, [J^2/mol^2/Pa] da_alpha_dT : float Temperature derivative of coefficient calculated by EOS-specific method, [J^2/mol^2/Pa/K] d2a_alpha_dT2 : float Second temperature derivative of coefficient calculated by EOS-specific method, [J^2/mol^2/Pa/K**2] a_alpha_j_rows : list[float] EOS attractive term row sums, [J^2/mol^2/Pa] da_alpha_dT_j_rows : list[float] Temperature derivative of EOS attractive term row sums, [J^2/mol^2/Pa/K] Notes ----- Examples -------- >>> kijs = [[0,.083],[0.083,0]] >>> zs = [0.1164203, 0.8835797] >>> a_alphas = [0.2491099357671155, 0.6486495863528039] >>> a_alpha_roots = [i**0.5 for i in a_alphas] >>> da_alpha_dTs = [-0.0005102028006086241, -0.0011131153520304886] >>> d2a_alpha_dT2s = [1.8651128859234162e-06, 3.884331923127011e-06] >>> a_alpha_and_derivatives_quadratic_terms(a_alphas, a_alpha_roots, da_alpha_dTs, d2a_alpha_dT2s, 299.0, zs, kijs) (0.58562139582, -0.001018667672, 3.56669817856e-06, [0.35469988173, 0.61604757237], [-0.000672387374, -0.001064293501])
https://github.com/calebbell/thermo/blob/554425bd7b6fae231b9659f09fce392f347505fd/thermo/eos_mix_methods.py#L556-L745
from fluids.constants import R from fluids.numerics import numpy as np, catanh from math import sqrt, log from thermo.eos import eos_lnphi from thermo.eos_volume import volume_solutions_halley, volume_solutions_fast __all__ = ['a_alpha_aijs_composition_independent', 'a_alpha_and_derivatives', 'a_alpha_and_derivatives_full', 'a_alpha_quadratic_terms', 'a_alpha_and_derivatives_quadratic_terms', 'PR_lnphis', 'VDW_lnphis', 'SRK_lnphis', 'eos_mix_lnphis_general', 'VDW_lnphis_fastest', 'PR_lnphis_fastest', 'SRK_lnphis_fastest', 'RK_lnphis_fastest', 'PR_translated_lnphis_fastest', 'G_dep_lnphi_d_helper', 'RK_d3delta_dninjnks', 'PR_ddelta_dzs', 'PR_ddelta_dns', 'PR_d2delta_dninjs', 'PR_d3delta_dninjnks', 'PR_depsilon_dns', 'PR_d2epsilon_dninjs', 'PR_d3epsilon_dninjnks', 'PR_d2epsilon_dzizjs', 'PR_depsilon_dzs', 'PR_translated_d2delta_dninjs', 'PR_translated_d3delta_dninjnks', 'PR_translated_d3epsilon_dninjnks', 'PR_translated_ddelta_dzs', 'PR_translated_ddelta_dns', 'PR_translated_depsilon_dzs', 'PR_translated_depsilon_dns', 'PR_translated_d2epsilon_dzizjs', 'PR_translated_d2epsilon_dninjs', 'SRK_translated_ddelta_dns', 'SRK_translated_depsilon_dns', 'SRK_translated_d2epsilon_dzizjs', 'SRK_translated_depsilon_dzs', 'SRK_translated_d2delta_dninjs', 'SRK_translated_d3delta_dninjnks', 'SRK_translated_d2epsilon_dninjs', 'SRK_translated_d3epsilon_dninjnks', 'SRK_translated_lnphis_fastest', 'eos_mix_db_dns', 'eos_mix_da_alpha_dns', 'eos_mix_dV_dzs', 'eos_mix_a_alpha_volume'] R2 = R*R R_inv = 1.0/R R2_inv = R_inv*R_inv root_two = sqrt(2.) root_two_m1 = root_two - 1.0 root_two_p1 = root_two + 1.0 def a_alpha_aijs_composition_independent(a_alphas, kijs): N = len(a_alphas) _sqrt = sqrt a_alpha_ijs = [[0.0]*N for _ in range(N)] a_alpha_roots = [0.0]*N for i in range(N): a_alpha_roots[i] = _sqrt(a_alphas[i]) a_alpha_ij_roots_inv = [[0.0]*N for _ in range(N)] for i in range(N): kijs_i = kijs[i] a_alpha_ijs_is = a_alpha_ijs[i] a_alpha_ij_roots_i_inv = a_alpha_ij_roots_inv[i] a_alpha_i_root_i = a_alpha_roots[i] for j in range(i, N): term = a_alpha_i_root_i*a_alpha_roots[j] a_alpha_ij_roots_i_inv[j] = a_alpha_ij_roots_inv[j][i] = 1.0/term a_alpha_ijs_is[j] = a_alpha_ijs[j][i] = (1. - kijs_i[j])*term return a_alpha_ijs, a_alpha_roots, a_alpha_ij_roots_inv def a_alpha_aijs_composition_independent_support_zeros(a_alphas, kijs): N = len(a_alphas) cmps = range(N) a_alpha_ijs = [[0.0] * N for _ in cmps] a_alpha_roots = [a_alpha_i ** 0.5 for a_alpha_i in a_alphas] a_alpha_ij_roots_inv = [[0.0] * N for _ in cmps] for i in cmps: kijs_i = kijs[i] a_alpha_i = a_alphas[i] a_alpha_ijs_is = a_alpha_ijs[i] a_alpha_ij_roots_i_inv = a_alpha_ij_roots_inv[i] a_alpha_i_root_i = a_alpha_roots[i] for j in range(i, N): term = a_alpha_i_root_i * a_alpha_roots[j] try: a_alpha_ij_roots_i_inv[j] = 1.0/term except ZeroDivisionError: a_alpha_ij_roots_i_inv[j] = 1e100 a_alpha_ijs_is[j] = a_alpha_ijs[j][i] = (1. 
- kijs_i[j]) * term return a_alpha_ijs, a_alpha_roots, a_alpha_ij_roots_inv def a_alpha_and_derivatives(a_alphas, T, zs, kijs, a_alpha_ijs=None, a_alpha_roots=None, a_alpha_ij_roots_inv=None): N = len(a_alphas) da_alpha_dT, d2a_alpha_dT2 = 0.0, 0.0 if a_alpha_ijs is None or a_alpha_roots is None or a_alpha_ij_roots_inv is None: a_alpha_ijs, a_alpha_roots, a_alpha_ij_roots_inv = a_alpha_aijs_composition_independent(a_alphas, kijs) a_alpha = 0.0 for i in range(N): a_alpha_ijs_i = a_alpha_ijs[i] zi = zs[i] for j in range(i+1, N): term = a_alpha_ijs_i[j]*zi*zs[j] a_alpha += term + term a_alpha += a_alpha_ijs_i[i]*zi*zi return a_alpha, None, a_alpha_ijs def a_alpha_and_derivatives_full(a_alphas, da_alpha_dTs, d2a_alpha_dT2s, T, zs, kijs, a_alpha_ijs=None, a_alpha_roots=None, a_alpha_ij_roots_inv=None): N = len(a_alphas) da_alpha_dT, d2a_alpha_dT2 = 0.0, 0.0 if a_alpha_ijs is None or a_alpha_roots is None or a_alpha_ij_roots_inv is None: a_alpha_ijs, a_alpha_roots, a_alpha_ij_roots_inv = a_alpha_aijs_composition_independent(a_alphas, kijs) z_products = [[zs[i]*zs[j] for j in range(N)] for i in range(N)] a_alpha = 0.0 for i in range(N): a_alpha_ijs_i = a_alpha_ijs[i] z_products_i = z_products[i] for j in range(i): term = a_alpha_ijs_i[j]*z_products_i[j] a_alpha += term + term a_alpha += a_alpha_ijs_i[i]*z_products_i[i] da_alpha_dT_ijs = [[0.0]*N for _ in range(N)] d2a_alpha_dT2_ijs = [[0.0]*N for _ in range(N)] d2a_alpha_dT2_ij = 0.0 for i in range(N): kijs_i = kijs[i] a_alphai = a_alphas[i] z_products_i = z_products[i] da_alpha_dT_i = da_alpha_dTs[i] d2a_alpha_dT2_i = d2a_alpha_dT2s[i] a_alpha_ij_roots_inv_i = a_alpha_ij_roots_inv[i] da_alpha_dT_ijs_i = da_alpha_dT_ijs[i] for j in range(N): if j < i: continue a_alphaj = a_alphas[j] x0_05_inv = a_alpha_ij_roots_inv_i[j] zi_zj = z_products_i[j] da_alpha_dT_j = da_alpha_dTs[j] x1 = a_alphai*da_alpha_dT_j x2 = a_alphaj*da_alpha_dT_i x1_x2 = x1 + x2 x3 = x1_x2 + x1_x2 kij_m1 = kijs_i[j] - 1.0 da_alpha_dT_ij = -0.5*kij_m1*x1_x2*x0_05_inv da_alpha_dT_ijs_i[j] = da_alpha_dT_ijs[j][i] = da_alpha_dT_ij da_alpha_dT_ij *= zi_zj x0 = a_alphai*a_alphaj d2a_alpha_dT2_ij = kij_m1*( (x0*( -0.5*(a_alphai*d2a_alpha_dT2s[j] + a_alphaj*d2a_alpha_dT2_i) - da_alpha_dT_i*da_alpha_dT_j) +.25*x1_x2*x1_x2)/(x0_05_inv*x0*x0)) d2a_alpha_dT2_ijs[i][j] = d2a_alpha_dT2_ijs[j][i] = d2a_alpha_dT2_ij d2a_alpha_dT2_ij *= zi_zj if i != j: da_alpha_dT += da_alpha_dT_ij + da_alpha_dT_ij d2a_alpha_dT2 += d2a_alpha_dT2_ij + d2a_alpha_dT2_ij else: da_alpha_dT += da_alpha_dT_ij d2a_alpha_dT2 += d2a_alpha_dT2_ij return a_alpha, da_alpha_dT, d2a_alpha_dT2, a_alpha_ijs, da_alpha_dT_ijs, d2a_alpha_dT2_ijs def a_alpha_quadratic_terms(a_alphas, a_alpha_roots, T, zs, kijs, a_alpha_j_rows=None, vec0=None): N = len(a_alphas) if a_alpha_j_rows is None: a_alpha_j_rows = [0.0]*N for i in range(N): a_alpha_j_rows[i] = 0.0 if vec0 is None: vec0 = [0.0]*N for i in range(N): vec0[i] = a_alpha_roots[i]*zs[i] a_alpha = 0.0 i = 0 while i < N: kijs_i = kijs[i] j = 0 while j < i: a_alpha_j_rows[j] += (1. - kijs_i[j])*vec0[i] a_alpha_j_rows[i] += (1. - kijs_i[j])*vec0[j] j += 1 i += 1 for i in range(N): a_alpha_j_rows[i] *= a_alpha_roots[i] a_alpha_j_rows[i] += (1. - kijs[i][i])*a_alphas[i]*zs[i] a_alpha += a_alpha_j_rows[i]*zs[i] return a_alpha, a_alpha_j_rows ''' N = len(a_alphas) a_alpha_j_rows = [0.0]*N a_alpha = 0.0 for i in range(N): kijs_i = kijs[i] a_alpha_i_root_i = a_alpha_roots[i] for j in range(i): a_alpha_ijs_ij = (1. 
- kijs_i[j])*a_alpha_i_root_i*a_alpha_roots[j] t200 = a_alpha_ijs_ij*zs[i] a_alpha_j_rows[j] += t200 a_alpha_j_rows[i] += zs[j]*a_alpha_ijs_ij t200 *= zs[j] a_alpha += t200 + t200 t200 = (1. - kijs_i[i])*a_alphas[i]*zs[i] a_alpha += t200*zs[i] a_alpha_j_rows[i] += t200 return a_alpha, a_alpha_j_rows '''
MIT License
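A quick cross-check of the doctest values in the docstring above, using only the quadratic mixing rule itself (pure Python, no thermo import needed):

from math import sqrt

kijs = [[0.0, 0.083], [0.083, 0.0]]
zs = [0.1164203, 0.8835797]
a_alphas = [0.2491099357671155, 0.6486495863528039]

# a_alpha = sum_i sum_j z_i z_j (1 - k_ij) sqrt(a_alpha_i * a_alpha_j)
a_alpha = sum(zs[i]*zs[j]*(1.0 - kijs[i][j])*sqrt(a_alphas[i]*a_alphas[j])
              for i in range(2) for j in range(2))
print(a_alpha)   # ~0.5856214, agreeing with the 0.58562139582 in the doctest above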
vlsida/openram
compiler/modules/multibank.py
multibank.route_control_lines
python
def route_control_lines(self): connection = [] connection.append((self.prefix+"clk_buf_bar", self.precharge_array_inst.get_pin("en").lc())) connection.append((self.prefix+"w_en", self.write_driver_array_inst.get_pin("en").lc())) connection.append((self.prefix+"s_en", self.sense_amp_array_inst.get_pin("en").lc())) for (control_signal, pin_pos) in connection: control_pos = vector(self.bus_xoffset[control_signal].x ,pin_pos.y) self.add_path("m1", [control_pos, pin_pos]) self.add_via_center(layers=self.m1_stack, offset=control_pos, rotate=90) control_signal = self.prefix+"clk_buf" pin_pos = self.wordline_driver_inst.get_pin("en").uc() mid_pos = pin_pos + vector(0,self.m1_pitch) control_x_offset = self.bus_xoffset[control_signal].x control_pos = vector(control_x_offset + self.m1_width, mid_pos.y) self.add_wire(self.m1_stack,[pin_pos, mid_pos, control_pos]) control_via_pos = vector(control_x_offset, mid_pos.y) self.add_via_center(layers=self.m1_stack, offset=control_via_pos, rotate=90)
Route the control lines of the entire bank
https://github.com/vlsida/openram/blob/f66aac3264598eeae31225c62b6a4af52412d407/compiler/modules/multibank.py#L752-L783
import sys from tech import drc, parameter import debug import design import math from math import log,sqrt,ceil import contact from vector import vector from sram_factory import factory from globals import OPTS class multibank(design.design): def __init__(self, name, word_size, num_words, words_per_row, num_banks=1): super().__init__(name) debug.info(2, "create sram of size {0} with {1} words".format(word_size,num_words)) self.word_size = word_size self.num_words = num_words self.words_per_row = words_per_row self.num_banks = num_banks if self.num_banks>1: self.prefix="gated_" else: self.prefix="" self.compute_sizes() self.add_pins() self.add_modules() self.create_instances() self.setup_layout_constraints() self.add_bank_select() self.route_layout() self.add_lvs_correspondence_points() self.bank_center=self.offset_all_coordinates().scale(-1,-1) self.DRC_LVS() def add_pins(self): for i in range(self.word_size): self.add_pin("dout_{0}".format(i),"OUT") for i in range(self.word_size): self.add_pin("bank_din_{0}".format(i),"IN") for i in range(self.addr_size): self.add_pin("a_{0}".format(i),"INPUT") if self.num_banks > 1: self.add_pin("bank_sel","INPUT") for pin in ["s_en","w_en","tri_en_bar","tri_en", "clk_buf_bar","clk_buf"]: self.add_pin(pin,"INPUT") self.add_pin("vdd","POWER") self.add_pin("gnd","GROUND") def route_layout(self): self.route_central_bus() self.route_precharge_to_bitcell_array() self.route_col_mux_to_bitcell_array() self.route_sense_amp_to_col_mux_or_bitcell_array() self.route_sense_amp_out() self.route_wordline_driver() self.route_write_driver() self.route_row_decoder() self.route_column_address_lines() self.route_control_lines() self.add_control_pins() if self.num_banks > 1: self.route_bank_select() self.route_supplies() def create_instances(self): self.add_bitcell_array() self.add_precharge_array() self.add_column_mux_array() self.add_sense_amp_array() self.add_write_driver_array() self.add_row_decoder() self.add_wordline_driver() self.add_column_decoder() def compute_sizes(self): self.num_cols = int(self.words_per_row*self.word_size) self.num_rows = int(self.num_words / self.words_per_row) self.row_addr_size = int(log(self.num_rows, 2)) self.col_addr_size = int(log(self.words_per_row, 2)) self.addr_size = self.col_addr_size + self.row_addr_size debug.check(self.num_rows*self.num_cols==self.word_size*self.num_words,"Invalid bank sizes.") debug.check(self.addr_size==self.col_addr_size + self.row_addr_size,"Invalid address break down.") self.supply_rail_width = 4*self.m2_width self.supply_rail_pitch = self.supply_rail_width + 4*self.m2_space self.num_control_lines = 6 self.input_control_signals = ["clk_buf", "tri_en_bar", "tri_en", "clk_buf_bar", "w_en", "s_en"] if self.num_banks > 1: self.control_signals = ["gated_"+str for str in self.input_control_signals] else: self.control_signals = self.input_control_signals if self.col_addr_size>0: self.num_col_addr_lines = 2**self.col_addr_size else: self.num_col_addr_lines = 0 self.central_bus_width = self.m2_pitch * self.num_control_lines + 2*self.m2_width self.m2_gap = max(2*drc("pwell_to_nwell"] + drc["well_enclose_active"), 2*self.m2_pitch) def add_modules(self): self.tri = self.mod_tri_gate() self.bitcell = self.mod_bitcell() self.bitcell_array = self.mod_bitcell_array(cols=self.num_cols, rows=self.num_rows) self.add_mod(self.bitcell_array) self.precharge_array = self.mod_precharge_array(columns=self.num_cols) self.add_mod(self.precharge_array) if self.col_addr_size > 0: self.column_mux_array = 
self.mod_column_mux_array(columns=self.num_cols, word_size=self.word_size) self.add_mod(self.column_mux_array) self.sense_amp_array = self.mod_sense_amp_array(word_size=self.word_size, words_per_row=self.words_per_row) self.add_mod(self.sense_amp_array) if self.write_size: self.write_mask_driver_array = self.mod_write_mask_driver_array(columns=self.num_cols, word_size=self.word_size, write_size=self.write_size) self.add_mod(self.write_mask_driver_array) else: self.write_driver_array = self.mod_write_driver_array(columns=self.num_cols, word_size=self.word_size) self.add_mod(self.write_driver_array) self.row_decoder = self.mod_decoder(rows=self.num_rows) self.add_mod(self.row_decoder) self.tri_gate_array = self.mod_tri_gate_array(columns=self.num_cols, word_size=self.word_size) self.add_mod(self.tri_gate_array) self.wordline_driver = self.mod_wordline_driver(rows=self.num_rows) self.add_mod(self.wordline_driver) self.inv = pinv() self.add_mod(self.inv) if(self.num_banks > 1): self.bank_select = self.mod_bank_select() self.add_mod(self.bank_select) def add_bitcell_array(self): self.bitcell_array_inst=self.add_inst(name="bitcell_array", mod=self.bitcell_array, offset=vector(0,0)) temp = [] for i in range(self.num_cols): temp.append("bl_{0}".format(i)) temp.append("br_{0}".format(i)) for j in range(self.num_rows): temp.append("wl_{0}".format(j)) temp.extend(["vdd", "gnd"]) self.connect_inst(temp) def add_precharge_array(self): y_offset = self.bitcell_array.height + self.m2_gap self.precharge_array_inst=self.add_inst(name="precharge_array", mod=self.precharge_array, offset=vector(0,y_offset)) temp = [] for i in range(self.num_cols): temp.append("bl_{0}".format(i)) temp.append("br_{0}".format(i)) temp.extend([self.prefix+"clk_buf_bar", "vdd"]) self.connect_inst(temp) def add_column_mux_array(self): if self.col_addr_size > 0: self.column_mux_height = self.column_mux_array.height + self.m2_gap else: self.column_mux_height = 0 return y_offset = self.column_mux_height self.col_mux_array_inst=self.add_inst(name="column_mux_array", mod=self.column_mux_array, offset=vector(0,y_offset).scale(-1,-1)) temp = [] for i in range(self.num_cols): temp.append("bl_{0}".format(i)) temp.append("br_{0}".format(i)) for k in range(self.words_per_row): temp.append("sel_{0}".format(k)) for j in range(self.word_size): temp.append("bl_out_{0}".format(j)) temp.append("br_out_{0}".format(j)) temp.append("gnd") self.connect_inst(temp) def add_sense_amp_array(self): y_offset = self.column_mux_height + self.sense_amp_array.height + self.m2_gap self.sense_amp_array_inst=self.add_inst(name="sense_amp_array", mod=self.sense_amp_array, offset=vector(0,y_offset).scale(-1,-1)) temp = [] for i in range(self.word_size): temp.append("sa_out_{0}".format(i)) if self.words_per_row == 1: temp.append("bl_{0}".format(i)) temp.append("br_{0}".format(i)) else: temp.append("bl_out_{0}".format(i)) temp.append("br_out_{0}".format(i)) temp.extend([self.prefix+"s_en", "vdd", "gnd"]) self.connect_inst(temp) def add_write_driver_array(self): y_offset = self.sense_amp_array.height + self.column_mux_height + self.m2_gap + self.write_driver_array.height self.write_driver_array_inst=self.add_inst(name="write_driver_array", mod=self.write_driver_array, offset=vector(0,y_offset).scale(-1,-1)) temp = [] for i in range(self.word_size): temp.append("bank_din_{0}".format(i)) for i in range(self.word_size): if (self.words_per_row == 1): temp.append("bl_{0}".format(i)) temp.append("br_{0}".format(i)) else: temp.append("bl_out_{0}".format(i)) 
temp.append("br_out_{0}".format(i)) temp.extend([self.prefix+"w_en", "vdd", "gnd"]) self.connect_inst(temp) def add_tri_gate_array(self): y_offset = self.sense_amp_array.height+self.column_mux_height + self.m2_gap + self.tri_gate_array.height self.tri_gate_array_inst=self.add_inst(name="tri_gate_array", mod=self.tri_gate_array, offset=vector(0,y_offset).scale(-1,-1)) temp = [] for i in range(self.word_size): temp.append("sa_out_{0}".format(i)) for i in range(self.word_size): temp.append("dout_{0}".format(i)) temp.extend([self.prefix+"tri_en", self.prefix+"tri_en_bar", "vdd", "gnd"]) self.connect_inst(temp) def add_row_decoder(self): x_offset = -(self.row_decoder.width + self.central_bus_width + self.wordline_driver.width) self.row_decoder_inst=self.add_inst(name="row_decoder", mod=self.row_decoder, offset=vector(x_offset,0)) temp = [] for i in range(self.row_addr_size): temp.append("A_{0}".format(i+self.col_addr_size)) for j in range(self.num_rows): temp.append("dec_out_{0}".format(j)) temp.extend(["vdd", "gnd"]) self.connect_inst(temp) def add_wordline_driver(self): x_offset = -(self.central_bus_width + self.wordline_driver.width) + self.m2_pitch self.wordline_driver_inst=self.add_inst(name="wordline_driver", mod=self.wordline_driver, offset=vector(x_offset,0)) temp = [] for i in range(self.num_rows): temp.append("dec_out_{0}".format(i)) for i in range(self.num_rows): temp.append("wl_{0}".format(i)) temp.append(self.prefix+"clk_buf") temp.append("vdd") temp.append("gnd") self.connect_inst(temp) def add_column_decoder_module(self): x_off = -(self.central_bus_width + self.wordline_driver.width + self.col_decoder.width) y_off = -(self.col_decoder.height + 2*drc("well_to_well")) self.col_decoder_inst=self.add_inst(name="col_address_decoder", mod=self.col_decoder, offset=vector(x_off,y_off)) temp = [] for i in range(self.col_addr_size): temp.append("A_{0}".format(i)) for j in range(self.num_col_addr_lines): temp.append("sel_{0}".format(j)) temp.extend(["vdd", "gnd"]) self.connect_inst(temp) def add_column_decoder(self): if self.col_addr_size == 0: return elif self.col_addr_size == 1: self.col_decoder = pinvbuf(height=self.mod_dff.height) self.add_mod(self.col_decoder) elif self.col_addr_size == 2: self.col_decoder = self.row_decoder.pre2_4 elif self.col_addr_size == 3: self.col_decoder = self.row_decoder.pre3_8 else: debug.error("Invalid column decoder?",-1) self.add_column_decoder_module() def add_bank_select(self): if not self.num_banks > 1: return x_off = -(self.row_decoder.width + self.central_bus_width + self.wordline_driver.width) if self.col_addr_size > 0: y_off = min(self.col_decoder_inst.by(), self.col_mux_array_inst.by()) else: y_off = self.row_decoder_inst.by() y_off -= (self.bank_select.height + drc("well_to_well")) self.bank_select_pos = vector(x_off,y_off) self.bank_select_inst = self.add_inst(name="bank_select", mod=self.bank_select, offset=self.bank_select_pos) temp = [] temp.extend(self.input_control_signals) temp.append("bank_sel") temp.extend(self.control_signals) temp.extend(["vdd", "gnd"]) self.connect_inst(temp) def route_supplies(self): for inst in self.insts: self.copy_power_pins(inst,"vdd") self.copy_power_pins(inst,"gnd") def route_bank_select(self): for input_name in self.input_control_signals+["bank_sel"]: self.copy_layout_pin(self.bank_select_inst, input_name) for gated_name in self.control_signals: out_pos = self.bank_select_inst.get_pin(gated_name).rc() bus_pos = vector(self.bus_xoffset[gated_name], out_pos.y) self.add_path("m3",[out_pos, bus_pos]) 
self.add_via_center(layers=self.m2_stack, offset=bus_pos, rotate=90) self.add_via_center(layers=self.m1_stack, offset=out_pos, rotate=90) self.add_via_center(layers=self.m2_stack, offset=out_pos, rotate=90) def setup_layout_constraints(self): write_driver_min_y_offset = self.write_driver_array_inst.by() - 3*self.m2_pitch row_decoder_min_y_offset = self.row_decoder_inst.by() if self.col_addr_size > 0: col_decoder_min_y_offset = self.col_decoder_inst.by() else: col_decoder_min_y_offset = row_decoder_min_y_offset if self.num_banks>1: self.min_y_offset = min(col_decoder_min_y_offset - self.bank_select.height, write_driver_min_y_offset) else: self.min_y_offset = min(col_decoder_min_y_offset, write_driver_min_y_offset) self.max_y_offset = self.precharge_array_inst.uy() + 3*self.m1_width self.max_x_offset = self.bitcell_array_inst.ur().x + 3*self.m1_width self.min_x_offset = self.row_decoder_inst.lx() ur = vector(self.max_x_offset, self.max_y_offset) ll = vector(self.min_x_offset, self.min_y_offset) self.core_bbox = [ll, ur] self.height = ur.y - ll.y self.width = ur.x - ll.x def route_central_bus(self): control_bus_offset = vector(-self.m2_pitch * self.num_control_lines - self.m2_width, 0) control_bus_length = self.bitcell_array_inst.uy() self.bus_xoffset = self.create_vertical_bus(layer="m2", pitch=self.m2_pitch, offset=control_bus_offset, names=self.control_signals, length=control_bus_length) def route_precharge_to_bitcell_array(self): for i in range(self.num_cols): precharge_bl = self.precharge_array_inst.get_pin("bl_{}".format(i)).bc() precharge_br = self.precharge_array_inst.get_pin("br_{}".format(i)).bc() bitcell_bl = self.bitcell_array_inst.get_pin("bl_{}".format(i)).uc() bitcell_br = self.bitcell_array_inst.get_pin("br_{}".format(i)).uc() yoffset = 0.5*(precharge_bl.y+bitcell_bl.y) self.add_path("m2",[precharge_bl, vector(precharge_bl.x,yoffset), vector(bitcell_bl.x,yoffset), bitcell_bl]) self.add_path("m2",[precharge_br, vector(precharge_br.x,yoffset), vector(bitcell_br.x,yoffset), bitcell_br]) def route_col_mux_to_bitcell_array(self): if self.col_addr_size==0: return for i in range(self.num_cols): col_mux_bl = self.col_mux_array_inst.get_pin("bl_{}".format(i)).uc() col_mux_br = self.col_mux_array_inst.get_pin("br_{}".format(i)).uc() bitcell_bl = self.bitcell_array_inst.get_pin("bl_{}".format(i)).bc() bitcell_br = self.bitcell_array_inst.get_pin("br_{}".format(i)).bc() yoffset = 0.5*(col_mux_bl.y+bitcell_bl.y) self.add_path("m2",[col_mux_bl, vector(col_mux_bl.x,yoffset), vector(bitcell_bl.x,yoffset), bitcell_bl]) self.add_path("m2",[col_mux_br, vector(col_mux_br.x,yoffset), vector(bitcell_br.x,yoffset), bitcell_br]) def route_sense_amp_to_col_mux_or_bitcell_array(self): for i in range(self.word_size): sense_amp_bl = self.sense_amp_array_inst.get_pin("bl_{}".format(i)).uc() sense_amp_br = self.sense_amp_array_inst.get_pin("br_{}".format(i)).uc() if self.col_addr_size>0: connect_bl = self.col_mux_array_inst.get_pin("bl_out_{}".format(i)).bc() connect_br = self.col_mux_array_inst.get_pin("br_out_{}".format(i)).bc() else: connect_bl = self.bitcell_array_inst.get_pin("bl_{}".format(i)).bc() connect_br = self.bitcell_array_inst.get_pin("br_{}".format(i)).bc() yoffset = 0.5*(sense_amp_bl.y+connect_bl.y) self.add_path("m2",[sense_amp_bl, vector(sense_amp_bl.x,yoffset), vector(connect_bl.x,yoffset), connect_bl]) self.add_path("m2",[sense_amp_br, vector(sense_amp_br.x,yoffset), vector(connect_br.x,yoffset), connect_br]) def route_sense_amp_to_trigate(self): for i in range(self.word_size): 
tri_gate_in = self.tri_gate_array_inst.get_pin("in_{}".format(i)).lc() sa_data_out = self.sense_amp_array_inst.get_pin("data_{}".format(i)).bc() self.add_via_center(layers=self.m2_stack, offset=tri_gate_in) self.add_via_center(layers=self.m2_stack, offset=sa_data_out) self.add_path("m3",[sa_data_out,tri_gate_in]) def route_sense_amp_out(self): for i in range(self.word_size): data_pin = self.sense_amp_array_inst.get_pin("data_{}".format(i)) self.add_layout_pin_rect_center(text="dout_{}".format(i), layer=data_pin.layer, offset=data_pin.center(), height=data_pin.height(), width=data_pin.width()), def route_tri_gate_out(self): for i in range(self.word_size): data_pin = self.tri_gate_array_inst.get_pin("out_{}".format(i)) self.add_layout_pin_rect_center(text="dout_{}".format(i), layer=data_pin.layer, offset=data_pin.center(), height=data_pin.height(), width=data_pin.width()), def route_row_decoder(self): for i in range(self.row_addr_size): addr_idx = i + self.col_addr_size decoder_name = "a_{}".format(i) addr_name = "a_{}".format(addr_idx) self.copy_layout_pin(self.row_decoder_inst, decoder_name, addr_name) def route_write_driver(self): for i in range(self.word_size): data_name = "data_{}".format(i) din_name = "bank_din_{}".format(i) self.copy_layout_pin(self.write_driver_array_inst, data_name, din_name) def route_wordline_driver(self): for i in range(self.num_rows): decoder_out_pos = self.row_decoder_inst.get_pin("decode_{}".format(i)).rc() driver_in_pos = self.wordline_driver_inst.get_pin("in_{}".format(i)).lc() mid1 = decoder_out_pos.scale(0.5,1)+driver_in_pos.scale(0.5,0) mid2 = decoder_out_pos.scale(0.5,0)+driver_in_pos.scale(0.5,1) self.add_path("m1", [decoder_out_pos, mid1, mid2, driver_in_pos]) driver_wl_pos = self.wordline_driver_inst.get_pin("wl_{}".format(i)).rc() bitcell_wl_pos = self.bitcell_array_inst.get_pin("wl_{}".format(i)).lc() mid1 = driver_wl_pos.scale(0.5,1)+bitcell_wl_pos.scale(0.5,0) mid2 = driver_wl_pos.scale(0.5,0)+bitcell_wl_pos.scale(0.5,1) self.add_path("m1", [driver_wl_pos, mid1, mid2, bitcell_wl_pos]) def route_column_address_lines(self): if not self.col_addr_size>0: return if self.col_addr_size == 1: decode_names = ["Zb", "Z"] self.copy_layout_pin(self.col_decoder_inst, "A", "a[0]") elif self.col_addr_size > 1: decode_names = [] for i in range(self.num_col_addr_lines): decode_names.append("out_{}".format(i)) for i in range(self.col_addr_size): decoder_name = "in_{}".format(i) addr_name = "a_{}".format(i) self.copy_layout_pin(self.col_decoder_inst, decoder_name, addr_name) top_y_offset = self.col_mux_array_inst.get_pin("sel_{}".format(self.num_col_addr_lines-1)).cy() for (decode_name,i) in zip(decode_names,range(self.num_col_addr_lines)): mux_name = "sel_{}".format(i) mux_addr_pos = self.col_mux_array_inst.get_pin(mux_name).lc() decode_out_pos = self.col_decoder_inst.get_pin(decode_name).center() delta_offset = self.col_decoder_inst.rx() - decode_out_pos.x + self.m2_pitch if decode_out_pos.y > top_y_offset: mid1_pos = vector(decode_out_pos.x + delta_offset + i*self.m2_pitch,decode_out_pos.y) else: mid1_pos = vector(decode_out_pos.x + delta_offset + (self.num_col_addr_lines-i)*self.m2_pitch,decode_out_pos.y) mid2_pos = vector(mid1_pos.x,mux_addr_pos.y) self.add_path("m1",[decode_out_pos, mid1_pos, mid2_pos, mux_addr_pos]) def add_lvs_correspondence_points(self): for i in range(self.num_rows): wl_name = "wl_{}".format(i) wl_pin = self.bitcell_array_inst.get_pin(wl_name) self.add_label(text=wl_name, layer="m1", offset=wl_pin.center()) for i in range(self.num_cols): 
bl_name = "bl_{}".format(i) br_name = "br_{}".format(i) bl_pin = self.bitcell_array_inst.get_pin(bl_name) br_pin = self.bitcell_array_inst.get_pin(br_name) self.add_label(text=bl_name, layer="m2", offset=bl_pin.center()) self.add_label(text=br_name, layer="m2", offset=br_pin.center()) for i in range(self.word_size): data_name = "dec_out_{}".format(i) pin_name = "in_{}".format(i) data_pin = self.wordline_driver_inst.get_pin(pin_name) self.add_label(text=data_name, layer="m1", offset=data_pin.center())
BSD 3-Clause New or Revised License
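A toy geometry sketch (plain Python, not the openram API; all coordinates invented) of the pattern route_control_lines() follows: route each control pin horizontally to its vertical bus rail and drop a via where the two meet.

bus_xoffset = {'clk_buf_bar': 0.0, 'w_en': 0.46, 's_en': 0.92}   # rail x positions, made up
pin_y = {'clk_buf_bar': 12.3, 'w_en': 4.1, 's_en': 6.8}          # pin y positions, made up

for signal, y in pin_y.items():
    via_center = (bus_xoffset[signal], y)    # where add_via_center() would be placed
    print('{}: horizontal m1 wire at y={}, via on the bus at {}'.format(signal, y, via_center))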
airbnb/omniduct
omniduct/caches/base.py
Cache.unset
python
def unset(self, key, namespace=None): namespace, key = self._namespace(namespace), self._key(key) if not self._has_key(namespace, key): raise KeyError("{} (namespace: {})".format(key, namespace)) self._remove_key(namespace, key)
Remove the nominated key from the cache. Args: key (str): The key which should be unset. namespace (str, None): The namespace to be used.
https://github.com/airbnb/omniduct/blob/af0148e1f0de147c6c3757dc8c22293225caa543/omniduct/caches/base.py#L268-L279
import datetime import functools import sys from abc import abstractmethod import dateutil import pandas import six import yaml from decorator import decorator from interface_meta import quirk_docs from omniduct.duct import Duct from omniduct.utils.config import config from omniduct.utils.debug import logger from omniduct.utils.decorators import function_args_as_kwargs, require_connection from ._serializers import PickleSerializer config.register( 'cache_fail_hard', description='Raise an exception if a cache fails to save (otherwise errors are logged and suppressed).', default=False ) def cached_method( key, namespace=lambda self, kwargs: ( self.cache_namespace or "{}.{}".format(self.__class__.__name__, self.name) ), cache=lambda self, kwargs: self.cache, use_cache=lambda self, kwargs: kwargs.pop('use_cache', True), renew=lambda self, kwargs: kwargs.pop('renew', False), serializer=lambda self, kwargs: PickleSerializer(), metadata=lambda self, kwargs: None ): @decorator def wrapped(method, self, *args, **kwargs): kwargs = function_args_as_kwargs(method, self, *args, **kwargs) kwargs.pop('self') _key = key(self, kwargs) _namespace = namespace(self, kwargs) _cache = cache(self, kwargs) _use_cache = use_cache(self, kwargs) _renew = renew(self, kwargs) _serializer = serializer(self, kwargs) _metadata = metadata(self, kwargs) if _cache is None or not _use_cache: return method(self, **kwargs) if _cache.has_key(_key, namespace=_namespace) and not _renew: try: return _cache.get( _key, namespace=_namespace, serializer=_serializer ) except: logger.warning("Failed to retrieve results from cache. Renewing the cache...") if config.cache_fail_hard: six.reraise(*sys.exc_info()) finally: logger.caveat('Loaded from cache') value = method(self, **kwargs) if value is None: logger.warning("Method value returned None. Not saving to cache.") return try: _cache.set( _key, value=value, namespace=_namespace, serializer=_serializer, metadata=_metadata ) return _cache.get( _key, namespace=_namespace, serializer=_serializer ) except: logger.warning("Failed to save results to cache. 
If needed, please save them manually.") if config.cache_fail_hard: six.reraise(*sys.exc_info()) return value return wrapped class Cache(Duct): DUCT_TYPE = Duct.Type.CACHE @quirk_docs('_init', mro=True) def __init__(self, **kwargs): Duct.__init_with_kwargs__(self, kwargs) self._init(**kwargs) @abstractmethod def _init(self): pass @require_connection def set(self, key, value, namespace=None, serializer=None, metadata=None): namespace, key = self._namespace(namespace), self._key(key) serializer = serializer or PickleSerializer() try: with self._get_stream_for_key(namespace, key, 'data{}'.format(serializer.file_extension), mode='wb', create=True) as fh: serializer.serialize(value, fh) self.set_metadata(key, metadata, namespace=namespace, replace=True) except: self.unset(key, namespace=namespace) six.reraise(*sys.exc_info()) @require_connection def set_metadata(self, key, metadata, namespace=None, replace=False): namespace, key = self._namespace(namespace), self._key(key) if replace: orig_metadata = {'created': datetime.datetime.utcnow()} else: orig_metadata = self.get_metadata(key, namespace=namespace) orig_metadata.update(metadata or {}) with self._get_stream_for_key(namespace, key, 'metadata', mode='w', create=True) as fh: yaml.safe_dump(orig_metadata, fh, default_flow_style=False) @require_connection def get(self, key, namespace=None, serializer=None): namespace, key = self._namespace(namespace), self._key(key) serializer = serializer or PickleSerializer() if not self._has_key(namespace, key): raise KeyError("{} (namespace: {})".format(key, namespace)) try: with self._get_stream_for_key(namespace, key, 'data{}'.format(serializer.file_extension), mode='rb', create=False) as fh: return serializer.deserialize(fh) finally: self.set_metadata(key, namespace=namespace, metadata={'last_accessed': datetime.datetime.utcnow()}) @require_connection def get_bytecount(self, key, namespace=None): namespace, key = self._namespace(namespace), self._key(key) if not self._has_key(namespace, key): raise KeyError("{} (namespace: {})".format(key, namespace)) return self._get_bytecount_for_key(namespace, key) @require_connection def get_metadata(self, key, namespace=None): namespace, key = self._namespace(namespace), self._key(key) if not self._has_key(namespace, key): raise KeyError("{} (namespace: {})".format(key, namespace)) try: with self._get_stream_for_key(namespace, key, 'metadata', mode='r', create=False) as fh: return yaml.safe_load(fh) except: return {} @require_connection
MIT License
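A minimal dict-backed stand-in (not the omniduct API) that mirrors the get/set/unset contract above, including the KeyError that unset() raises for a missing key:

class DictCache:
    # Toy illustration only; a real omniduct Cache subclass would implement the
    # _has_key/_remove_key/_get_stream_for_key primitives instead.
    def __init__(self):
        self._store = {}

    def set(self, key, value, namespace=None):
        self._store[(namespace, key)] = value

    def get(self, key, namespace=None):
        return self._store[(namespace, key)]

    def unset(self, key, namespace=None):
        if (namespace, key) not in self._store:
            raise KeyError("{} (namespace: {})".format(key, namespace))
        del self._store[(namespace, key)]

cache = DictCache()
cache.set('query-123', {'rows': 42}, namespace='reports')
cache.unset('query-123', namespace='reports')   # a second unset would raise KeyError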
vanderhoof/pydbml
pydbml/definitions/reference.py
parse_ref
python
def parse_ref(s, l, t): init_dict = { 'type_': t['type'], 'table1': t['table1'], 'col1': t['field1'], 'table2': t['table2'], 'col2': t['field2'] } if 'name' in t: init_dict['name'] = t['name'] if 'settings' in t: init_dict.update(t['settings']) if 'comment' in t: init_dict['comment'] = t['comment'][0] if 'comment' not in init_dict and 'comment_before' in t: comment = '\n'.join(c[0] for c in t['comment_before']) init_dict['comment'] = comment ref = ReferenceBlueprint(**init_dict) return ref
ref name: table1.col1 > table2.col2 or ref name { table1.col1 < table2.col2 }
https://github.com/vanderhoof/pydbml/blob/4e2bec69232a6d751d616225e717e90901cd6ca9/pydbml/definitions/reference.py#L101-L129
import pyparsing as pp from pydbml.classes import ReferenceBlueprint from .common import _ from .common import _c from .common import c from .common import n from .generic import name pp.ParserElement.setDefaultWhitespaceChars(' \t\r') relation = pp.oneOf("> - <") ref_inline = pp.Literal("ref:") - relation('type') - name('table') - '.' - name('field') def parse_inline_relation(s, l, t): return ReferenceBlueprint(type_=t['type'], table2=t['table'], col2=t['field']) ref_inline.setParseAction(parse_inline_relation) on_option = ( pp.CaselessLiteral('no action') | pp.CaselessLiteral('restrict') | pp.CaselessLiteral('cascade') | pp.CaselessLiteral('set null') | pp.CaselessLiteral('set default') ) update = pp.CaselessLiteral("update:").suppress() + _ + on_option delete = pp.CaselessLiteral("delete:").suppress() + _ + on_option ref_setting = _ + (update('update') | delete('delete')) + _ ref_settings = ( '[' + ref_setting + ( ',' + ref_setting )[...] + ']' + c ) def parse_ref_settings(s, l, t): result = {} if 'update' in t: result['on_update'] = t['update'][0] if 'delete' in t: result['on_delete'] = t['delete'][0] if 'comment' in t: result['comment'] = t['comment'][0] return result ref_settings.setParseAction(parse_ref_settings) composite_name = ( '(' + pp.White()[...] - name + pp.White()[...] + ( pp.White()[...] + "," + pp.White()[...] + name + pp.White()[...] )[...] + ')' ) name_or_composite = name | pp.Combine(composite_name) ref_body = ( name('table1') - '.' - name_or_composite('field1') - relation('type') - name('table2') - '.' - name_or_composite('field2') + c + ref_settings('settings')[0, 1] ) ref_short = _c + pp.CaselessLiteral('ref') + name('name')[0, 1] + ':' - ref_body ref_long = _c + ( pp.CaselessLiteral('ref') + _ + name('name')[0, 1] + _ + '{' + _ - ref_body + _ - '}' )
MIT License
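A hedged parsing sketch for parse_ref(): it assumes parse_ref is attached to ref_short/ref_long as their parse action further down the module (not shown in the excerpt), and that ReferenceBlueprint exposes the attribute names used in init_dict.

from pydbml.definitions.reference import ref_short

blueprint = ref_short.parseString('Ref fk_posts_users: posts.user_id > users.id')[0]
print(blueprint.type_, blueprint.table1, blueprint.col1, blueprint.table2, blueprint.col2)
# expected: >  posts  user_id  users  id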
stanfordvl/jrmot_ros
paper_experiments/models/pointnet_model.py
PointNet.get_model
python
def get_model(self, point_cloud, is_training, bn_decay=None): batch_size = point_cloud.get_shape()[0].value end_points = {} with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE) as sc: transform = input_transform_net(point_cloud, is_training, bn_decay, K=3) point_cloud_transformed = tf.matmul(point_cloud, transform) input_image = tf.expand_dims(point_cloud_transformed, -1) net = pointnet_tf_util.conv2d(input_image, 64, [1,3], padding='VALID', stride=[1,1], bn=True, is_training=is_training, scope='conv1', bn_decay=bn_decay) net = pointnet_tf_util.conv2d(net, 64, [1,1], padding='VALID', stride=[1,1], bn=True, is_training=is_training, scope='conv2', bn_decay=bn_decay) with tf.variable_scope('transform_net2', reuse=tf.AUTO_REUSE) as sc: transform = feature_transform_net(net, is_training, bn_decay, K=64) end_points['transform'] = transform net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform) net_transformed = tf.expand_dims(net_transformed, [2]) net = pointnet_tf_util.conv2d(net_transformed, 64, [1,1], padding='VALID', stride=[1,1], bn=True, is_training=is_training, scope='conv3', bn_decay=bn_decay) net = pointnet_tf_util.conv2d(net, 128, [1,1], padding='VALID', stride=[1,1], bn=True, is_training=is_training, scope='conv4', bn_decay=bn_decay) net = pointnet_tf_util.conv2d(net, 1024, [1,1], padding='VALID', stride=[1,1], bn=True, is_training=is_training, scope='conv5', bn_decay=bn_decay) net = tf.reduce_max(net, axis = 1) net = tf.reshape(net, [batch_size, -1]) feature = net return feature
Feature-extraction variant of PointNet: input is BxNx3 point clouds; the classification head is removed and the Bx1024 global feature from the max-pooled conv5 layer is returned.
https://github.com/stanfordvl/jrmot_ros/blob/4ae2006e2ec6a8314b56e265f656e131d12a049d/paper_experiments/models/pointnet_model.py#L49-L93
import os, pdb import tensorflow as tf tf.logging.set_verbosity(tf.logging.ERROR) import configparser from utils.pointnet_transform_nets import input_transform_net, feature_transform_net import utils.pointnet_tf_util as pointnet_tf_util class PointNet(): def __init__(self, config_path): parser = configparser.SafeConfigParser() parser.read(config_path) num_points = parser.getint('general', 'num_point') depth_model_path = parser.get('general', 'depth_model_path') with tf.device('/gpu:'+str(0)): self.pointclouds_pl, _ = self.placeholder_inputs(1, num_points) self.is_training_pl = tf.placeholder(tf.bool, shape=()) feature = self.get_model(self.pointclouds_pl, self.is_training_pl) self.feature = feature self.saver = tf.train.Saver() config = tf.ConfigProto() config.gpu_options.allow_growth = True config.allow_soft_placement = True config.log_device_placement = False self.sess = tf.Session(config=config) self.sess.run(tf.global_variables_initializer()) self.saver.restore(self.sess, depth_model_path) def __call__(self, input_point_cloud): feed_dict = {self.pointclouds_pl: input_point_cloud, self.is_training_pl: False} features = self.sess.run(self.feature,feed_dict=feed_dict) return features def placeholder_inputs(self, batch_size, num_point): pointclouds_pl = tf.placeholder(tf.float32, shape=(batch_size, None, 3)) labels_pl = tf.placeholder(tf.int32, shape=(batch_size)) return pointclouds_pl, labels_pl
MIT License
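A hypothetical usage sketch for the wrapper above (the import path, config filename and point count are placeholders): the call returns the 1024-d global feature produced by max-pooling the conv5 output.

import numpy as np
from models.pointnet_model import PointNet   # import path assumed from paper_experiments/models/

pointnet = PointNet('configs/pointnet.cfg')  # config must point at a valid depth_model_path
cloud = np.random.rand(1, 512, 3).astype(np.float32)
features = pointnet(cloud)
print(features.shape)                        # expected: (1, 1024)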
deepmind/acme
acme/utils/lp_utils.py
StepsLimiter.run
python
def run(self): logging.info('StepsLimiter: Starting with max_steps = %d (%s)', self._max_steps, self._steps_key) while True: counts = self._counter.get_counts() num_steps = counts.get(self._steps_key, 0) logging.info('StepsLimiter: Reached %d recorded steps', num_steps) if num_steps > self._max_steps: logging.info('StepsLimiter: Max steps of %d was reached, terminating', self._max_steps) lp.stop() for _ in range(10): time.sleep(1)
Run steps limiter to terminate an experiment when max_steps is reached.
https://github.com/deepmind/acme/blob/39232315e1761219bcc98e7a4ecdd308a42b00e4/acme/utils/lp_utils.py#L81-L103
import functools import inspect import time from typing import Any, Callable from absl import flags from absl import logging from acme.utils import counting import launchpad as lp FLAGS = flags.FLAGS def partial_kwargs(function: Callable[..., Any], **kwargs: Any) -> Callable[..., Any]: argspec = inspect.getfullargspec(function) if argspec.defaults is None: defaults = [] else: defaults = argspec.args[-len(argspec.defaults):] unknown_kwargs = set(kwargs.keys()).difference(defaults) if unknown_kwargs: error_string = 'Cannot override unknown or non-default kwargs: {}' raise ValueError(error_string.format(', '.join(unknown_kwargs))) return functools.partial(function, **kwargs) class StepsLimiter: def __init__(self, counter: counting.Counter, max_steps: int, steps_key: str = 'actor_steps'): self._counter = counter self._max_steps = max_steps self._steps_key = steps_key
Apache License 2.0
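A hedged sketch of the counter side that StepsLimiter.run() polls (in a real launchpad program the limiter runs as its own node; run() is not called here because it loops and eventually calls lp.stop()):

from acme.utils import counting
from acme.utils.lp_utils import StepsLimiter

counter = counting.Counter()
counter.increment(actor_steps=500)     # actors report progress under 'actor_steps'

limiter = StepsLimiter(counter, max_steps=1_000)
print(counter.get_counts())            # {'actor_steps': 500} -- still below max_steps
# limiter.run() would keep polling get_counts() and call lp.stop()
# once counts['actor_steps'] exceeds max_steps.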
geertj/python-ad
lib/ad/core/client.py
Client._credentials
python
def _credentials(self): creds = instance(Creds) if creds is None or not creds.principal(): m = 'No current credentials or credentials not activated.' raise ADError, m return creds
Return our current AD credentials.
https://github.com/geertj/python-ad/blob/3089eae072bd2e871c11251961ec35a09b83dd38/lib/ad/core/client.py#L55-L61
import re import dns import dns.resolver import dns.exception import ldap import ldap.sasl import ldap.controls import socket from ad.core.exception import Error as ADError from ad.core.object import factory, instance from ad.core.creds import Creds from ad.core.locate import Locator from ad.core.constant import LDAP_PORT, GC_PORT from ad.protocol import krb5 from ad.util import compat class Client(object): _timelimit = 0 _sizelimit = 0 _referrals = False _pagesize = 500 def __init__(self, domain): self.m_locator = None self.m_connections = None self.m_naming_contexts = None self.m_domain = self.dn_from_domain_name(domain) self.m_forest = None self.m_schema = None self.m_configuration = None def _locator(self): if self.m_locator is None: self.m_locator = factory(Locator) return self.m_locator
MIT License
gabfl/sql2csv
src/sql2csv.py
query_to_csv
python
def query_to_csv(engine, host, user, port, password, database, query, headers=False, out_type='stdout', destination_file=None, delimiter=',', quotechar='"', print_info=1000): connection = get_connection( engine=engine, host=host, user=user, port=port, password=password, database=database ) cursor = get_cursor(connection) if out_type == 'file': print('\n* Exporting rows...') with open_tempfile() if out_type == 'stdout' else open_file(resolve_home_dir(destination_file)) as file_: writer = get_writer(file_, delimiter=delimiter, quotechar=quotechar) execute_query(cursor=cursor, query=query) if headers: writer.writerow(fetch_headers(cursor=cursor)) i = 0 for row in fetch_rows(cursor=cursor): i += 1 if out_type == 'file' and i % print_info == 0: print(' ...%s rows written' % "{:,}".format(i)) row = stringify_items(row) writer.writerow(row) if out_type == 'file': print(' ...done') print('* The result has been exported to %s.\n' % (destination_file)) if out_type == 'stdout': file_to_stdout()
Run a query and store the result to a CSV file
https://github.com/gabfl/sql2csv/blob/44f18eb4243785615f24cf2169b33cc6a125f7d4/src/sql2csv.py#L217-L264
import sys import csv import tempfile from os.path import expanduser import json import argparse import pymysql.cursors import pymysql.constants.CLIENT import psycopg2.extras import psycopg2 file_ = None def get_mysql_connection(host, user, port, password, database): return pymysql.connect(host=host, user=user, port=port, password=password, db=database, charset='utf8mb4', client_flag=pymysql.constants.CLIENT.MULTI_STATEMENTS ) def get_pg_connection(host, user, port, password, database): return psycopg2.connect(host=host, user=user, port=port, password=password, dbname=database ) def get_connection(engine, host, user, port, password, database): if engine == 'mysql': return get_mysql_connection(host, user, port, password, database) elif engine == 'postgresql': return get_pg_connection(host, user, port, password, database) else: raise RuntimeError( '"%s" engine is not supported.' % (engine)) def get_cursor(connection): return connection.cursor() def execute_query(cursor, query): cursor.execute(query) def fetch_rows(cursor): for row in cursor.fetchall(): yield row def fetch_headers(cursor): return [i[0] for i in cursor.description] def discard_line(line): if line[:1] in ['', '+', '(', '-']: return True return False def remove_leading_trailing_pipe(line): return line.strip('|') def get_column_separator(input_): if input_.count('|') > input_.count('\t'): return '|' return '\t' def split_columns(line, separator='\t'): return line.split(separator) def strip_whitespaces(tpl): return [item.strip() for item in tpl] def stringify_items(row): row = list(row) for k, item in enumerate(row): if(isinstance(item, dict)): row[k] = json.dumps(item) return tuple(row) def has_stdin_input(): if not sys.stdin.isatty(): return True return False def resolve_home_dir(destination): if destination[:1] == '~': return expanduser("~") + destination[1:] return destination def open_file(destination): global file_ file_ = open(destination, 'w+', newline='') return file_ def open_tempfile(): global file_ file_ = tempfile.NamedTemporaryFile('w+', newline='', delete=False) return file_ def get_writer(file_, delimiter=',', quotechar='"'): return csv.writer( file_, delimiter=delimiter, quotechar=quotechar, quoting=csv.QUOTE_MINIMAL ) def file_to_stdout(): with open(file_.name) as f: print(f.read()) def stdin_to_csv(delimiter=',', quotechar='"'): with open_tempfile() as file_: writer = get_writer(file_, delimiter=delimiter, quotechar=quotechar) separator = None for line in sys.stdin: line.strip() if not discard_line(line): separator = get_column_separator( line) if not separator else separator line = remove_leading_trailing_pipe(line) if line.strip(): row = split_columns(line, separator) row = strip_whitespaces(row) writer.writerow(row) file_to_stdout()
MIT License
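A hedged usage sketch for `query_to_csv` above; the connection details, query, and output path are placeholders, and the import path is an assumption based on the repository layout (`src/sql2csv.py`):

from src import sql2csv  # assumed import path

sql2csv.query_to_csv(
    engine='mysql',
    host='127.0.0.1', user='root', port=3306, password='secret',
    database='my_db',
    query='SELECT id, name FROM users LIMIT 10',
    headers=True,                     # write column names as the first CSV row
    out_type='file',                  # 'stdout' would print the CSV instead
    destination_file='~/users.csv',   # '~' is expanded by resolve_home_dir
)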
wavefronthq/python-client
wavefront_api_client/models/response_container_list_integration.py
ResponseContainerListIntegration.response
python
def response(self, response): self._response = response
Sets the response of this ResponseContainerListIntegration. :param response: The response of this ResponseContainerListIntegration. # noqa: E501 :type: list[Integration]
https://github.com/wavefronthq/python-client/blob/e410ce0dd8a2334e995456f4f3d44e0f04664a3a/wavefront_api_client/models/response_container_list_integration.py#L70-L78
import pprint import re import six from wavefront_api_client.configuration import Configuration class ResponseContainerListIntegration(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'response': 'list[Integration]', 'status': 'ResponseStatus' } attribute_map = { 'response': 'response', 'status': 'status' } def __init__(self, response=None, status=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._response = None self._status = None self.discriminator = None if response is not None: self.response = response self.status = status @property def response(self): return self._response @response.setter
Apache License 2.0
yoavaviram/python-amazon-simple-product-api
amazon/api.py
AmazonAPI.search_n
python
def search_n(self, n, **kwargs): region = kwargs.get('region', self.region) kwargs.update({'region': region}) items = AmazonSearch(self.api, self.aws_associate_tag, **kwargs) return list(islice(items, n))
Search and return first N results. :param n: An integer specifying the number of results to return. :return: A list of :class:`~.AmazonProduct`.
https://github.com/yoavaviram/python-amazon-simple-product-api/blob/84c16f519e5a349a30669bc4218b9325ca37f651/amazon/api.py#L277-L288
import datetime from itertools import islice import bottlenose from lxml import objectify, etree import dateutil.parser from decimal import Decimal DOMAINS = { 'CA': 'ca', 'DE': 'de', 'ES': 'es', 'FR': 'fr', 'IN': 'in', 'IT': 'it', 'JP': 'co.jp', 'UK': 'co.uk', 'US': 'com', 'CN': 'cn' } AMAZON_ASSOCIATES_BASE_URL = 'http://www.amazon.{domain}/dp/' class AmazonException(Exception): pass class CartException(AmazonException): pass class CartInfoMismatchException(CartException): pass class AsinNotFound(AmazonException): pass class LookupException(AmazonException): pass class SearchException(AmazonException): pass class NoMorePages(SearchException): pass class RequestThrottled(AmazonException): pass class SimilartyLookupException(AmazonException): pass class BrowseNodeLookupException(AmazonException): pass class AmazonAPI(object): def __init__(self, aws_key, aws_secret, aws_associate_tag, **kwargs): if 'region' in kwargs: kwargs['Region'] = kwargs['region'] del kwargs['region'] if 'Version' not in kwargs: kwargs['Version'] = '2013-08-01' self.api = bottlenose.Amazon( aws_key, aws_secret, aws_associate_tag, **kwargs) self.aws_associate_tag = aws_associate_tag self.region = kwargs.get('Region', 'US') def lookup(self, ResponseGroup="Large", **kwargs): response = self.api.ItemLookup(ResponseGroup=ResponseGroup, **kwargs) root = objectify.fromstring(response) if root.Items.Request.IsValid == 'False': code = root.Items.Request.Errors.Error.Code msg = root.Items.Request.Errors.Error.Message raise LookupException( u"Amazon Product Lookup Error: '{0}', '{1}'".format(code, msg)) if not hasattr(root.Items, 'Item'): raise AsinNotFound("ASIN(s) not found: '{0}'".format( etree.tostring(root, pretty_print=True))) if len(root.Items.Item) > 1: return [ AmazonProduct( item, self.aws_associate_tag, self, region=self.region) for item in root.Items.Item ] else: return AmazonProduct( root.Items.Item, self.aws_associate_tag, self, region=self.region ) def lookup_bulk(self, ResponseGroup="Large", **kwargs): response = self.api.ItemLookup(ResponseGroup=ResponseGroup, **kwargs) root = objectify.fromstring(response) if not hasattr(root.Items, 'Item'): return [] return list( AmazonProduct( item, self.aws_associate_tag, self, region=self.region) for item in root.Items.Item ) def similarity_lookup(self, ResponseGroup="Large", **kwargs): response = self.api.SimilarityLookup( ResponseGroup=ResponseGroup, **kwargs) root = objectify.fromstring(response) if root.Items.Request.IsValid == 'False': code = root.Items.Request.Errors.Error.Code msg = root.Items.Request.Errors.Error.Message raise SimilartyLookupException( "Amazon Similarty Lookup Error: '{0}', '{1}'".format( code, msg)) return [ AmazonProduct( item, self.aws_associate_tag, self.api, region=self.region ) for item in getattr(root.Items, 'Item', []) ] def browse_node_lookup(self, ResponseGroup="BrowseNodeInfo", **kwargs): response = self.api.BrowseNodeLookup( ResponseGroup=ResponseGroup, **kwargs) root = objectify.fromstring(response) if root.BrowseNodes.Request.IsValid == 'False': code = root.BrowseNodes.Request.Errors.Error.Code msg = root.BrowseNodes.Request.Errors.Error.Message raise BrowseNodeLookupException( "Amazon BrowseNode Lookup Error: '{0}', '{1}'".format( code, msg)) return [AmazonBrowseNode(node.BrowseNode) for node in root.BrowseNodes] def search(self, **kwargs): region = kwargs.get('region', self.region) kwargs.update({'region': region}) return AmazonSearch(self.api, self.aws_associate_tag, **kwargs)
Apache License 2.0
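A hedged usage sketch for `AmazonAPI.search_n` above; the credentials are placeholders, `Keywords`/`SearchIndex` are standard Product Advertising API parameters passed through to `ItemSearch`, and the `title`/`asin` attributes are assumed from the library's `AmazonProduct` wrapper:

from amazon.api import AmazonAPI

api = AmazonAPI('AWS_KEY', 'AWS_SECRET', 'ASSOCIATE_TAG', region='US')
products = api.search_n(5, Keywords='python', SearchIndex='Books')  # first 5 results only
for product in products:
    print(product.title, product.asin)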
astroml/astroml
doc/sphinxext/numpy_ext/docscrape_sphinx.py
SphinxDocString._process_param
python
def _process_param(self, param, desc, fake_autosummary): param = param.strip() display_param = '**%s**' % param if not fake_autosummary: return display_param, desc param_obj = getattr(self._obj, param, None) if not (callable(param_obj) or isinstance(param_obj, property) or inspect.isgetsetdescriptor(param_obj)): param_obj = None obj_doc = pydoc.getdoc(param_obj) if not (param_obj and obj_doc): return display_param, desc prefix = getattr(self, '_name', '') if prefix: autosum_prefix = '~%s.' % prefix link_prefix = '%s.' % prefix else: autosum_prefix = '' link_prefix = '' display_param = ':obj:`{} <{}{}>`'.format(param, link_prefix, param) if obj_doc: desc = re.split('\n\\s*\n', obj_doc.strip(), 1)[0] m = re.search(r"^([A-Z].*?\.)(?:\s|$)", ' '.join(desc.split())) if m: desc = m.group(1).strip() else: desc = desc.partition('\n')[0] desc = desc.split('\n') return display_param, desc
Determine how to display a parameter Emulates autosummary behavior if fake_autosummary Parameters ---------- param : str The name of the parameter desc : list of str The parameter description as given in the docstring. This is ignored when autosummary logic applies. fake_autosummary : bool If True, autosummary-style behaviour will apply for params that are attributes of the class and have a docstring. Returns ------- display_param : str The marked up parameter name for display. This may include a link to the corresponding attribute's own documentation. desc : list of str A list of description lines. This may be identical to the input ``desc``, if ``autosum is None`` or ``param`` is not a class attribute, or it will be a summary of the class attribute's docstring. Notes ----- This does not have the autosummary functionality to display a method's signature, and hence is not used to format methods. It may be complicated to incorporate autosummary's signature mangling, as it relies on Sphinx's plugin mechanism.
https://github.com/astroml/astroml/blob/f66558232f6d33cb34ecd1bed8a80b9db7ae1c30/doc/sphinxext/numpy_ext/docscrape_sphinx.py#L83-L160
import re import inspect import textwrap import pydoc import collections import os from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment import sphinx from sphinx.jinja2glue import BuiltinTemplateLoader from .docscrape import NumpyDocString, FunctionDoc, ClassDoc IMPORT_MATPLOTLIB_RE = r'\b(import +matplotlib|from +matplotlib +import)\b' class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): NumpyDocString.__init__(self, docstring, config=config) self.load_config(config) def load_config(self, config): self.use_plots = config.get('use_plots', False) self.use_blockquotes = config.get('use_blockquotes', False) self.class_members_toctree = config.get('class_members_toctree', True) self.template = config.get('template', None) if self.template is None: template_dirs = [os.path.join(os.path.dirname(__file__), 'templates')] template_loader = FileSystemLoader(template_dirs) template_env = SandboxedEnvironment(loader=template_loader) self.template = template_env.get_template('numpydoc_docstring.rst') def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_returns(self, name='Returns'): typed_fmt = '**%s** : %s' untyped_fmt = '**%s**' out = [] if self[name]: out += self._str_field_list(name) out += [''] for param, param_type, desc in self[name]: if param_type: out += self._str_indent([typed_fmt % (param.strip(), param_type)]) else: out += self._str_indent([untyped_fmt % param.strip()]) if desc and self.use_blockquotes: out += [''] elif not desc: desc = ['..'] out += self._str_indent(desc, 8) out += [''] return out
BSD 2-Clause Simplified License
winhamwr/neckbeard
neckbeard/cloud_resource.py
InfrastructureNode.passes_health_check
python
def passes_health_check(self): status_url = self.get_health_check_url() if not status_url: logger.info("No health check defined. Assuming healthy.") return True health_check = self._deployment_info['health_check'] status_success_string = health_check['status_contains'] timeout = health_check['status_check_timeout'] try: site_status = requests.get(status_url, timeout=timeout) except ConnectionError: logger.info("health_check unavailable for %s", self) logger.debug("status url: %s", status_url) return False except Timeout: logger.info("health_check timed out for %s", self) logger.debug("status url: %s", status_url) return False except RequestException, e: logger.info("health_check raised exception for %s", self) logger.debug("status url: %s", status_url) logger.debug("Exception: %s", e) return False if status_success_string not in site_status.text: logger.debug( "Required string not present in health_check for %s", self, ) logger.debug("status url: %s", status_url) logger.debug("Required string: %s", status_success_string) return False return True
Does this node currently pass the `health_check` as defined in its configuration? If no `health_check` is defined, returns True.
https://github.com/winhamwr/neckbeard/blob/2fbeda8f217c43936c941bb9ce3ba6d23be675f9/neckbeard/cloud_resource.py#L380-L420
import logging import time import boto.exception import dateutil.parser import requests from boto.ec2 import elb from requests.exceptions import ( ConnectionError, Timeout, RequestException, ) from simpledb import models from neckbeard.output import fab_out_opts NODE_AWS_TYPES = ['ec2', 'rds', 'elb'] EC2_RETIRED_STATES = ['shutting-down', 'terminated'] RDS_RETIRED_STATES = ['deleted'] logger = logging.getLogger('cloud_resource') fab_output_hides = fab_out_opts[logger.getEffectiveLevel()] fab_quiet = fab_output_hides + ['stderr'] REQUIRED_CONFIGURATION = { 'ec2': [ 'aws.keypair', ], } OPTIONAL_CONFIGURATION = { 'ec2': [ 'aws.elastic_ip', ], } class InfrastructureNode(models.Model): nodename = models.ItemName() generation_id = models.NumberField(required=True) deployment_name = models.Field(required=True) aws_type = models.Field(required=True) aws_id = models.Field(required=True) name = models.Field(required=True) creation_date = models.DateTimeField(required=True) is_running = models.NumberField(default=1, required=True) is_active_generation = models.NumberField(default=0, required=True) initial_deploy_complete = models.NumberField(default=0, required=True) def __init__(self, *args, **kwargs): self.ec2conn = None self.rdsconn = None self.elbconn = None self._boto_instance = None self._deployment_info = None super(InfrastructureNode, self).__init__(*args, **kwargs) def __str__(self): if self.aws_type in NODE_AWS_TYPES: output_str = '%s:%s[%s]<%s>' % ( self.aws_type, self.name, self.aws_id, self.creation_date, ) return output_str return super(InfrastructureNode, self).__str__() def save(self): logger.critical("Called save on %s", self) return def get_status_output(self): if self.aws_type in NODE_AWS_TYPES: status_str = '' if not self.is_running: status_str += 'RETIRED-' else: if self.is_operational: status_str += 'UP-' else: status_str += 'INACTIVE-' if not self.is_healthy: status_str += 'UNHEALTHY-' return "%s-%s" % (status_str, self) return "UNKNOWN-%s" % self def set_aws_conns(self, ec2conn, rdsconn): self.ec2conn = ec2conn self.rdsconn = rdsconn def set_deployment_info(self, deployment_info): self._deployment_info = deployment_info def is_actually_running(self): if self.aws_type == 'ec2': if self.boto_instance: if self.boto_instance.state not in EC2_RETIRED_STATES: return True elif self.aws_type == 'rds': if self.boto_instance: if self.boto_instance.status not in RDS_RETIRED_STATES: return True return False def terminate(self): if (self.is_active_generation and self.is_operational): raise Exception("Can't hard-terminate an active, operational node") if self.aws_type == 'ec2': if self.is_actually_running(): self.boto_instance.terminate() elif self.aws_type == 'rds': if self.is_actually_running(): final_snapshot = self._deployment_info.get( 'final_snapshot', None, ) if final_snapshot: self.boto_instance.stop( skip_final_snapshot=False, final_snapshot_id=final_snapshot, ) else: self.boto_instance.stop( skip_final_snapshot=True, final_snapshot_id=None) self.is_running = 0 self.save() def retire(self): if (self.is_active_generation and self.is_operational): raise Exception("Can't retire an active, operational node") self.is_running = 0 self.save() def make_temporarily_inoperative(self): if self.aws_type == 'ec2': self._remove_from_loadbalancer() elif self.aws_type == 'rds': pass def _remove_from_loadbalancer(self): if self.aws_type != 'ec2': return loadbalancer = self.get_loadbalancer() if not loadbalancer: return if not self._instance_in_load_balancer(): logger.debug( 
"_remove_from_loadbalancer: Instance %s not in loadbalancer", self.boto_instance, ) return logger.info( "Removing node from loadbalancer: %s", loadbalancer, ) loadbalancer.deregister_instances([self.aws_id]) def make_fully_inoperative(self): if self.aws_type == 'ec2': elastic_ip = self.get_elastic_ip() if elastic_ip and elastic_ip.instance_id: if elastic_ip.instance_id == self.boto_instance.id: logger.info( "Dissociating elastic IP %s from instance %s", elastic_ip, elastic_ip.instance_id, ) self.ec2conn.disassociate_address(elastic_ip.public_ip) self._remove_from_loadbalancer() elif self.aws_type == 'rds': pass def refresh_boto_instance(self): self._boto_instance = None @property def boto_instance(self): if not self._boto_instance: if self.aws_type == 'ec2': reservations = self.ec2conn.get_all_instances( instance_ids=[self.aws_id]) if len(reservations) == 1: self._boto_instance = reservations[0].instances[0] elif self.aws_type == 'rds': try: db_instances = self.rdsconn.get_all_dbinstances( instance_id=self.aws_id) except boto.exception.BotoServerError: return self._boto_instance if len(db_instances) == 1: self._boto_instance = db_instances[0] return self._boto_instance @property def launch_time(self): if not self.boto_instance: return None if self.aws_type == 'ec2': return dateutil.parser.parse(self.boto_instance.launch_time) elif self.aws_type == 'rds': return dateutil.parser.parse(self.boto_instance.create_time) def _instance_in_load_balancer(self): loadbalancer = self.get_loadbalancer() if self.boto_instance is None: return False if loadbalancer is None: return False ids_in_lb = [i.id for i in loadbalancer.instances] return self.boto_instance.id in ids_in_lb @property def is_operational(self): if not self.boto_instance: return False if not self._deployment_info: logger.critical( "No deployment configuration found for node: %s", self, ) logger.critical( "Unable to determine operational status. " "Assuming NOT operational." 
) return False if self.aws_type == 'ec2': key_name = self._deployment_info['aws']['keypair'] elastic_ip = self.get_elastic_ip() loadbalancer = self.get_loadbalancer() if self.boto_instance.state != 'running': logger.debug( "is_operational: Instance %s not running", self.boto_instance, ) return False if self.boto_instance.key_name != key_name: logger.debug( "is_operational: Instance %s has wrong key", self.boto_instance, ) return False if elastic_ip: if self.boto_instance.id != elastic_ip.instance_id: logger.debug( "is_operational: Instance %s has wrong elastic ip", self.boto_instance, ) return False if loadbalancer: if not self._instance_in_load_balancer(): logger.debug( "is_operational: Instance %s not in loadbalancer", self.boto_instance, ) logger.debug( 'Instances in loadbalancer: %s', loadbalancer.instances, ) return False health_list = loadbalancer.get_instance_health( instances=[self.aws_id]) assert len(health_list) == 1 if health_list[0].state != 'InService': logger.debug( "is_operational: Node %s not healthy in loadbalancer.", self.boto_instance, ) logger.debug("LB health state: %s", health_list[0].state) return False return True elif self.aws_type == 'rds': if self.boto_instance.status != 'available': logger.debug( "is_operational: Instance %s not available", self.boto_instance, ) return False return True return False def get_health_check_url(self): if 'health_check' not in self._deployment_info: return None if not self.boto_instance.public_dns_name: logger.debug( "No health check url due to no public dns name", ) return None health_check = self._deployment_info['health_check'] status_url = health_check['status_url'] status_url = 'http://%s%s' % ( self.boto_instance.public_dns_name, status_url, ) return status_url
BSD 3-Clause New or Revised License
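A sketch of the `health_check` configuration that `passes_health_check` above reads; the key names come straight from the method, while the values are illustrative:

deployment_info = {
    'health_check': {
        'status_url': '/health/',       # appended to the node's public DNS name
        'status_contains': 'OK',        # string that must appear in the response body
        'status_check_timeout': 5,      # seconds passed to requests.get(timeout=...)
    },
}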
chengyangfu/pytorch-twitch-lol
vgg.py
vgg19
python
def vgg19(pretrained=False, **kwargs): model = VGG(make_layers(cfg['E']), **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['vgg19'])) return model
VGG 19-layer model (configuration "E") Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
https://github.com/chengyangfu/pytorch-twitch-lol/blob/618fe5a268c68d5c3e91850cc73e93a1fffb2b8a/vgg.py#L164-L173
import math import torch.nn as nn import torch.utils.model_zoo as model_zoo from torch.nn.parameter import Parameter __all__ = [ 'VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', 'vgg19_bn', 'vgg19', ] model_urls = { 'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth', 'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth', 'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth', 'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth', } class VGG(nn.Module): def __init__(self, features, num_classes=1000): super(VGG, self).__init__() self.features = features self.classifier = nn.Sequential( nn.Linear(512 * 7 * 7, 4096), nn.ReLU(True), nn.Dropout(), nn.Linear(4096, 4096), nn.ReLU(True), ) self.final = nn.Linear(4096, num_classes) self._initialize_weights() def forward(self, img): conv_feature = self.features(img) fc_feature = conv_feature.view(conv_feature.size(0), -1) last_feature = self.classifier(fc_feature) prediction = self.final(last_feature) return prediction def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. / n)) if m.bias is not None: m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.Linear): n = m.weight.size(1) m.weight.data.normal_(0, 0.01) m.bias.data.zero_() def load_pretrained_model(self, state_dict): own_state = self.state_dict() for name, param in state_dict.items(): if name not in own_state: print "=>{} is in the pretrained model, but not in the current model".format(name) continue print "=> loading {}".format(name) if isinstance(param, Parameter): param = param.data own_state[name].copy_(param) missing = set(own_state.keys()) - set(state_dict.keys()) print ("The following parameters are not set by the pre-trained model:", missing) def make_layers(cfg, batch_norm=False): layers = [] in_channels = 3 for v in cfg: if v == 'M': layers += [nn.MaxPool2d(kernel_size=2, stride=2)] else: conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) if batch_norm: layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] else: layers += [conv2d, nn.ReLU(inplace=True)] in_channels = v return nn.Sequential(*layers) cfg = { 'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], 'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'], 'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'], 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'], } def vgg11(pretrained=False, **kwargs): model = VGG(make_layers(cfg['A']), **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['vgg11'])) return model def vgg11_bn(**kwargs): return VGG(make_layers(cfg['A'], batch_norm=True), **kwargs) def vgg13(pretrained=False, **kwargs): model = VGG(make_layers(cfg['B']), **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['vgg13'])) return model def vgg13_bn(**kwargs): return VGG(make_layers(cfg['B'], batch_norm=True), **kwargs) def vgg16(pretrained=False, **kwargs): model = VGG(make_layers(cfg['D']), **kwargs) if pretrained: model.load_pretrained_model(model_zoo.load_url(model_urls['vgg16'])) return model def vgg16_bn(**kwargs): return VGG(make_layers(cfg['D'], batch_norm=True), **kwargs)
MIT License
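A minimal usage sketch for `vgg19` above; `pretrained=False` avoids the weight download, and the import path is an assumption based on the file name `vgg.py`:

import torch
from vgg import vgg19  # assumed module name

model = vgg19(pretrained=False, num_classes=10)
model.eval()
with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))  # ImageNet-sized dummy batch
print(logits.shape)  # torch.Size([1, 10])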
whimian/pygeostatistics
pygeostatistics/gslib_reader.py
SpatialData._read_data
python
def _read_data(self): column_name = [] with open(self.datafl, 'r') as fin: _ = fin.readline().strip() ncols = int(fin.readline().strip()) for _ in range(ncols): column_name.append(fin.readline().strip()) self.property_name = [item for item in column_name if item not in ['x', 'y', 'z']] df = pd.read_csv(self.datafl, sep='\t', header=None, names=column_name, skiprows=ncols+2) if 'z' not in column_name: self._2d = True column_name.append('z') df['z'] = 0 self.df = df data_dtype = np.dtype({ 'names': column_name, 'formats': ['f8'] * len(column_name)}) self.vr = np.core.records.fromarrays( df.values.transpose(), dtype=data_dtype)
read gslib file
https://github.com/whimian/pygeostatistics/blob/e119c4e47c57e0dc1ba3ff13e45782d0e33e0c36/pygeostatistics/gslib_reader.py#L25-L50
from __future__ import absolute_import, division, print_function __author__ = "yuhao" import numpy as np import pandas as pd from scipy.spatial.distance import pdist from mpl_toolkits.mplot3d import Axes3D class SpatialData(object): def __init__(self, file_path): self.datafl = file_path self.vr = None self.property_name = None self._2d = False self._read_data()
MIT License
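A sketch of the GSLIB-style layout `_read_data` above expects (title line, column count, one column name per line, then tab-separated rows); the file contents are illustrative:

example = """my 2D dataset
3
x
y
porosity
0.0\t0.0\t0.21
1.0\t0.0\t0.19
"""
with open('example.gslib', 'w') as fout:
    fout.write(example)

# from pygeostatistics.gslib_reader import SpatialData
# data = SpatialData('example.gslib')
# print(data.property_name)   # ['porosity']; a zero 'z' column is added for 2D data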
facebookresearch/blink
elq/biencoder/biencoder.py
GetContextEmbedsHead.__init__
python
def __init__(self, mention_aggregation_type, ctxt_output_dim, cand_output_dim, dropout=0.1): super(GetContextEmbedsHead, self).__init__() self.mention_aggregation_type = mention_aggregation_type.split('_') self.tokens_to_aggregate = self.mention_aggregation_type[0] self.aggregate_method = "_".join(self.mention_aggregation_type[1:]) self.dropout = nn.Dropout(dropout) if self.mention_aggregation_type == 'all_avg' or self.mention_aggregation_type == 'none': assert ctxt_output_dim == cand_output_dim if self.aggregate_method == 'linear': self.mention_agg_linear = nn.Linear(ctxt_output_dim * 2, cand_output_dim) elif self.aggregate_method == 'avg_linear': self.mention_agg_linear = nn.Linear(ctxt_output_dim, cand_output_dim) elif self.aggregate_method == 'mlp': self.mention_agg_mlp = nn.Sequential( nn.Linear(bert_output_dim, bert_output_dim), nn.ReLU(), nn.Dropout(0.1), nn.Linear(bert_output_dim, output_dim), ) else: self.mention_agg_mlp = None
mention_aggregation_type `all_avg`: average across tokens in mention `fl_avg`: average across first/last tokens in mention `{all/fl}_linear`: linear layer over mention `{all/fl}_mlp`: MLP over mention
https://github.com/facebookresearch/blink/blob/5fe254dd64d37332347edc73738edcb56096183f/elq/biencoder/biencoder.py#L146-L175
import os import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from tqdm import tqdm from collections import OrderedDict from pytorch_transformers.modeling_bert import ( BertPreTrainedModel, BertConfig, BertModel, ) from pytorch_transformers.tokenization_bert import BertTokenizer from elq.common.ranker_base import BertEncoder, get_model_obj from blink.common.optimizer import get_bert_optimizer from elq.biencoder.allennlp_span_utils import batched_span_select, batched_index_select from elq.biencoder.utils import batch_reshape_mask_left def load_biencoder(params): biencoder = BiEncoderRanker(params) return biencoder def get_submodel_from_state_dict(state_dict, prefix): new_state_dict = OrderedDict() for key, value in state_dict.items(): if key.startswith(prefix): key = key[len(prefix)+1:] new_state_dict[key] = value return new_state_dict class MentionScoresHead(nn.Module): def __init__( self, bert_output_dim, scoring_method="qa_linear", max_mention_length=10, ): super(MentionScoresHead, self).__init__() self.scoring_method = scoring_method self.max_mention_length = max_mention_length if self.scoring_method == "qa_linear": self.bound_classifier = nn.Linear(bert_output_dim, 3) elif self.scoring_method == "qa_mlp" or self.scoring_method == "qa": self.bound_classifier = nn.Sequential( nn.Linear(bert_output_dim, bert_output_dim), nn.ReLU(), nn.Dropout(0.1), nn.Linear(bert_output_dim, 3), ) else: raise NotImplementedError() def forward(self, bert_output, mask_ctxt): logits = self.bound_classifier(bert_output) if self.scoring_method[:2] == "qa": start_logprobs, end_logprobs, mention_logprobs = logits.split(1, dim=-1) start_logprobs = start_logprobs.squeeze(-1) end_logprobs = end_logprobs.squeeze(-1) mention_logprobs = mention_logprobs.squeeze(-1) start_logprobs[~mask_ctxt] = -float("Inf") end_logprobs[~mask_ctxt] = -float("Inf") mention_logprobs[~mask_ctxt] = -float("Inf") mention_scores = start_logprobs.unsqueeze(2) + end_logprobs.unsqueeze(1) mention_cum_scores = torch.zeros(mention_scores.size(), dtype=mention_scores.dtype).to(mention_scores.device) mention_logprobs_end_cumsum = torch.zeros(mask_ctxt.size(0), dtype=mention_scores.dtype).to(mention_scores.device) for i in range(mask_ctxt.size(1)): mention_logprobs_end_cumsum += mention_logprobs[:,i] mention_cum_scores[:,:,i] += mention_logprobs_end_cumsum.unsqueeze(-1) mention_logprobs_start_cumsum = torch.zeros(mask_ctxt.size(0), dtype=mention_scores.dtype).to(mention_scores.device) for i in range(mask_ctxt.size(1)-1): mention_logprobs_start_cumsum += mention_logprobs[:,i] mention_cum_scores[:,(i+1),:] -= mention_logprobs_start_cumsum.unsqueeze(-1) mention_scores += mention_cum_scores mention_bounds = torch.stack([ torch.arange(mention_scores.size(1)).unsqueeze(-1).expand(mention_scores.size(1), mention_scores.size(2)), torch.arange(mention_scores.size(1)).unsqueeze(0).expand(mention_scores.size(1), mention_scores.size(2)), ], dim=-1).to(mask_ctxt.device) mention_sizes = mention_bounds[:,:,1] - mention_bounds[:,:,0] + 1 valid_mask = (mention_sizes.unsqueeze(0) > 0) & mask_ctxt.unsqueeze(1) mention_scores[~valid_mask] = -float("inf") mention_scores = mention_scores.view(mention_scores.size(0), -1) mention_bounds = mention_bounds.view(-1, 2) mention_bounds = mention_bounds.unsqueeze(0).expand(mention_scores.size(0), mention_scores.size(1), 2) if self.max_mention_length is not None: mention_scores, mention_bounds = self.filter_by_mention_size( mention_scores, mention_bounds, self.max_mention_length, ) return 
mention_scores, mention_bounds def filter_by_mention_size(self, mention_scores, mention_bounds, max_mention_length): mention_bounds_mask = (mention_bounds[:,:,1] - mention_bounds[:,:,0] <= max_mention_length) mention_scores = mention_scores[mention_bounds_mask] mention_scores = mention_scores.view(mention_bounds_mask.size(0),-1) mention_bounds = mention_bounds[mention_bounds_mask] mention_bounds = mention_bounds.view(mention_bounds_mask.size(0),-1,2) return mention_scores, mention_bounds class GetContextEmbedsHead(nn.Module):
MIT License
bovee/aston
aston/trace/math_traces.py
movingaverage
python
def movingaverage(arr, window): m = np.ones(int(window)) / int(window) return scipy.ndimage.convolve1d(arr, m, axis=0, mode='reflect')
Calculates the moving average ("rolling mean") of an array using a window of the given size.
https://github.com/bovee/aston/blob/315871346df72b3e8fcfa9943e8a3519e60299ff/aston/trace/math_traces.py#L73-L79
import struct import zlib import numpy as np import scipy.ndimage from aston.trace import Chromatogram, Trace def series_from_str(val, times, name=''): def is_num(x): try: float(x) return True except ValueError: return False if ',' in val: tpts = dict([tpt.split(':') for tpt in val.split(',')]) valid_x = [v for v in tpts if is_num(v)] x = np.array([float(v) for v in valid_x]) y = np.array([float(tpts[v]) for v in valid_x]) srt_ind = np.argsort(x) if 'S' in tpts: d = np.interp(times, x[srt_ind], y[srt_ind], float(tpts['S'])) else: d = np.interp(times, x[srt_ind], y[srt_ind]) elif is_num(val): d = np.ones(times.shape) * float(val) else: d = np.ones(times.shape) * np.nan return Trace(d, times, name=name) def fft(ts): t_step = ts.index[1] - ts.index[0] oc = np.abs(np.fft.fftshift(np.fft.fft(ts.values))) / len(ts.values) t = np.fft.fftshift(np.fft.fftfreq(len(oc), d=t_step)) return Trace(oc, t) def ifft(ic, t): raise NotImplementedError def noisefilter(arr, bandwidth=0.2): i = np.fft.fftshift(np.fft.fft(arr)) p = np.zeros(len(i), dtype=complex) c1 = len(i) / 2 r = float(bandwidth) r = int((r * len(i)) / 2) for i in range(c1 - r, c1 + r): p[i] = i[i] return np.real(np.fft.ifft(np.fft.ifftshift(p)))
BSD 3-Clause New or Revised License
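A quick numeric check of `movingaverage` above: with a window of 3, each sample is averaged with its two neighbours, and the edges are handled by reflection:

import numpy as np
from aston.trace.math_traces import movingaverage

arr = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
print(movingaverage(arr, 3))   # ~[1.33, 2.0, 3.0, 4.0, 4.67]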
tdicola/smartalarmclock
SmartAlarmClock/www/temboo/Library/Google/Calendar/UpdateCalendar.py
UpdateCalendar.__init__
python
def __init__(self, temboo_session): Choreography.__init__(self, temboo_session, '/Library/Google/Calendar/UpdateCalendar')
Create a new instance of the UpdateCalendar Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
https://github.com/tdicola/smartalarmclock/blob/0cc704c3998a724942e4c44e9692aaefe50baa0a/SmartAlarmClock/www/temboo/Library/Google/Calendar/UpdateCalendar.py#L21-L26
from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class UpdateCalendar(Choreography):
MIT License
microsoft/nn-meter
nn_meter/prediction/predictors/predict_by_kernel.py
predict_model
python
def predict_model(model, predictors): py = 0 dicts = {} for layer in model: kernel = list(model[layer].keys())[0] features = model[layer][kernel] rkernel = merge_conv_kernels(kernel) if rkernel not in dicts: dicts[rkernel] = [] dicts[rkernel].append(features) for kernel in dicts: kernelname = get_kernel_name(kernel) if kernelname in predictors: pred = predictors[kernelname] pys = pred.predict(dicts[kernel]) if len(pys) != 0: py += sum(pys) return py
@params: model: the model config with prediction features predictors: loaded pkl predictors
https://github.com/microsoft/nn-meter/blob/a478371d309a53fdd19f4d1c497017c5aabb902a/nn_meter/prediction/predictors/predict_by_kernel.py#L19-L43
from .utils import get_kernel_name from .extract_feature import get_predict_features def merge_conv_kernels(kernelname): if "conv" in kernelname and "dwconv" not in kernelname: return "conv-bn-relu" elif "dwconv" in kernelname: return "dwconv-bn-relu" else: return kernelname
MIT License
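A sketch of the inputs `predict_model` above consumes, inferred from how it iterates them; the layer names, kernel names, feature vectors, and `DummyPredictor` are illustrative stand-ins for the real pickled regressors:

class DummyPredictor:
    def predict(self, feature_rows):
        return [1.5 for _ in feature_rows]   # pretend every kernel costs 1.5 ms

model = {                                    # {layer_name: {kernel_name: feature_vector}}
    'layer0': {'conv-bn-relu': [224, 224, 3, 32, 3, 1]},
    'layer1': {'dwconv-bn-relu': [112, 112, 32, 32, 3, 1]},
}
predictors = {'conv-bn-relu': DummyPredictor(), 'dwconv-bn-relu': DummyPredictor()}
# predict_model(model, predictors) then sums the per-kernel predictions, assuming the
# predictor keys match what get_kernel_name returns for each merged kernel name.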
google/tensornetwork
tensornetwork/backends/pytorch/decompositions.py
svd
python
def svd( torch: Any, tensor: Tensor, pivot_axis: int, max_singular_values: Optional[int] = None, max_truncation_error: Optional[float] = None, relative: Optional[bool] = False) -> Tuple[Tensor, Tensor, Tensor, Tensor]: left_dims = list(tensor.shape)[:pivot_axis] right_dims = list(tensor.shape)[pivot_axis:] tensor = torch.reshape(tensor, (np.prod(left_dims), np.prod(right_dims))) u, s, v = torch.svd(tensor) if max_singular_values is None: max_singular_values = s.nelement() if max_truncation_error is not None: s_sorted, _ = torch.sort(s**2) trunc_errs = torch.sqrt(torch.cumsum(s_sorted, 0)) if relative: abs_max_truncation_error = max_truncation_error * s[0] else: abs_max_truncation_error = max_truncation_error num_sing_vals_err = torch.nonzero( trunc_errs > abs_max_truncation_error).nelement() else: num_sing_vals_err = max_singular_values num_sing_vals_keep = min(max_singular_values, num_sing_vals_err) s = s.type(tensor.type()) s_rest = s[num_sing_vals_keep:] s = s[:num_sing_vals_keep] u = u[:, :num_sing_vals_keep] v = v[:, :num_sing_vals_keep] vh = torch.transpose(v, 0, 1) dim_s = s.shape[0] u = torch.reshape(u, left_dims + [dim_s]) vh = torch.reshape(vh, [dim_s] + right_dims) return u, s, vh, s_rest
Computes the singular value decomposition (SVD) of a tensor. The SVD is performed by treating the tensor as a matrix, with an effective left (row) index resulting from combining the axes `tensor.shape[:pivot_axis]` and an effective right (column) index resulting from combining the axes `tensor.shape[pivot_axis:]`. For example, if `tensor` had a shape (2, 3, 4, 5) and `pivot_axis` was 2, then `u` would have shape (2, 3, 6), `s` would have shape (6), and `vh` would have shape (6, 4, 5). If `max_singular_values` is set to an integer, the SVD is truncated to keep at most this many singular values. If `max_truncation_error > 0`, as many singular values will be truncated as possible, so that the truncation error (the norm of discarded singular values) is at most `max_truncation_error`. If `relative` is set `True` then `max_truncation_error` is understood relative to the largest singular value. If both `max_singular_values` and `max_truncation_error` are specified, the number of retained singular values will be `min(max_singular_values, nsv_auto_trunc)`, where `nsv_auto_trunc` is the number of singular values that must be kept to maintain a truncation error smaller than `max_truncation_error`. The output consists of three tensors `u, s, vh` such that: ```python u[i1,...,iN, j] * s[j] * vh[j, k1,...,kM] == tensor[i1,...,iN, k1,...,kM] ``` Note that the output ordering matches numpy.linalg.svd rather than tf.svd. Args: torch: The pytorch module. tensor: A tensor to be decomposed. pivot_axis: Where to split the tensor's axes before flattening into a matrix. max_singular_values: The number of singular values to keep, or `None` to keep them all. max_truncation_error: The maximum allowed truncation error or `None` to not do any truncation. relative: Multiply `max_truncation_error` with the largest singular value. Returns: u: Left tensor factor. s: Vector of ordered singular values from largest to smallest. vh: Right tensor factor. s_rest: Vector of discarded singular values (length zero if no truncation).
https://github.com/google/tensornetwork/blob/e12580f1749493dbe05f474d2fecdec4eaba73c5/tensornetwork/backends/pytorch/decompositions.py#L22-L121
from typing import Optional, Tuple, Any import numpy as np Tensor = Any
Apache License 2.0
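A usage sketch for the PyTorch-backend `svd` above, assuming `tensornetwork` and `torch` are installed; the shapes follow the (2, 3, 4, 5) example in the docstring, with `pivot_axis=2` flattening the tensor into a 6 x 20 matrix:

import torch
from tensornetwork.backends.pytorch import decompositions

tensor = torch.randn(2, 3, 4, 5)
u, s, vh, s_rest = decompositions.svd(torch, tensor, pivot_axis=2)
print(u.shape, s.shape, vh.shape)   # (2, 3, 6), (6,), (6, 4, 5); s_rest is empty (no truncation)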
yfauser/planespotter
app-server/app/lib/python2.7/site-packages/flask_restless/helpers.py
assign_attributes
python
def assign_attributes(model, **kwargs): cls = type(model) for field, value in kwargs.items(): if not hasattr(cls, field): msg = '{0} has no field named "{1!r}"'.format(cls.__name__, field) raise TypeError(msg) setattr(model, field, value)
Assign all attributes from the supplied `kwargs` dictionary to the model. This does the same thing as the default declarative constructor, when provided a dictionary of attributes and values.
https://github.com/yfauser/planespotter/blob/d400216502b6b5592a4889eb9fa277b2ddb75f9b/app-server/app/lib/python2.7/site-packages/flask_restless/helpers.py#L198-L209
import datetime import inspect import uuid from dateutil.parser import parse as parse_datetime from sqlalchemy import Date from sqlalchemy import DateTime from sqlalchemy import Interval from sqlalchemy.exc import NoInspectionAvailable from sqlalchemy.exc import OperationalError from sqlalchemy.ext.associationproxy import AssociationProxy from sqlalchemy.ext import hybrid from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ColumnProperty from sqlalchemy.orm import RelationshipProperty as RelProperty from sqlalchemy.orm.attributes import InstrumentedAttribute from sqlalchemy.orm.attributes import QueryableAttribute from sqlalchemy.orm.query import Query from sqlalchemy.sql import func from sqlalchemy.sql.expression import ColumnElement from sqlalchemy.inspection import inspect as sqlalchemy_inspect RELATION_BLACKLIST = ('query', 'query_class', '_sa_class_manager', '_decl_class_registry') COLUMN_BLACKLIST = ('_sa_polymorphic_on', ) COLUMN_TYPES = (InstrumentedAttribute, hybrid_property) CURRENT_TIME_MARKERS = ('CURRENT_TIMESTAMP', 'CURRENT_DATE', 'LOCALTIMESTAMP') def partition(l, condition): return [x for x in l if condition(x)], [x for x in l if not condition(x)] def session_query(session, model): if hasattr(model, 'query'): if callable(model.query): query = model.query() else: query = model.query if hasattr(query, 'filter'): return query return session.query(model) def upper_keys(d): return dict(zip((k.upper() for k in d.keys()), d.values())) def get_columns(model): columns = {} for superclass in model.__mro__: for name, column in superclass.__dict__.items(): if isinstance(column, COLUMN_TYPES): columns[name] = column return columns def get_relations(model): return [k for k in dir(model) if not (k.startswith('__') or k in RELATION_BLACKLIST) and get_related_model(model, k)] def get_related_model(model, relationname): if hasattr(model, relationname): attr = getattr(model, relationname) if hasattr(attr, 'property') and isinstance(attr.property, RelProperty): return attr.property.mapper.class_ if isinstance(attr, AssociationProxy): return get_related_association_proxy_model(attr) return None def get_related_association_proxy_model(attr): prop = attr.remote_attr.property for attribute in ('mapper', 'parent'): if hasattr(prop, attribute): return getattr(prop, attribute).class_ return None def has_field(model, fieldname): descriptors = sqlalchemy_inspect(model).all_orm_descriptors._data if fieldname in descriptors and hasattr(descriptors[fieldname], 'fset'): return descriptors[fieldname].fset is not None return hasattr(model, fieldname) def get_field_type(model, fieldname): field = getattr(model, fieldname) if isinstance(field, ColumnElement): fieldtype = field.type else: if isinstance(field, AssociationProxy): field = field.remote_attr if hasattr(field, 'property'): prop = field.property if isinstance(prop, RelProperty): return None fieldtype = prop.columns[0].type else: return None return fieldtype def is_date_field(model, fieldname): fieldtype = get_field_type(model, fieldname) return isinstance(fieldtype, Date) or isinstance(fieldtype, DateTime) def is_interval_field(model, fieldname): fieldtype = get_field_type(model, fieldname) return isinstance(fieldtype, Interval)
MIT License
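A sketch of `assign_attributes` above against a minimal SQLAlchemy model; the `Person` model is illustrative and not part of Flask-Restless:

from flask_restless.helpers import assign_attributes
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(String)

person = Person()
assign_attributes(person, name='Ada')   # sets person.name like the declarative constructor
# assign_attributes(person, age=37)     # would raise TypeError: Person has no field 'age'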
openstack/cyborg
cyborg/db/sqlalchemy/api.py
Connection.attach_handle_allocate
python
def attach_handle_allocate(self, context, deployable_id): try: ah = self._do_allocate_attach_handle( context, deployable_id) except NoResultFound: msg = 'Matching deployable_id {0}'.format(deployable_id) raise exception.ResourceNotFound( resource='AttachHandle', msg=msg) return ah
Allocate an attach handle with given deployable. To allocate is to get an unused resource and mark it as in_use.
https://github.com/openstack/cyborg/blob/17b18248f45aafa0cbc04a3307bd83f3deceb8e1/cyborg/db/sqlalchemy/api.py#L284-L296
import copy import threading import uuid from oslo_db import api as oslo_db_api from oslo_db import exception as db_exc from oslo_db.sqlalchemy import enginefacade from oslo_db.sqlalchemy import utils as sqlalchemyutils from oslo_log import log from oslo_utils import strutils from oslo_utils import timeutils from oslo_utils import uuidutils from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm import load_only from cyborg.common import exception from cyborg.common.i18n import _ from cyborg.db import api from cyborg.db.sqlalchemy import models _CONTEXT = threading.local() LOG = log.getLogger(__name__) main_context_manager = enginefacade.transaction_context() def get_backend(): return Connection() def _session_for_read(): return enginefacade.reader.using(_CONTEXT) def _session_for_write(): return enginefacade.writer.using(_CONTEXT) def get_session(use_slave=False, **kwargs): return main_context_manager._factory.get_legacy_facade().get_session( use_slave=use_slave, **kwargs) def model_query(context, model, *args, **kwargs): if kwargs.pop("project_only", False): kwargs["project_id"] = context.tenant with _session_for_read() as session: query = sqlalchemyutils.model_query( model, session, args, **kwargs) return query def add_identity_filter(query, value): if strutils.is_int_like(value): return query.filter_by(id=value) elif uuidutils.is_uuid_like(value): return query.filter_by(uuid=value) else: raise exception.InvalidIdentity(identity=value) def _paginate_query(context, model, query, limit=None, marker=None, sort_key=None, sort_dir=None): sort_keys = ['id'] if sort_key and sort_key not in sort_keys: sort_keys.insert(0, sort_key) try: query = sqlalchemyutils.paginate_query(query, model, limit, sort_keys, marker=marker, sort_dir=sort_dir) except db_exc.InvalidSortKey: raise exception.InvalidParameterValue( _('The sort_key value "%(key)s" is an invalid field for sorting') % {'key': sort_key}) return query.all() class Connection(api.Connection): def __init__(self): pass def attach_handle_create(self, context, values): if not values.get('uuid'): values['uuid'] = uuidutils.generate_uuid() attach_handle = models.AttachHandle() attach_handle.update(values) with _session_for_write() as session: try: session.add(attach_handle) session.flush() except db_exc.DBDuplicateEntry: raise exception.AttachHandleAlreadyExists(uuid=values['uuid']) return attach_handle def attach_handle_get_by_uuid(self, context, uuid): query = model_query( context, models.AttachHandle).filter_by(uuid=uuid) try: return query.one() except NoResultFound: raise exception.ResourceNotFound( resource='AttachHandle', msg='with uuid=%s' % uuid) def attach_handle_get_by_id(self, context, id): query = model_query( context, models.AttachHandle).filter_by(id=id) try: return query.one() except NoResultFound: raise exception.ResourceNotFound( resource='AttachHandle', msg='with id=%s' % id) def attach_handle_list_by_type(self, context, attach_type='PCI'): query = model_query(context, models.AttachHandle). 
filter_by(attach_type=attach_type) try: return query.all() except NoResultFound: raise exception.ResourceNotFound( resource='AttachHandle', msg='with type=%s' % attach_type) def attach_handle_get_by_filters(self, context, filters, sort_key='created_at', sort_dir='desc', limit=None, marker=None, join_columns=None): if limit == 0: return [] query_prefix = model_query(context, models.AttachHandle) filters = copy.deepcopy(filters) exact_match_filter_names = ['uuid', 'id', 'deployable_id', 'cpid_id'] query_prefix = self._exact_filter(models.AttachHandle, query_prefix, filters, exact_match_filter_names) if query_prefix is None: return [] return _paginate_query(context, models.AttachHandle, query_prefix, limit, marker, sort_key, sort_dir) def _exact_filter(self, model, query, filters, legal_keys=None): if filters is None: filters = {} if legal_keys is None: legal_keys = [] filter_dict = {} for key in legal_keys: if key not in filters: continue value = filters.pop(key) if isinstance(value, (list, tuple, set, frozenset)): if not value: return None column_attr = getattr(model, key) query = query.filter(column_attr.in_(value)) else: filter_dict[key] = value if filter_dict: query = query.filter(*[getattr(model, k) == v for k, v in filter_dict.items()]) return query def attach_handle_list(self, context): query = model_query(context, models.AttachHandle) return _paginate_query(context, models.AttachHandle, query=query) def attach_handle_update(self, context, uuid, values): if 'uuid' in values: msg = _("Cannot overwrite UUID for an existing AttachHandle.") raise exception.InvalidParameterValue(err=msg) return self._do_update_attach_handle(context, uuid, values) @oslo_db_api.retry_on_deadlock def _do_update_attach_handle(self, context, uuid, values): with _session_for_write(): query = model_query(context, models.AttachHandle) query = add_identity_filter(query, uuid) try: ref = query.with_for_update().one() except NoResultFound: raise exception.ResourceNotFound( resource='AttachHandle', msg='with uuid=%s' % uuid) ref.update(values) return ref @oslo_db_api.retry_on_deadlock def _do_allocate_attach_handle(self, context, deployable_id): with _session_for_write() as session: query = model_query(context, models.AttachHandle). filter_by(deployable_id=deployable_id, in_use=False) values = {"in_use": True} ref = query.with_for_update().first() if not ref: msg = 'Matching deployable_id {0}'.format(deployable_id) raise exception.ResourceNotFound( resource='AttachHandle', msg=msg) ref.update(values) session.flush() return ref
Apache License 2.0
tcalmant/ipopo
tests/services/test_mqtt.py
MqttServiceTest._setup_mqtt
python
def _setup_mqtt(self, context): config = self.config.create_factory_configuration( services.MQTT_CONNECTOR_FACTORY_PID) config.update({"host": self.HOST, "port": self.PORT}) for _ in range(10): svc_ref = context.get_service_reference( services.SERVICE_MQTT_CONNECTION, "(id={})".format(config.get_pid())) if svc_ref is not None: break time.sleep(.5) else: self.fail("Connection Service not found") return config, svc_ref
Common code for MQTT service creation
https://github.com/tcalmant/ipopo/blob/1d4b81207e67890dfccc8f562336c7104f194c17/tests/services/test_mqtt.py#L104-L123
import random import shutil import string import tempfile import time try: import unittest2 as unittest except ImportError: import unittest import pelix.framework import pelix.services as services from tests.mqtt_utilities import find_mqtt_server __version_info__ = (1, 0, 1) __version__ = ".".join(str(x) for x in __version_info__) class Listener: def __init__(self): self.messages = [] def handle_mqtt_message(self, topic, payload, qos): self.messages.append((topic, payload, qos)) class MqttServiceTest(unittest.TestCase): HOST = find_mqtt_server() PORT = 1883 def setUp(self): self.conf_dir = tempfile.mkdtemp() self.framework = pelix.framework.create_framework( ('pelix.ipopo.core', 'pelix.services.configadmin', 'pelix.services.mqtt'), {'configuration.folder': self.conf_dir}) self.framework.start() context = self.framework.get_bundle_context() self.config_ref = context.get_service_reference( services.SERVICE_CONFIGURATION_ADMIN) self.config = context.get_service(self.config_ref) def tearDown(self): pelix.framework.FrameworkFactory.delete_framework(self.framework) self.framework = None shutil.rmtree(self.conf_dir) def test_no_config(self): time.sleep(.5) context = self.framework.get_bundle_context() svc_ref = context.get_service_reference( services.SERVICE_MQTT_CONNECTION) self.assertIsNone(svc_ref, "Found a MQTT connection")
Apache License 2.0
google/seqio
seqio/vocabularies.py
ByteVocabulary._base_vocab_size
python
def _base_vocab_size(self): return self._num_special_tokens + self._byte_size
Number of ids. Returns: an integer, the vocabulary size
https://github.com/google/seqio/blob/6fae6ef97ae9d2995bd954a8bdf537c74d04e956/seqio/vocabularies.py#L414-L420
import abc import hashlib from typing import Any, Dict, Iterable, Optional, Sequence, Union from absl import logging import tensorflow.compat.v2 as tf import tensorflow_text as tf_text from sentencepiece import sentencepiece_model_pb2 import sentencepiece as sentencepiece_processor PAD_ID = 0 class Vocabulary(metaclass=abc.ABCMeta): def __init__(self, extra_ids: int = 0): self._extra_ids = extra_ids or 0 @abc.abstractproperty def eos_id(self) -> Optional[int]: raise NotImplementedError("need to implement eos_id") @property def pad_id(self) -> int: return PAD_ID @abc.abstractproperty def unk_id(self) -> Optional[int]: raise NotImplementedError("need to implement unk_id") @property def extra_ids(self) -> int: return self._extra_ids @property def vocab_size(self) -> int: return self._base_vocab_size + self.extra_ids @abc.abstractproperty def _base_vocab_size(self) -> int: raise NotImplementedError @abc.abstractmethod def _encode(self, s: str) -> Sequence[int]: raise NotImplementedError def encode(self, s: Union[Sequence[int], str]) -> Sequence[int]: return self._encode(s) @abc.abstractmethod def _decode(self, ids): raise NotImplementedError def decode(self, ids: Iterable[int]): clean_ids = list(ids) if self.unk_id is not None: vocab_size = self._base_vocab_size clean_ids = [ self.unk_id if i >= vocab_size else i for i in clean_ids ] if self.eos_id is not None and self.eos_id in clean_ids: clean_ids = clean_ids[:clean_ids.index(self.eos_id) + 1] return self._decode(clean_ids) @abc.abstractmethod def _encode_tf(self, s: tf.Tensor) -> tf.Tensor: raise NotImplementedError def encode_tf(self, s: tf.Tensor) -> tf.Tensor: return self._encode_tf(s) @abc.abstractmethod def _decode_tf(self, ids: tf.Tensor) -> tf.Tensor: raise NotImplementedError def decode_tf(self, ids: tf.Tensor) -> tf.Tensor: clean_ids = ids if self.unk_id is not None: clean_ids = tf.where( tf.less(clean_ids, self._base_vocab_size), clean_ids, self.unk_id) if self.eos_id is not None: after_eos = tf.cumsum( tf.cast(tf.equal(clean_ids, self.eos_id), tf.int32), exclusive=True, axis=-1) clean_ids = tf.where(tf.cast(after_eos, tf.bool), self.pad_id, clean_ids) return self._decode_tf(clean_ids) class PassThroughVocabulary(Vocabulary): def __init__( self, size: int, eos_id: Optional[int] = None): self._size = size self._eos_id = eos_id super().__init__() @property def _base_vocab_size(self): return self._size def _encode(self, s: Sequence[int]) -> Sequence[int]: return s def _decode(self, ids: Sequence[int]) -> Sequence[int]: return ids def _encode_tf(self, s: tf.Tensor) -> tf.Tensor: return s def _decode_tf(self, ids: tf.Tensor) -> tf.Tensor: return ids @property def eos_id(self) -> Optional[int]: return self._eos_id @property def unk_id(self) -> Optional[int]: return None def __eq__(self, other): if not isinstance(other, PassThroughVocabulary): return False return (self._size == other._size and self.eos_id == other.eos_id) class SentencePieceVocabulary(Vocabulary): def __init__(self, sentencepiece_model_file, extra_ids=None): self._sentencepiece_model_file = sentencepiece_model_file self._tokenizer = None self._sp_model = None super().__init__(extra_ids=extra_ids) def _load_model(self): with tf.io.gfile.GFile(self._sentencepiece_model_file, "rb") as f: self._sp_model = f.read() model = sentencepiece_model_pb2.ModelProto.FromString(self._sp_model) if self._extra_ids: for i in reversed(range(self._extra_ids)): model.pieces.add( piece=f"▁<extra_id_{i}>", score=0.0, type= sentencepiece_model_pb2.ModelProto.SentencePiece.USER_DEFINED) 
self._sp_model = model.SerializeToString() self._tokenizer = sentencepiece_processor.SentencePieceProcessor() self._tokenizer.LoadFromSerializedProto(self._sp_model) if self._tokenizer.pad_id() != PAD_ID: logging.warning( "T5 library uses PAD_ID=%s, which is different from the " "sentencepiece vocabulary, which defines pad_id=%s", PAD_ID, self._tokenizer.pad_id()) @property def eos_id(self) -> Optional[int]: return self.tokenizer.eos_id() @property def unk_id(self) -> Optional[int]: return self.tokenizer.unk_id() @property def sp_model(self): if self._sp_model is None: self._load_model() return self._sp_model @property def sentencepiece_model_file(self): return self._sentencepiece_model_file @property def tokenizer(self): if not self._tokenizer: self._load_model() return self._tokenizer @property def tf_tokenizer(self): return tf_text.SentencepieceTokenizer(model=self.sp_model) @property def vocab_size(self): return self._base_vocab_size @property def _base_vocab_size(self): return self.tokenizer.GetPieceSize() def _encode(self, s): return self.tokenizer.EncodeAsIds(s) def _decode(self, ids): ids = [ self.tokenizer.unk_id() if i >= self.tokenizer.GetPieceSize() else i for i in ids] return self.tokenizer.DecodeIds(ids) def _encode_tf(self, s): return self.tf_tokenizer.tokenize(s) def _decode_tf(self, ids): return self.tf_tokenizer.detokenize(ids) def __eq__(self, other): if not isinstance(other, SentencePieceVocabulary): return False try: their_md5 = hashlib.md5(other.sp_model).hexdigest() except AttributeError: return False our_md5 = hashlib.md5(self.sp_model).hexdigest() return our_md5 == their_md5 def __str__(self) -> str: return (f"SentencePieceVocabulary(file={self._sentencepiece_model_file}, " f"extra_ids={self.extra_ids}, " f"spm_md5={hashlib.md5(self.sp_model).hexdigest()})") class ByteVocabulary(Vocabulary): def __init__(self, extra_ids: int = 0): self._byte_size = 256 self._num_special_tokens = 3 super().__init__(extra_ids=extra_ids) @property def eos_id(self) -> Optional[int]: return 1 @property def unk_id(self) -> Optional[int]: return 2 def _convert_strings_to_ids(self, s): return list(s.encode("utf-8")) def _convert_ids_to_strings(self, ids): return bytes(ids).decode("utf-8", errors="ignore") def _filter_non_string_ids(self, ids): lower_bound = self._num_special_tokens upper_bound = self._byte_size + self._num_special_tokens return [id for id in ids if lower_bound <= id < upper_bound] @property
Apache License 2.0
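The Vocabulary code above clamps out-of-range ids to unk_id and drops everything after the first EOS before detokenizing. A minimal pure-Python sketch of that cleanup rule, using illustrative values for the vocab size, eos_id and unk_id (not taken from the record):

def clean_ids(ids, base_vocab_size=100, eos_id=1, unk_id=2):
    # Map out-of-range ids to unk, then truncate just after the first EOS.
    ids = [unk_id if i >= base_vocab_size else i for i in ids]
    if eos_id in ids:
        ids = ids[:ids.index(eos_id) + 1]
    return ids

print(clean_ids([5, 250, 7, 1, 9]))  # [5, 2, 7, 1]: 250 -> unk, trailing 9 dropped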
zju-robotics-lab/cict
ROS/src/imu/scripts/mtdevice.py
MTDevice.GetOutputConfiguration
python
def GetOutputConfiguration(self): self._ensure_config_state() data = self.write_ack(MID.SetOutputConfiguration) output_configuration = [struct.unpack('!HH', data[o:o+4]) for o in range(0, len(data), 4)] return output_configuration
Get the output configuration of the device (mark IV).
https://github.com/zju-robotics-lab/cict/blob/ff873a03ab03d9113b8db96d26246939bb5da0d4/ROS/src/imu/scripts/mtdevice.py#L374-L380
import serial import struct import sys import getopt import time import glob import re import pprint from mtdef import MID, OutputMode, OutputSettings, MTException, Baudrates, XDIGroup, getMIDName, DeviceState, DeprecatedMID, MTErrorMessage, MTTimeoutException class MTDevice(object): def __init__(self, port, baudrate=115200, timeout=0.002, autoconf=True, config_mode=False, verbose=False): self.verbose = verbose try: self.device = serial.Serial(port, baudrate, timeout=timeout, writeTimeout=timeout) except IOError: self.device = serial.Serial(port, baudrate, timeout=timeout, writeTimeout=timeout, rtscts=True, dsrdtr=True) self.device.flushInput() self.device.flushOutput() self.timeout = 100*timeout self.state = None if autoconf: self.auto_config_legacy() else: self.mode = None self.settings = None self.length = None self.header = None if config_mode: self.GoToConfig() def write_msg(self, mid, data=b''): length = len(data) if length > 254: lendat = b'\xFF' + struct.pack('!H', length) else: lendat = struct.pack('!B', length) packet = b'\xFA\xFF' + struct.pack('!B', mid) + lendat + data packet += struct.pack('!B', 0xFF & (-(sum(map(ord,packet[1:]))))) msg = packet start = time.time() while ((time.time()-start) < self.timeout) and self.device.read(): pass self.device.write(msg) if self.verbose: print("MT: Write message id 0x%02X (%s) with %d data bytes: [%s]" % (mid, getMIDName(mid), length, ' '.join("%02X" % ord(v) for v in data))) def waitfor(self, size=1): buf = bytearray() for _ in range(100): buf.extend(self.device.read(size-len(buf))) if len(buf) == size: return buf if self.verbose: print("waiting for %d bytes, got %d so far: [%s]" % (size, len(buf), ' '.join('%02X' % v for v in buf))) raise MTTimeoutException("waiting for message") def read_data_msg(self, buf=bytearray()): start = time.time() if self.length <= 254: totlength = 5 + self.length else: totlength = 7 + self.length while (time.time()-start) < self.timeout: buf.extend(self.waitfor(totlength - len(buf))) preamble_ind = buf.find(self.header) if preamble_ind == -1: if self.verbose: sys.stderr.write("MT: discarding (no preamble).\n") del buf[:-3] continue elif preamble_ind: if self.verbose: sys.stderr.write("MT: discarding (before preamble).\n") del buf[:preamble_ind] buf.extend(self.waitfor(totlength-len(buf))) if 0xFF & sum(buf[1:]): if self.verbose: sys.stderr.write("MT: invalid checksum; discarding data and" " waiting for next message.\n") del buf[:buf.find(self.header)-2] continue data = str(buf[-self.length-1:-1]) del buf[:] return data else: raise MTException("could not find MTData message.") def read_msg(self): start = time.time() while (time.time()-start) < self.timeout: if ord(self.waitfor()) != 0xFA: continue if ord(self.waitfor()) != 0xFF: continue mid, length = struct.unpack('!BB', self.waitfor(2)) if length == 255: length, = struct.unpack('!H', self.waitfor(2)) buf = self.waitfor(length+1) checksum = buf[-1] data = struct.unpack('!%dB' % length, buf[:-1]) if 0xFF & sum(data, 0xFF+mid+length+checksum): if self.verbose: sys.stderr.write("invalid checksum; discarding data and " "waiting for next message.\n") continue if self.verbose: print("MT: Got message id 0x%02X (%s) with %d data bytes: [%s]" % (mid, getMIDName(mid), length, ' '.join("%02X" % v for v in data))) if mid == MID.Error: raise MTErrorMessage(data[0]) return (mid, buf[:-1]) else: raise MTException("could not find message.") def write_ack(self, mid, data=b'', n_retries=500): self.write_msg(mid, data) for _ in range(n_retries): mid_ack, data_ack = 
self.read_msg() if mid_ack == (mid+1): break elif self.verbose: print("ack (0x%02X) expected, got 0x%02X instead" % (mid+1, mid_ack)) else: raise MTException("Ack (0x%02X) expected, MID 0x%02X received " "instead (after %d retries)." % (mid+1, mid_ack, n_retries)) return data_ack def _ensure_config_state(self): if self.state != DeviceState.Config: self.GoToConfig() def _ensure_measurement_state(self): if self.state != DeviceState.Measurement: self.GoToMeasurement() def Reset(self, go_to_config=False): self.write_ack(MID.Reset) if go_to_config: time.sleep(0.01) mid, _ = self.read_msg() if mid == MID.WakeUp: self.write_msg(MID.WakeUpAck) self.state = DeviceState.Config else: self.state = DeviceState.Measurement def GoToConfig(self): self.write_ack(MID.GoToConfig) self.state = DeviceState.Config def GoToMeasurement(self): self._ensure_config_state() self.write_ack(MID.GoToMeasurement) self.state = DeviceState.Measurement def GetDeviceID(self): self._ensure_config_state() data = self.write_ack(MID.ReqDID) deviceID, = struct.unpack('!I', data) return deviceID def GetProductCode(self): self._ensure_config_state() data = self.write_ack(MID.ReqProductCode) return str(data).strip() def GetFirmwareRev(self): self._ensure_config_state() data = self.write_ack(MID.ReqFWRev) major, minor, revision = struct.unpack('!BBB', data[:3]) return (major, minor, revision) def RunSelfTest(self): self._ensure_config_state() data = self.write_ack(MID.RunSelfTest) bit_names = ['accX', 'accY', 'accZ', 'gyrX', 'gyrY', 'gyrZ', 'magX', 'magY', 'magZ'] self_test_results = [] for i, name in enumerate(bit_names): self_test_results.append((name, (data >> i) & 1)) return self_test_results def GetBaudrate(self): self._ensure_config_state() data = self.write_ack(MID.SetBaudrate) return data[0] def SetBaudrate(self, brid): self._ensure_config_state() self.write_ack(MID.SetBaudrate, (brid,)) def GetErrorMode(self): self._ensure_config_state() data = self.write_ack(MID.SetErrorMode) error_mode, = struct.unpack('!H', data) return error_mode def SetErrorMode(self, error_mode): self._ensure_config_state() data = struct.pack('!H', error_mode) self.write_ack(MID.SetErrorMode, data) def GetOptionFlags(self): self._ensure_config_state() data = self.write_ack(MID.SetOptionFlags) set_flags, clear_flags = struct.unpack('!II', data) return set_flags, clear_flags def SetOptionFlags(self, set_flags, clear_flags): self._ensure_config_state() data = struct.pack('!II', set_flags, clear_flags) self.write_ack(MID.SetOptionFlags, data) def GetLocationID(self): self._ensure_config_state() data = self.write_ack(MID.SetLocationID) location_id, = struct.unpack('!H', data) return location_id def SetLocationID(self, location_id): self._ensure_config_state() data = struct.pack('!H', location_id) self.write_ack(MID.SetLocationID, data) def RestoreFactoryDefaults(self): self._ensure_config_state() self.write_ack(MID.RestoreFactoryDef) def GetTransmitDelay(self): self._ensure_config_state() data = self.write_ack(MID.SetTransmitDelay) transmit_delay, = struct.unpack('!H', data) return transmit_delay def SetTransmitDelay(self, transmit_delay): self._ensure_config_state() data = struct.pack('!H', transmit_delay) self.write_ack(MID.SetTransmitDelay, data) def GetSyncSettings(self): self._ensure_config_state() data = self.write_ack(MID.SetSyncSettings) sync_settings = [struct.unpack('!BBBBHHHH', data[o:o+12]) for o in range(0, len(data), 12)] return sync_settings def SetSyncSettings(self, sync_settings): self._ensure_config_state() data = 
b''.join(struct.pack('!BBBBHHHH', *sync_setting) for sync_setting in sync_settings) self.write_ack(MID.SetSyncSettings, data) def GetConfiguration(self): self._ensure_config_state() config = self.write_ack(MID.ReqConfiguration) try: masterID, period, skipfactor, _, _, _, date, time, num, deviceID, length, mode, settings = struct.unpack('!IHHHHI8s8s32x32xHIHHI8x', config) except struct.error: raise MTException("could not parse configuration.") self.mode = mode self.settings = settings self.length = length if self.length <= 254: self.header = b'\xFA\xFF\x32' + struct.pack('!B', self.length) else: self.header = b'\xFA\xFF\x32\xFF' + struct.pack('!H', self.length) conf = {'output-mode': mode, 'output-settings': settings, 'length': length, 'period': period, 'skipfactor': skipfactor, 'Master device ID': masterID, 'date': date, 'time': time, 'number of devices': num, 'device ID': deviceID} return conf
MIT License
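GetOutputConfiguration above unpacks the acknowledgement payload into 4-byte groups with struct.unpack('!HH', ...). A small sketch of that decoding step on a made-up payload; reading each pair as (data identifier, output frequency) follows the usual mark IV convention and is an assumption here, not something stated in the record:

import struct

data = bytes.fromhex('1020019080200190')  # two hypothetical 4-byte entries
pairs = [struct.unpack('!HH', data[o:o + 4]) for o in range(0, len(data), 4)]
print(pairs)  # [(4128, 400), (32800, 400)] -- i.e. (0x1020, 400) and (0x8020, 400)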
microsoft/simplify-docx
src/simplify_docx/iterators/generic.py
register_iterator
python
def register_iterator(name: str, TAGS_TO_YIELD: Dict[str, Type[el]] = None, TAGS_TO_NEST: Dict[str, str] = None, TAGS_TO_IGNORE: Sequence[str] = None, TAGS_TO_WARN: Dict[str, str] = None, TAGS_TO_SKIP: Dict[str, Tuple[str, str]] = None, extends: Optional[Sequence[str]] = None, check_name: bool = True, ) -> None: if check_name and name in __definitions__: raise ValueError("iterator named '%s' already registered" % name) __definitions__[name] = ElementHandlers( TAGS_TO_YIELD, TAGS_TO_NEST, TAGS_TO_IGNORE, TAGS_TO_WARN, TAGS_TO_SKIP, extends=extends )
An opinionated iterator which ignores deleted and moved resources, and passes through in-line revision containers such as InsertedRun, and orientation elements like bookmarks, comments, and permissions
https://github.com/microsoft/simplify-docx/blob/a706c290a5fd61d7f10e157d01f134abb85bff65/src/simplify_docx/iterators/generic.py#L45-L70
from warnings import warn from typing import ( Optional, Tuple, Type, Dict, Sequence, NamedTuple, NewType, Callable, Generator, List ) from ..elements.base import el from ..types import xmlFragment from ..utils.tag import get_tag from ..utils.warnings import UnexpectedElementWarning FragmentIterator = NewType('FragmentIterator', Callable[[xmlFragment, Optional[str]], Generator[xmlFragment, None, None]]) class ElementHandlers(NamedTuple): TAGS_TO_YIELD: Optional[Dict[str, Type[el]]] TAGS_TO_NEST: Optional[Dict[str, str]] TAGS_TO_IGNORE: Optional[Sequence[str]] TAGS_TO_WARN: Optional[Dict[str, str]] TAGS_TO_SKIP: Optional[Dict[str, Tuple[str, str]]] extends: Optional[Sequence[str]] ElementHandlers.__new__.__defaults__ = (None,)* 6 __definitions__: Dict[str, ElementHandlers] = {} __built__: Dict[str, ElementHandlers] = {}
MIT License
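register_iterator above stores named handler definitions in a module-level dictionary and rejects duplicate names. A self-contained, simplified stand-in for that registration pattern (not the simplify_docx implementation; the tag name and handler type are placeholders):

from typing import Dict, Optional, Sequence

_definitions: Dict[str, dict] = {}

def register(name: str,
             tags_to_yield: Optional[Dict[str, type]] = None,
             extends: Optional[Sequence[str]] = None,
             check_name: bool = True) -> None:
    # Reject duplicate names, then store the handler tables for later lookup.
    if check_name and name in _definitions:
        raise ValueError("iterator named '%s' already registered" % name)
    _definitions[name] = {"yield": tags_to_yield or {}, "extends": list(extends or [])}

register("document", tags_to_yield={"w:p": str})
print(_definitions["document"])  # {'yield': {'w:p': <class 'str'>}, 'extends': []}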
pabigot/pyxb
pyxb/__init__.py
BIND.__init__
python
def __init__ (self, *args, **kw): self.__args = args self.__kw = kw
Cache parameters for subsequent binding creation. Invoke just as you would the factory for a binding class.
https://github.com/pabigot/pyxb/blob/14737c23a125fd12c954823ad64fc4497816fae3/pyxb/__init__.py#L93-L97
import logging from pyxb.utils import six _log = logging.getLogger(__name__) class cscRoot (object): def __init__ (self, *args, **kw): if issubclass(self.__class__.mro()[-2], ( list, dict )): super(cscRoot, self).__init__(*args) __version__ = '1.2.7-DEV' __url__ = 'http://pyxb.sourceforge.net' __license__ = 'Apache License 2.0' from pyxb.exceptions_ import * import pyxb.namespace class BIND (object): __args = None __kw = None
Apache License 2.0
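BIND.__init__ above only caches the positional and keyword arguments so a binding can be constructed later. A self-contained sketch of that deferred-construction idea; Deferred and create_instance are made-up names, not the pyxb API:

class Deferred(object):
    def __init__(self, *args, **kw):
        # Cache the arguments now; apply them to a factory later.
        self.__args = args
        self.__kw = kw

    def create_instance(self, factory):
        return factory(*self.__args, **self.__kw)

pending = Deferred(1, 2, key="value")
print(pending.create_instance(lambda *a, **kw: (a, kw)))  # ((1, 2), {'key': 'value'})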
ucb-art/bag_framework
bag/io/sim_data.py
_get_sweep_params
python
def _get_sweep_params(fname): mat = np.genfromtxt(fname, dtype=np.unicode_) header = mat[0, :] data = mat[1:, :] idx_list = [] for idx in range(len(header)): bool_vec = data[:, idx] == data[0, idx] if not np.all(bool_vec): idx_list.append(idx) header = header[idx_list] data = data[:, idx_list] last_first_idx = [np.where(data[:, idx] == data[-1, idx])[0][0] for idx in range(len(header))] order_list = np.argsort(last_first_idx) values_list = [] skip_len = 1 for idx in order_list: end_idx = last_first_idx[idx] + 1 values = data[0:end_idx:skip_len, idx] if header[idx] != 'corner': values = values.astype(np.float) skip_len *= len(values) values_list.append(values) swp_list = header[order_list][::-1].tolist() values_list.reverse() return swp_list, values_list
Parse the sweep information file and reverse engineer sweep parameters. Parameters ---------- fname : str the sweep information file name. Returns ------- swp_list : list[str] list of sweep parameter names. index 0 is the outer-most loop. values_list : list[list[float or str]] list of values list for each sweep parameter.
https://github.com/ucb-art/bag_framework/blob/8efa57ad719b2b02a005e234d87ad6f0e5e7a3de/bag/io/sim_data.py#L55-L102
import os import glob import numpy as np import h5py from .common import bag_encoding, bag_codec_error illegal_var_name = ['sweep_params'] class SweepArray(np.ndarray): def __new__(cls, data, sweep_params=None): obj = np.asarray(data).view(cls) obj.sweep_params = sweep_params return obj def __array_finalize__(self, obj): if obj is None: return self.sweep_params = getattr(obj, 'sweep_params', None) def __reduce__(self): pickled_state = super(SweepArray, self).__reduce__() new_state = pickled_state[2] + (self.sweep_params,) return pickled_state[0], pickled_state[1], new_state def __setstate__(self, state): self.sweep_params = state[-1] super(SweepArray, self).__setstate__(state[0:-1])
BSD 3-Clause New or Revised License
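The key trick in _get_sweep_params above is that the row index at which each column first takes its final value reveals how fast that column sweeps, so sorting columns by that index recovers the loop nesting. A worked example on made-up data:

import numpy as np

header = np.array(['corner', 'vdd'])
data = np.array([['ss', '1.0'], ['ss', '1.2'], ['ss', '1.4'],
                 ['ff', '1.0'], ['ff', '1.2'], ['ff', '1.4']])

# Row where each column first equals its last-row value: corner -> 3, vdd -> 2.
last_first_idx = [int(np.where(data[:, i] == data[-1, i])[0][0]) for i in range(len(header))]
order = np.argsort(last_first_idx)          # fastest-varying column first
print(last_first_idx, header[order][::-1])  # [3, 2] ['corner' 'vdd'] -> corner is the outer loop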
polyswarm/polyswarm-client
src/microengine/__main__.py
main
python
def main(log, client_log, polyswarmd_addr, keyfile, password, api_key, backend, testing, insecure_transport, allow_key_over_http, chains, log_format, artifact_type, bid_strategy, accept, exclude, filter, confidence): loglevel = getattr(logging, log.upper(), None) clientlevel = getattr(logging, client_log.upper(), None) if not isinstance(loglevel, int) or not isinstance(clientlevel, int): logging.error('invalid log level') raise FatalError('Invalid log level', 1) polyswarmd_addr = utils.finalize_polyswarmd_addr(polyswarmd_addr, api_key, allow_key_over_http, insecure_transport) if insecure_transport: warnings.warn('--insecure-transport will be removed soon. Please add http:// or https:// to polyswarmd-addr`', DeprecationWarning) logger_name, microengine_class = choose_backend(backend) bid_logger_name, bid_strategy_class = choose_bid_strategy(bid_strategy) artifact_types = None init_logging(['microengine', logger_name], log_format, loglevel) init_logging(['polyswarmclient'], log_format, clientlevel) if artifact_type: artifact_types = [ArtifactType.from_string(artifact) for artifact in artifact_type] filter_accept = filter.get('accept', []) filter_reject = filter.get('reject', []) if accept or exclude: warnings.warn('Options `--exclude|accept key:value` are deprecated, please switch to' ' `--filter accept|reject key comparison value`', DeprecationWarning) filter_accept.extend(accept) filter_reject.extend(exclude) favor = confidence.get('favor', []) penalize = confidence.get('penalize', []) microengine_class.connect(polyswarmd_addr, keyfile, password, api_key=api_key, artifact_types=artifact_types, bid_strategy=bid_strategy_class(), bounty_filter=BountyFilter(filter_accept, filter_reject), chains=set(chains), confidence_modifier=ConfidenceModifier(favor, penalize), testing=testing).run()
Entrypoint for the microengine driver
https://github.com/polyswarm/polyswarm-client/blob/5e0647511d15b3a36595fe015e7c8afad61d1f74/src/microengine/__main__.py#L138-L181
import click import importlib.util import logging import sys import warnings from polyswarmartifact import ArtifactType from polyswarmclient import utils from polyswarmclient.config import init_logging, validate_apikey from polyswarmclient.exceptions import FatalError, SecurityWarning from polyswarmclient.filters.bountyfilter import split_filter, FilterComparison, BountyFilter from polyswarmclient.filters.confidencefilter import ConfidenceModifier from polyswarmclient.filters.filter import parse_filters logger = logging.getLogger(__name__) def choose_backend(backend): backend_list = backend.split(':') module_name_string = backend_list[0] mod_spec = importlib.util.find_spec(module_name_string) or importlib.util.find_spec( 'microengine.{0}'.format(module_name_string)) if mod_spec is None: raise Exception('Microengine backend `{0}` cannot be imported as a python module.'.format(backend)) microengine_module = importlib.import_module(mod_spec.name) if hasattr(microengine_module, 'Microengine'): microengine_class = microengine_module.Microengine elif len(backend_list) == 2 and hasattr(microengine_module, backend_list[1]): microengine_class = getattr(microengine_module, backend_list[1]) else: raise Exception('No microengine backend found {0}'.format(backend)) return microengine_module.__name__, microengine_class def choose_bid_strategy(bid_strategy): mod_spec = importlib.util.find_spec(bid_strategy) or importlib.util.find_spec(f'microengine.bidstrategy.{bid_strategy}') if mod_spec is None: raise Exception('Bid strategy `{0}` cannot be imported as a python module.'.format(bid_strategy)) bid_strategy_module = importlib.import_module(mod_spec.name) if hasattr(bid_strategy_module, 'BidStrategy'): bid_strategy_class = bid_strategy_module.BidStrategy else: raise Exception('No bid strategy found {0}'.format(bid_strategy)) return bid_strategy_module.__name__, bid_strategy_class @click.command() @click.option('--log', envvar='LOG_LEVEL', default='WARNING', help='App Log level') @click.option('--client-log', envvar='CLIENT_LOG_LEVEL', default='WARNING', help='PolySwarm Client log level') @click.option('--polyswarmd-addr', envvar='POLYSWARMD_ADDR', default='https://api.polyswarm.network/v1/default', help='Address (scheme://host:port) of polyswarmd instance') @click.option('--keyfile', envvar='KEYFILE', type=click.Path(exists=True), default='keyfile', help='Keystore file containing the private key to use with this microengine') @click.option('--password', envvar='PASSWORD', prompt=True, hide_input=True, help='Password to decrypt the keyfile with') @click.option('--api-key', envvar='API_KEY', default='', callback=validate_apikey, help='API key to use with polyswarmd') @click.option('--backend', envvar='BACKEND', required=True, help='Backend to use') @click.option('--testing', default=0, help='Activate testing mode for integration testing, respond to N bounties and N offers then exit') @click.option('--insecure-transport', is_flag=True, help='Deprecated. Used only to change the default scheme to http in polyswarmd-addr if not present') @click.option('--allow-key-over-http', is_flag=True, envvar='ALLOW_KEY_OVER_HTTP', help='Force api keys over http (Not Recommended)') @click.option('--chains', multiple=True, default=['side'], help='Chain(s) to operate on') @click.option('--log-format', envvar='LOG_FORMAT', default='text', help='Log format. 
Can be `json` or `text` (default)') @click.option('--artifact-type', multiple=True, default=['file'], help='List of artifact types to scan') @click.option('--bid-strategy', envvar='BID_STRATEGY', default='default', help='Bid strategy for bounties') @click.option('--accept', multiple=True, default=[], callback=split_filter, help='Declared metadata in format key:value:modifier that is required to allow scans on any artifact.') @click.option('--exclude', multiple=True, default=[], callback=split_filter, help='Declared metadata in format key:value:modifier that cannot be present to allow scans on any ' 'artifact.') @click.option('--filter', multiple=True, default=[], callback=parse_filters, type=( click.Choice(['reject', 'accept']), str, click.Choice([member.value for _name, member in FilterComparison.__members__.items()]), str ), help='Add filter in format `[accept|reject] key [eq|gt|gte|lt|lte|startswith|endswith|regex] value` ' 'to accept or reject artifacts based on metadata.') @click.option('--confidence', multiple=True, default=[], callback=parse_filters, type=( click.Choice(['favor', 'penalize']), str, click.Choice([member.value for _name, member in FilterComparison.__members__.items()]), str ), help='Add filter in format `[favor|penalize] key [eq|gt|gte|lt|lte|startswith|endswith|regex] value` ' 'to modify confidence based on metadata.')
MIT License
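choose_backend in the context above resolves a 'module' or 'module:ClassName' string to a class via importlib. A simplified, self-contained sketch of that pattern, using a standard-library module as a stand-in target (load_backend is an illustrative name, not part of polyswarm-client):

import importlib

def load_backend(spec, default_attr="Microengine"):
    # Split "module:ClassName"; fall back to a default attribute when no class is given.
    module_name, _, attr = spec.partition(":")
    module = importlib.import_module(module_name)
    return getattr(module, attr or default_attr)

print(load_backend("json:JSONDecoder"))  # <class 'json.decoder.JSONDecoder'>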
psc-g/psc2
talks/arturia/server.py
generate_drums
python
def generate_drums(): global drum_type global drums_bundle global generated_drums global playable_notes global seed_drum_sequence global num_steps global qpm global total_seconds global temperature drums_config_id = drums_bundle.generator_details.id drums_config = drums_rnn_model.default_configs[drums_config_id] generator = drums_rnn_sequence_generator.DrumsRnnSequenceGenerator( model=drums_rnn_model.DrumsRnnModel(drums_config), details=drums_config.details, steps_per_quarter=drums_config.steps_per_quarter, checkpoint=melody_rnn_generate.get_checkpoint(), bundle=drums_bundle) generator_options = generator_pb2.GeneratorOptions() generator_options.args['temperature'].float_value = temperature generator_options.args['beam_size'].int_value = 1 generator_options.args['branch_factor'].int_value = 1 generator_options.args['steps_per_iteration'].int_value = 1 if seed_drum_sequence is None: primer_drums = magenta.music.DrumTrack([frozenset([36])]) primer_sequence = primer_drums.to_sequence(qpm=qpm) local_num_steps = num_steps else: primer_sequence = seed_drum_sequence local_num_steps = num_steps * 2 tempo = primer_sequence.tempos.add() tempo.qpm = qpm step_length = 60. / qpm / 4.0 total_seconds = local_num_steps * step_length last_end_time = (max(n.end_time for n in primer_sequence.notes) if primer_sequence.notes else 0) generator_options.generate_sections.add( start_time=last_end_time + step_length, end_time=total_seconds) generated_sequence = generator.generate(primer_sequence, generator_options) generated_sequence = sequences_lib.quantize_note_sequence(generated_sequence, 4) if seed_drum_sequence is not None: i = 0 while i < len(generated_sequence.notes): if generated_sequence.notes[i].quantized_start_step < num_steps: del generated_sequence.notes[i] else: generated_sequence.notes[i].quantized_start_step -= num_steps generated_sequence.notes[i].quantized_end_step -= num_steps i += 1 drum_pattern = [(n.pitch, n.quantized_start_step, n.quantized_end_step) for n in generated_sequence.notes] if len(playable_notes) > 0: playable_notes = SortedList([x for x in playable_notes if x.type != 'drums'], key=lambda x: x.onset) for p, s, e in drum_pattern: playable_notes.add(PlayableNote(type='drums', note=[], instrument=DRUM_MAPPING[p], onset=s)) drum_type = 'AI DRUMS!' print_status()
Generate a new drum groove by querying the model.
https://github.com/psc-g/psc2/blob/6676fc67263c9268ff65784d583cb838cfd42c28/talks/arturia/server.py#L319-L384
from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app import sys import collections import importlib import os import OSC from sortedcontainers import SortedList import threading import tensorflow as tf import time import magenta from magenta.models.melody_rnn import melody_rnn_model from magenta.models.melody_rnn import melody_rnn_generate from magenta.models.melody_rnn import melody_rnn_sequence_generator from magenta.models.drums_rnn import drums_rnn_model from magenta.models.drums_rnn import drums_rnn_sequence_generator from magenta.music import sequences_lib from magenta.protobuf import generator_pb2 from magenta.protobuf import music_pb2 importlib.import_module('ascii_arts') import ascii_arts importlib.import_module('arturiamap') import arturiamap receive_address = ('127.0.0.1', 12345) send_address = ('127.0.0.1', 57120) server = OSC.OSCServer(receive_address) client = OSC.OSCClient() client.connect(send_address) class TimeSignature: numerator = 4 denominator = 4 time_signature = TimeSignature() qpm = magenta.music.DEFAULT_QUARTERS_PER_MINUTE num_bars = 2 num_steps = int(num_bars * time_signature.numerator * 16 / time_signature.denominator) PlayableNote = collections.namedtuple('playable_note', ['type', 'note', 'instrument', 'onset']) playable_notes = SortedList(key=lambda x: x.onset) play_loop = False seed_drum_sequence = None last_tap_onset = None beat_length = None last_first_beat = 0.0 last_first_beat_for_record = None bass_line = [] bass_volume = 5.0 chords_volume = 1.0 improv_volume = 1.0 MAX_TAP_DELAY = 5.0 min_primer_length = 20 max_robot_length = 20 accumulated_primer_melody = [] generated_melody = [] note_mapping = {i:i for i in range(21, 109)} mode = 'free' improv_status = 'psc' playable_instruments = set(['click', 'bass', 'drums', 'chords', 'stop']) gracias_splash = False BASE_MODELS_PATH = sys.argv[1] MELODY_MODEL_PATH = BASE_MODELS_PATH + '/attention_rnn.mag' DRUMS_MODEL_PATH = BASE_MODELS_PATH + '/drum_kit_rnn.mag' melody_bundle = magenta.music.read_bundle_file(MELODY_MODEL_PATH) drums_bundle = magenta.music.read_bundle_file(DRUMS_MODEL_PATH) temperature = 1.0 DRUM_MAPPING = { 36: 'kick', 35: 'kick', 38: 'snare', 27: 'snare', 28: 'snare', 31: 'snare', 32: 'snare', 33: 'snare', 34: 'snare', 37: 'snare', 39: 'snare', 40: 'snare', 56: 'snare', 65: 'snare', 66: 'snare', 75: 'snare', 85: 'snare', 42: 'clhat', 44: 'clhat', 54: 'clhat', 68: 'clhat', 69: 'clhat', 70: 'clhat', 71: 'clhat', 73: 'clhat', 78: 'clhat', 80: 'clhat', 46: 'ophat', 67: 'ophat', 72: 'ophat', 74: 'ophat', 79: 'ophat', 81: 'ophat', 45: 'lowtom', 29: 'lowtom', 41: 'lowtom', 61: 'lowtom', 64: 'lowtom', 84: 'lowtom', 48: 'midtom', 47: 'midtom', 60: 'midtom', 63: 'midtom', 77: 'midtom', 86: 'midtom', 87: 'midtom', 50: 'hightom', 30: 'hightom', 43: 'hightom', 62: 'hightom', 76: 'hightom', 83: 'hightom', 49: 'crash', 55: 'crash', 57: 'crash', 58: 'crash', 51: 'ride', 52: 'ride', 53: 'ride', 59: 'ride', 82: 'ride' } drum_type = 'STRAIGHT FROM THE BASSLINE' MIN_MIDI = 48 MAX_MIDI = 72 WHITE_CHAR = ' ' BLACK_CHAR = '-' PLAYING_CHAR = 'H' SEPARATOR_CHAR = '|' BOTTOM_CHAR = '_' WHITE_WIDTH = 5 WHITE_HEIGHT = 6 BLACK_WIDTH = 3 BLACK_HEIGHT = 4 TOP_SIDE_WHITE_WIDTH = WHITE_WIDTH - (BLACK_WIDTH - 1) // 2 TOP_MID_WHITE_WIDTH = WHITE_WIDTH - (BLACK_WIDTH - 1) KEY_LAYOUTS = [ 'lw', 'b', 'mw', 'b', 'rw', 'lw', 'b', 'mw', 'b', 'mw', 'b', 'rw' ] NoteSpec = collections.namedtuple('note_spec', ['midi', 'layout']) midi_key_status = {} midi_keys = [] def 
setup_keyboard(): i = 0 for key in range(MIN_MIDI, MAX_MIDI + 1): midi_key_status[key] = False midi_keys.append(NoteSpec(key, KEY_LAYOUTS[i % len(KEY_LAYOUTS)])) i += 1 def draw_keyboard(): global generated_melody global midi_keys ai_chars = ['A', 'I'] for h in range(WHITE_HEIGHT): play_char = ai_chars[h % 2] if len(generated_melody) else PLAYING_CHAR line = '' for key in midi_keys: if key.layout == 'b': if h < BLACK_HEIGHT: char_to_print = ( play_char if midi_key_status[key.midi] else BLACK_CHAR) line += char_to_print * BLACK_WIDTH continue char_to_print = play_char if midi_key_status[key.midi] else WHITE_CHAR if h < BLACK_HEIGHT: if key.layout == 'lw': line += SEPARATOR_CHAR if key.layout == 'lw' or key.layout == 'rw': line += char_to_print * TOP_SIDE_WHITE_WIDTH else: line += char_to_print * TOP_MID_WHITE_WIDTH else: line += SEPARATOR_CHAR + char_to_print * WHITE_WIDTH line += SEPARATOR_CHAR print(line) line = '' for key in midi_keys: if key.layout == 'b': continue line += '{}{}'.format(SEPARATOR_CHAR, BOTTOM_CHAR * WHITE_WIDTH) line += SEPARATOR_CHAR print(line) def set_click(): global num_bars global num_steps global time_signature global playable_notes if len(playable_notes) > 0: playable_notes = SortedList([x for x in playable_notes if x.type != 'click'], key=lambda x: x.onset) num_steps = int(num_bars * time_signature.numerator * 16 / time_signature.denominator) step = 0 beat = 0 note_length = int(16 / time_signature.denominator) while step < num_steps: if step == 0: instrument = 'click0' else: instrument = 'click1' if beat % time_signature.numerator == 0 else 'click2' playable_notes.add(PlayableNote(type='click', note=[], instrument=instrument, onset=step)) step += note_length beat += 1 def looper(): global playable_notes global last_first_beat global last_first_beat_for_record global num_steps global qpm global mode global playable_instruments global bass_volume global chords_volume global play_loop local_playable_notes = None while True: if not play_loop or len(playable_notes) == 0: time.sleep(0.5) continue step_length = 60. 
/ qpm / 4.0 curr_step = 0 local_playable_notes = list(playable_notes) last_first_beat = time.time() if (last_first_beat_for_record is not None and last_first_beat > last_first_beat_for_record): last_first_beat_for_record = None mode = 'free' print_status() if local_playable_notes[0].onset != 0: curr_step = local_playable_notes[0].onset for playable_note in local_playable_notes: tts = step_length * (playable_note.onset - curr_step) if tts < 0: continue time.sleep(tts) curr_step = playable_note.onset if playable_note.type not in playable_instruments: continue prefix = '/stop' if playable_note.type == 'stop' else '/play' msg = OSC.OSCMessage() msg.setAddress(prefix + playable_note.instrument) note = list(playable_note.note) if playable_note.type == 'chords': note[1] *= chords_volume msg.append(note) client.send(msg) tts = step_length * (num_steps - local_playable_notes[-1].onset) if tts > 0: time.sleep(tts) def generate_melody(): global melody_bundle global accumulated_primer_melody global generated_melody global max_robot_length global qpm melody_config_id = melody_bundle.generator_details.id melody_config = melody_rnn_model.default_configs[melody_config_id] generator = melody_rnn_sequence_generator.MelodyRnnSequenceGenerator( model=melody_rnn_model.MelodyRnnModel(melody_config), details=melody_config.details, steps_per_quarter=melody_config.steps_per_quarter, checkpoint=melody_rnn_generate.get_checkpoint(), bundle=melody_bundle) generator_options = generator_pb2.GeneratorOptions() generator_options.args['temperature'].float_value = 1.0 generator_options.args['beam_size'].int_value = 1 generator_options.args['branch_factor'].int_value = 1 generator_options.args['steps_per_iteration'].int_value = 1 primer_melody = magenta.music.Melody(accumulated_primer_melody) primer_sequence = primer_melody.to_sequence(qpm=qpm) seconds_per_step = 60.0 / qpm / generator.steps_per_quarter last_end_time = (max(n.end_time for n in primer_sequence.notes) if primer_sequence.notes else 0) melody_total_seconds = last_end_time * 3 generate_section = generator_options.generate_sections.add( start_time=last_end_time + seconds_per_step, end_time=melody_total_seconds) generated_sequence = generator.generate(primer_sequence, generator_options) generated_melody = [n.pitch for n in generated_sequence.notes] generated_melody = generated_melody[len(accumulated_primer_melody):] generated_melody = generated_melody[:max_robot_length] accumulated_primer_melody = []
Apache License 2.0
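generate_drums above derives its timing from step_length = 60. / qpm / 4.0, i.e. 60/qpm seconds per quarter note split into four sixteenth-note steps. A quick arithmetic check with illustrative values:

qpm = 120
num_steps = 32                        # two 4/4 bars of sixteenth notes
step_length = 60. / qpm / 4.0         # seconds per sixteenth-note step
print(step_length, num_steps * step_length)  # 0.125 4.0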
andyclymer/controlboard
ControlBoard.roboFontExt/lib/modules/pyserial-2.7/serial/serialcli.py
IronSerial.getCD
python
def getCD(self): if not self._port_handle: raise portNotOpenError return self._port_handle.CDHolding
Read terminal status line: Carrier Detect
https://github.com/andyclymer/controlboard/blob/e9b56341c38b982fe22db4e40a86c6b219c85d7e/ControlBoard.roboFontExt/lib/modules/pyserial-2.7/serial/serialcli.py#L232-L235
import clr import System import System.IO.Ports from serial.serialutil import * def device(portnum): return System.IO.Ports.SerialPort.GetPortNames()[portnum] sab = System.Array[System.Byte] def as_byte_array(string): return sab([ord(x) for x in string]) class IronSerial(SerialBase): BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800, 9600, 19200, 38400, 57600, 115200) def open(self): if self._port is None: raise SerialException("Port must be configured before it can be used.") if self._isOpen: raise SerialException("Port is already open.") try: self._port_handle = System.IO.Ports.SerialPort(self.portstr) except Exception, msg: self._port_handle = None raise SerialException("could not open port %s: %s" % (self.portstr, msg)) self._reconfigurePort() self._port_handle.Open() self._isOpen = True if not self._rtscts: self.setRTS(True) self.setDTR(True) self.flushInput() self.flushOutput() def _reconfigurePort(self): if not self._port_handle: raise SerialException("Can only operate on a valid port handle") if self._timeout is None: self._port_handle.ReadTimeout = System.IO.Ports.SerialPort.InfiniteTimeout else: self._port_handle.ReadTimeout = int(self._timeout*1000) if self._writeTimeout is None: self._port_handle.WriteTimeout = System.IO.Ports.SerialPort.InfiniteTimeout else: self._port_handle.WriteTimeout = int(self._writeTimeout*1000) try: self._port_handle.BaudRate = self._baudrate except IOError, e: raise ValueError(str(e)) if self._bytesize == FIVEBITS: self._port_handle.DataBits = 5 elif self._bytesize == SIXBITS: self._port_handle.DataBits = 6 elif self._bytesize == SEVENBITS: self._port_handle.DataBits = 7 elif self._bytesize == EIGHTBITS: self._port_handle.DataBits = 8 else: raise ValueError("Unsupported number of data bits: %r" % self._bytesize) if self._parity == PARITY_NONE: self._port_handle.Parity = getattr(System.IO.Ports.Parity, 'None') elif self._parity == PARITY_EVEN: self._port_handle.Parity = System.IO.Ports.Parity.Even elif self._parity == PARITY_ODD: self._port_handle.Parity = System.IO.Ports.Parity.Odd elif self._parity == PARITY_MARK: self._port_handle.Parity = System.IO.Ports.Parity.Mark elif self._parity == PARITY_SPACE: self._port_handle.Parity = System.IO.Ports.Parity.Space else: raise ValueError("Unsupported parity mode: %r" % self._parity) if self._stopbits == STOPBITS_ONE: self._port_handle.StopBits = System.IO.Ports.StopBits.One elif self._stopbits == STOPBITS_ONE_POINT_FIVE: self._port_handle.StopBits = System.IO.Ports.StopBits.OnePointFive elif self._stopbits == STOPBITS_TWO: self._port_handle.StopBits = System.IO.Ports.StopBits.Two else: raise ValueError("Unsupported number of stop bits: %r" % self._stopbits) if self._rtscts and self._xonxoff: self._port_handle.Handshake = System.IO.Ports.Handshake.RequestToSendXOnXOff elif self._rtscts: self._port_handle.Handshake = System.IO.Ports.Handshake.RequestToSend elif self._xonxoff: self._port_handle.Handshake = System.IO.Ports.Handshake.XOnXOff else: self._port_handle.Handshake = getattr(System.IO.Ports.Handshake, 'None') def close(self): if self._isOpen: if self._port_handle: try: self._port_handle.Close() except System.IO.Ports.InvalidOperationException: pass self._port_handle = None self._isOpen = False def makeDeviceName(self, port): try: return device(port) except TypeError, e: raise SerialException(str(e)) def inWaiting(self): if not self._port_handle: raise portNotOpenError return self._port_handle.BytesToRead def read(self, size=1): if not self._port_handle: raise portNotOpenError 
data = bytearray() while size: try: data.append(self._port_handle.ReadByte()) except System.TimeoutException, e: break else: size -= 1 return bytes(data) def write(self, data): if not self._port_handle: raise portNotOpenError if not isinstance(data, (bytes, bytearray)): raise TypeError('expected %s or bytearray, got %s' % (bytes, type(data))) try: self._port_handle.Write(as_byte_array(data), 0, len(data)) except System.TimeoutException, e: raise writeTimeoutError return len(data) def flushInput(self): if not self._port_handle: raise portNotOpenError self._port_handle.DiscardInBuffer() def flushOutput(self): if not self._port_handle: raise portNotOpenError self._port_handle.DiscardOutBuffer() def sendBreak(self, duration=0.25): if not self._port_handle: raise portNotOpenError import time self._port_handle.BreakState = True time.sleep(duration) self._port_handle.BreakState = False def setBreak(self, level=True): if not self._port_handle: raise portNotOpenError self._port_handle.BreakState = bool(level) def setRTS(self, level=True): if not self._port_handle: raise portNotOpenError self._port_handle.RtsEnable = bool(level) def setDTR(self, level=True): if not self._port_handle: raise portNotOpenError self._port_handle.DtrEnable = bool(level) def getCTS(self): if not self._port_handle: raise portNotOpenError return self._port_handle.CtsHolding def getDSR(self): if not self._port_handle: raise portNotOpenError return self._port_handle.DsrHolding def getRI(self): if not self._port_handle: raise portNotOpenError return False
MIT License
matuiss2/jackbot
actions/macro/build/hydraden_construction.py
HydradenConstruction.handle
python
async def handle(self): await self.main.place_building(UnitTypeId.HYDRALISKDEN)
Build the hydraden
https://github.com/matuiss2/jackbot/blob/1b45ce782df666dd21996011a04c92211c0e6368/actions/macro/build/hydraden_construction.py#L22-L24
from sc2.constants import UnitTypeId class HydradenConstruction: def __init__(self, main): self.main = main async def should_handle(self): return ( self.main.can_build_unique( UnitTypeId.HYDRALISKDEN, self.main.hydradens, (self.main.lairs and self.main.pools) ) and not self.main.close_enemy_production and not self.main.floated_buildings_bm and self.main.base_amount >= 3 )
MIT License
edlanglois/mbbl-pilco
pilco/utils/numpy.py
batch_apply
python
def batch_apply(fn, x, in_dims=1, out=None): x = np.asarray(x) if in_dims == 0: batch_shape = x.shape else: batch_shape = x.shape[:-in_dims] if out is None: out = np.empty(batch_shape, dtype=object) merge_output = True else: merge_output = False for idx in np.ndindex(batch_shape): out[idx] = fn(x[idx]) if merge_output: out = np.array(out.tolist()) return out
Batch apply some function over an array-like. Args: fn: A function that maps arrays of shape [D_1, ..., D_N] to arrays with a consistent output shape [C_1, ..., C_K]. x: An array of shape [B_1, ..., B_M, D_1, ..., D_N] in_dims: The number of input dimensions to `fn`. Equal to `N` in the array shape expressions. out: An optional output array of shape [B_1, ..., B_M, C_1, ..., C_K] into which the output is placed. This will be faster as it avoids concatenation across the batch dimensions. Returns: The batched output. An array of shape [B_1, ..., B_M, C_1, ..., C_K]
https://github.com/edlanglois/mbbl-pilco/blob/66d153ac57e1fcf676faadb93e5d552d09d2a081/pilco/utils/numpy.py#L34-L70
import functools import itertools import scipy.linalg import numpy as np
MIT License
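A usage sketch for batch_apply above: with x of shape (2, 4, 3) and in_dims=1, fn sees shape-(3,) slices and the output has shape (2, 4, 2). The nested loop below is the fully expanded equivalent, so the snippet runs without importing the module; fn and the numbers are arbitrary:

import numpy as np

def fn(v):                                        # maps shape (3,) -> shape (2,)
    return np.array([v.sum(), v.prod()])

x = np.arange(24, dtype=float).reshape(2, 4, 3)   # batch shape (2, 4), in_dims=1
out = np.stack([np.stack([fn(x[i, j]) for j in range(4)]) for i in range(2)])
print(out.shape)                                  # (2, 4, 2) -- same as batch_apply(fn, x)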
billpmurphy/hask
hask/Data/Foldable.py
null
python
def null(t): return Foldable[t].null(t)
null :: Foldable t => t a -> Bool Test whether the structure is empty.
https://github.com/billpmurphy/hask/blob/4609cc8d9d975f51b6ecdbd33640cdffdc28f953/hask/Data/Foldable.py#L141-L148
import functools import operator from ..lang import sig from ..lang import H from ..lang import t from ..lang import L from ..lang import Typeclass from ..lang import build_instance from ..lang import is_builtin from ..lang import List from ..lang import instance import List as DL from ..Control.Applicative import Applicative from ..Control.Monad import Monad from Eq import Eq from Num import Num from Maybe import Maybe from Ord import Ord from Ord import Ordering class Foldable(Typeclass): @classmethod def make_instance(typeclass, cls, foldr, foldr1=None, foldl=None, foldl_=None, foldl1=None, toList=None, null=None, length=None, elem=None, maximum=None, minimum=None, sum=None, product=None): if toList is None: if hasattr(cls, "__iter__"): toList = lambda x: L[iter(x)] else: toList = lambda t: foldr(lambda x, y: x ^ y, L[[]], t) foldr1 = (lambda x: DL.foldr1(toList(x))) if foldr1 is None else foldr1 foldl = (lambda x: DL.foldl(toList(x))) if foldl is None else foldl foldl_ = (lambda x: DL.foldl_(toList(x))) if foldl_ is None else foldl_ foldl1 = (lambda x: DL.foldl1(toList(x))) if foldl1 is None else foldl1 null = (lambda x: DL.null(toList(x))) if null is None else null length = (lambda x: DL.length(toList(x))) if length is None else length elem = (lambda x: DL.length(toList(x))) if length is None else length mi = (lambda x: DL.minimum(toList(x))) if minimum is None else minimum ma = (lambda x: DL.maximum(toList(x))) if maximum is None else maximum sum = (lambda x: DL.sum(toList(x))) if sum is None else sum p = (lambda x: DL.product(toList(x))) if product is None else product attrs = {"foldr":foldr, "foldr1":foldr1, "foldl":foldl, "foldl_":foldl_, "foldl1":foldl1, "toList":toList, "null":null, "length":length, "elem":elem, "maximum":ma, "minimum":mi, "sum":sum, "product":p} build_instance(Foldable, cls, attrs) if not hasattr(cls, "__len__") and not is_builtin(cls): cls.__len__ = length if not hasattr(cls, "__iter__") and not is_builtin(cls): cls.__iter__ = lambda x: iter(toList(x)) return @sig(H[(Foldable, "t")]/ (H/ "a" >> "b" >> "b") >> "b" >> t("t", "a") >> "b") def foldr(f, z, t): return Foldable[t].foldr(f, z, t) @sig(H[(Foldable, "t")]/ (H/ "a" >> "a" >> "a") >> t("t", "a") >> "a") def foldr1(f, t): return Foldable[t].foldr(f, t) @sig(H[(Foldable, "t")]/ (H/ "a" >> "a" >> "b") >> "b" >> t("t", "a") >> "b") def foldl(f, z, t): return Foldable[t].foldl(f, z, t) @sig(H[(Foldable, "t")]/ (H/ "a" >> "a" >> "b") >> "b" >> t("t", "a") >> "b") def foldl_(f, z, t): return Foldable[t].foldl_(f, z, t) @sig(H[(Foldable, "t")]/ (H/ "a" >> "a" >> "a") >> t("t", "a") >> "a") def foldl1(f, t): Foldable[t].foldl1(f, t) @sig(H[(Foldable, "t")]/ t("t", "a") >> ["a"]) def toList(t): return Foldable[t].toList(t) @sig(H[(Foldable, "t")]/ t("t", "a") >> bool)
BSD 2-Clause Simplified License
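null above delegates to the Foldable instance registered for the argument's type. A self-contained, simplified sketch of that typeclass-dispatch shape (a plain dict registry, not hask's actual machinery):

# Registry of per-type implementations; real hask builds these via make_instance.
_instances = {list: {"null": lambda xs: len(xs) == 0}}

def null(t):
    return _instances[type(t)]["null"](t)

print(null([]), null([1, 2]))  # True False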
cadair/parfive
parfive/downloader.py
Downloader._get_http
python
async def _get_http(self, session, *, url, filepath_partial, chunksize=None, file_pb=None, token, overwrite, timeouts, max_splits=5, **kwargs): if chunksize is None: chunksize = self.default_chunk_size timeout = aiohttp.ClientTimeout(**timeouts) try: scheme = urllib.parse.urlparse(url).scheme if 'HTTP_PROXY' in os.environ and scheme == 'http': kwargs['proxy'] = os.environ['HTTP_PROXY'] elif 'HTTPS_PROXY' in os.environ and scheme == 'https': kwargs['proxy'] = os.environ['HTTPS_PROXY'] async with session.get(url, timeout=timeout, **kwargs) as resp: parfive.log.debug("%s request made to %s with headers=%s", resp.request_info.method, resp.request_info.url, resp.request_info.headers) parfive.log.debug("Response received from %s with headers=%s", resp.request_info.url, resp.headers) if resp.status != 200: raise FailedDownload(filepath_partial, url, resp) else: filepath, skip = get_filepath(filepath_partial(resp, url), overwrite) if skip: parfive.log.debug("File %s already exists and overwrite is False; skipping download.", filepath) return str(filepath) if callable(file_pb): file_pb = file_pb(position=token.n, unit='B', unit_scale=True, desc=filepath.name, leave=False, total=get_http_size(resp)) else: file_pb = None downloaded_chunk_queue = asyncio.Queue() download_workers = [] writer = asyncio.create_task( self._write_worker(downloaded_chunk_queue, file_pb, filepath)) if not DISABLE_RANGE and max_splits and resp.headers.get('Accept-Ranges', None) == "bytes": content_length = int(resp.headers['Content-length']) split_length = content_length // max_splits ranges = [ [start, start + split_length] for start in range(0, content_length, split_length) ] ranges[-1][1] = '' for _range in ranges: download_workers.append( asyncio.create_task(self._http_download_worker( session, url, chunksize, _range, timeout, downloaded_chunk_queue, **kwargs )) ) else: download_workers.append( asyncio.create_task(self._http_download_worker( session, url, chunksize, None, timeout, downloaded_chunk_queue, **kwargs )) ) await asyncio.gather(*download_workers) await downloaded_chunk_queue.join() writer.cancel() return str(filepath) except Exception as e: raise FailedDownload(filepath_partial, url, e)
Read the file from the given url into the filename given by ``filepath_partial``. Parameters ---------- session : `aiohttp.ClientSession` The `aiohttp.ClientSession` to use to retrieve the files. url : `str` The url to retrieve. filepath_partial : `callable` A function to call which returns the filepath to save the url to. Takes two arguments ``resp, url``. chunksize : `int` The number of bytes to read into the file at a time. file_pb : `tqdm.tqdm` or `False` Should progress bars be displayed for each file downloaded. token : `parfive.downloader.Token` A token for this download slot. max_splits: `int` Number of maximum concurrent connections per file. kwargs : `dict` Extra keyword arguments are passed to `aiohttp.ClientSession.get`. Returns ------- `str` The name of the file saved.
https://github.com/cadair/parfive/blob/a2d6c727c1341c0e2e60d8673bb97ea3d875896f/parfive/downloader.py#L464-L565
import os import sys import asyncio import logging import pathlib import warnings import contextlib import urllib.parse from functools import partial, lru_cache from concurrent.futures import ThreadPoolExecutor import aiohttp from tqdm import tqdm, tqdm_notebook import parfive from .results import Results from .utils import ( FailedDownload, Token, _QueueList, default_name, get_filepath, get_ftp_size, get_http_size, in_notebook, run_in_thread, ) try: import aioftp except ImportError: aioftp = None try: import aiofiles except ImportError: aiofiles = None SERIAL_MODE = "PARFIVE_SINGLE_DOWNLOAD" in os.environ DISABLE_RANGE = "PARFIVE_DISABLE_RANGE" in os.environ or SERIAL_MODE __all__ = ['Downloader'] class Downloader: def __init__(self, max_conn=5, progress=True, file_progress=True, loop=None, notebook=None, overwrite=False, headers=None, use_aiofiles=False): if loop: warnings.warn('The loop argument is no longer used, and will be ' 'removed in a future release.') self.max_conn = max_conn if not SERIAL_MODE else 1 self._init_queues() if notebook is None: notebook = in_notebook() self.progress = progress self.file_progress = file_progress if self.progress else False self.tqdm = tqdm if not notebook else tqdm_notebook self.overwrite = overwrite self.headers = headers if headers is None or 'User-Agent' not in headers: self.headers = { 'User-Agent': f"parfive/{parfive.__version__} aiohttp/{aiohttp.__version__} python/{sys.version[:5]}"} self._use_aiofiles = use_aiofiles def _init_queues(self): self.http_queue = _QueueList() self.ftp_queue = _QueueList() def _generate_tokens(self): queue = asyncio.Queue(maxsize=self.max_conn) for i in range(self.max_conn): queue.put_nowait(Token(i + 1)) return queue @property @lru_cache() def use_aiofiles(self): if aiofiles is None: return False if "PARFIVE_OVERWRITE_ENABLE_AIOFILES" in os.environ: return True return self._use_aiofiles @property @lru_cache() def default_chunk_size(self): return 1024 if self.use_aiofiles else 100 @property def queued_downloads(self): return len(self.http_queue) + len(self.ftp_queue) def enqueue_file(self, url, path=None, filename=None, overwrite=None, **kwargs): overwrite = overwrite or self.overwrite if path is None and filename is None: raise ValueError("Either path or filename must be specified.") elif path is None: path = './' path = pathlib.Path(path) if not filename: filepath = partial(default_name, path) elif callable(filename): filepath = filename else: def filepath(*args): return path / filename scheme = urllib.parse.urlparse(url).scheme if scheme in ('http', 'https'): get_file = partial(self._get_http, url=url, filepath_partial=filepath, overwrite=overwrite, **kwargs) self.http_queue.append(get_file) elif scheme == 'ftp': if aioftp is None: raise ValueError("The aioftp package must be installed to download over FTP.") get_file = partial(self._get_ftp, url=url, filepath_partial=filepath, overwrite=overwrite, **kwargs) self.ftp_queue.append(get_file) else: raise ValueError("URL must start with either 'http' or 'ftp'.") @staticmethod def _run_in_loop(coro): try: loop = asyncio.get_running_loop() except RuntimeError: loop = None if loop and loop.is_running(): aio_pool = ThreadPoolExecutor(1) new_loop = asyncio.new_event_loop() return run_in_thread(aio_pool, new_loop, coro) return asyncio.run(coro) @staticmethod def _configure_debug(): sh = logging.StreamHandler() sh.setLevel(logging.DEBUG) formatter = logging.Formatter('%(name)s - %(levelname)s - %(message)s') sh.setFormatter(formatter) parfive.log.addHandler(sh) 
parfive.log.setLevel(logging.DEBUG) aiohttp_logger = logging.getLogger('aiohttp.client') aioftp_logger = logging.getLogger('aioftp.client') aioftp_logger.addHandler(sh) aioftp_logger.setLevel(logging.DEBUG) aiohttp_logger.addHandler(sh) aiohttp_logger.setLevel(logging.DEBUG) parfive.log.debug("Configured parfive to run with debug logging...") async def run_download(self, timeouts=None): if "PARFIVE_DEBUG" in os.environ: self._configure_debug() timeouts = timeouts or {"total": os.environ.get("PARFIVE_TOTAL_TIMEOUT", 5 * 60), "sock_read": os.environ.get("PARFIVE_SOCK_READ_TIMEOUT", 90)} total_files = self.queued_downloads done = set() with self._get_main_pb(total_files) as main_pb: if len(self.http_queue): done.update(await self._run_http_download(main_pb, timeouts)) if len(self.ftp_queue): done.update(await self._run_ftp_download(main_pb, timeouts)) dl_results = await asyncio.gather(*done, return_exceptions=True) results = Results() for res in dl_results: if isinstance(res, FailedDownload): results.add_error(res.filepath_partial, res.url, res.exception) parfive.log.info(f'{res.url} failed to download with exception\n' f'{res.exception}') elif isinstance(res, Exception): raise res else: results.append(res) return results def download(self, timeouts=None): return self._run_in_loop(self.run_download(timeouts)) def retry(self, results): self._init_queues() for err in results.errors: self.enqueue_file(err.url, filename=err.filepath_partial) new_res = self.download() results += new_res results._errors = new_res._errors return results @classmethod def simple_download(cls, urls, *, path="./", overwrite=None): dl = cls() for url in urls: dl.enqueue_file(url, path=path, overwrite=overwrite) return dl.download() def _get_main_pb(self, total): if self.progress: return self.tqdm(total=total, unit='file', desc="Files Downloaded", position=0) else: return contextlib.contextmanager(lambda: iter([None]))() async def _run_http_download(self, main_pb, timeouts): async with aiohttp.ClientSession(headers=self.headers) as session: self._generate_tokens() futures = await self._run_from_queue( self.http_queue.generate_queue(), self._generate_tokens(), main_pb, session=session, timeouts=timeouts) done, _ = await asyncio.wait(futures) return done async def _run_ftp_download(self, main_pb, timeouts): futures = await self._run_from_queue( self.ftp_queue.generate_queue(), self._generate_tokens(), main_pb, timeouts=timeouts) done, _ = await asyncio.wait(futures) return done async def _run_from_queue(self, queue, tokens, main_pb, *, session=None, timeouts): futures = [] while not queue.empty(): get_file = await queue.get() token = await tokens.get() file_pb = self.tqdm if self.file_progress else False future = asyncio.create_task(get_file(session, token=token, file_pb=file_pb, timeouts=timeouts)) def callback(token, future, main_pb): tokens.put_nowait(token) if main_pb and not future.exception(): main_pb.update(1) future.add_done_callback(partial(callback, token, main_pb=main_pb)) futures.append(future) return futures
MIT License
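A usage sketch for the Downloader that _get_http belongs to, built only from the public methods visible in the context above (the constructor, enqueue_file and download); the URL and path are placeholders and a working network connection is assumed:

import parfive

dl = parfive.Downloader(max_conn=5, progress=True)
dl.enqueue_file("https://example.com/data.fits", path="./downloads")
results = dl.download()   # returns a Results object listing files and errors
print(results)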
commvault/cvpysdk
cvpysdk/subclients/fssubclient.py
FileSystemSubclient.block_level_backup_option
python
def block_level_backup_option(self, block_level_backup_value): self._set_subclient_properties( "_fsSubClientProp['blockLevelBackup']", block_level_backup_value)
Creates the JSON with the specified block-level flag to pass to the API to update the block-level option of this File System Subclient. Args: block_level_backup_value (bool) -- Specifies whether to enable or disable the block-level option
https://github.com/commvault/cvpysdk/blob/66df30e6e31d619812b7756cb4f7e130b220a08f/cvpysdk/subclients/fssubclient.py#L713-L724
from __future__ import unicode_literals from base64 import b64encode from past.builtins import basestring from ..client import Client from ..subclient import Subclient from ..exception import SDKException from ..job import Job def _nested_dict(source, update_dict): for key, value in update_dict.items(): if isinstance(value, dict) and value: source[key] = _nested_dict(source.get(key, {}), value) else: source[key] = value return source class FileSystemSubclient(Subclient): def _get_subclient_properties(self): super(FileSystemSubclient, self)._get_subclient_properties() self._impersonateUser={} if 'impersonateUser' in self._subclient_properties: self._impersonateUser = self._subclient_properties['impersonateUser'] if 'fsSubClientProp' in self._subclient_properties: self._fsSubClientProp = self._subclient_properties['fsSubClientProp'] if 'content' in self._subclient_properties: self._content = self._subclient_properties['content'] self._global_filter_status_dict = { 'OFF': 0, 'ON': 1, 'USE CELL LEVEL POLICY': 2 } def _get_subclient_properties_json(self): subclient_json = { "subClientProperties": { "impersonateUser": self._impersonateUser, "proxyClient": self._proxyClient, "subClientEntity": self._subClientEntity, "fsSubClientProp": self._fsSubClientProp, "content": self._content, "commonProperties": self._commonProperties, "contentOperationType": 1 } } if 'isDDBSubclient' in self._fs_subclient_prop: if self._fs_subclient_prop['isDDBSubclient']: del subclient_json["subClientProperties"]["content"] return subclient_json @property def _fs_subclient_prop(self): return self._fsSubClientProp @_fs_subclient_prop.setter def _fs_subclient_prop(self, value): if not isinstance(value, dict): raise SDKException('Subclient', '101') _nested_dict(self._fsSubClientProp, value) if 'enableOnePass' in self._fsSubClientProp: del self._fsSubClientProp['enableOnePass'] if 'isTurboSubclient' in self._commonProperties: del self._commonProperties['isTurboSubclient'] def _set_content(self, content=None, filter_content=None, exception_content=None): if content is None: content = self.content if filter_content is None: filter_content = self.filter_content if exception_content is None: exception_content = self.exception_content update_content = [] for path in content: file_system_dict = { "path": path } update_content.append(file_system_dict) for path in filter_content: filter_dict = { "excludePath": path } update_content.append(filter_dict) for path in exception_content: exception_dict = { "includePath": path } update_content.append(exception_dict) self._set_subclient_properties("_content", update_content) def _common_backup_options(self, options): final_dict = super(FileSystemSubclient, self)._common_backup_options(options) common_options = { "jobDescription": options.get('job_description', ""), "jobRetryOpts": { "killRunningJobWhenTotalRunningTimeExpires": options.get( 'kill_running_job_when_total_running_time_expires', False), "numberOfRetries": options.get('number_of_retries', 0), "enableNumberOfRetries": options.get('enable_number_of_retries', False), "runningTime": { "enableTotalRunningTime": options.get('enable_total_running_time', False), "totalRunningTime": options.get('total_running_time', 3600) } }, "startUpOpts": { "startInSuspendedState": options.get('start_in_suspended_state', False), "useDefaultPriority": options.get('use_default_priority', True), "priority": options.get('priority', 166) } } return common_options def _advanced_backup_options(self, options): final_dict = super(FileSystemSubclient, 
self)._advanced_backup_options(options) if 'on_demand_input' in options and options['on_demand_input'] is not None: final_dict['onDemandInputFile'] = options['on_demand_input'] if 'directive_file' in options and options['directive_file'] is not None: final_dict['onDemandInputFile'] = options['directive_file'] if 'adhoc_backup' in options and options['adhoc_backup'] is not None: final_dict['adHocBackup'] = options['adhoc_backup'] if 'inline_bkp_cpy' in options or 'skip_catalog' in options: final_dict['dataOpt'] = { 'createBackupCopyImmediately': options.get('inline_bkp_cpy', False), 'skipCatalogPhaseForSnapBackup': options.get('skip_catalog', False)} if 'adhoc_backup_contents' in options and options['adhoc_backup_contents'] is not None: if not isinstance(options['adhoc_backup_contents'], list): raise SDKException('Subclient', '101') final_dict['adHocBkpContents'] = { 'selectedAdHocPaths': options['adhoc_backup_contents'] } if 'use_multi_stream' in options and options['use_multi_stream']: multi_stream_opts = { 'useMultiStream': options.get('use_multi_stream', False), 'useMaximumStreams': options.get('use_maximum_streams', True), 'maxNumberOfStreams': options.get('max_number_of_streams', 1) } if 'dataOpt' in final_dict and isinstance(final_dict['dataOpt'], dict): final_dict['dataOpt'].update(multi_stream_opts) else: final_dict['dataOpt'] = multi_stream_opts return final_dict @property def _vlr_restore_options_dict(self): physical_volume = 'PHYSICAL_VOLUME' vlr_options_dict = { "volumeRstOption": { "volumeLeveRestore": True, "volumeLevelRestoreType": physical_volume }, "virtualServerRstOption": { "isDiskBrowse": False, "isVolumeBrowse": True, "isBlockLevelReplication": False } } return vlr_options_dict @property def content(self): content = [] for path in self._content: if 'path' in path: content.append(path["path"]) return content @content.setter def content(self, subclient_content): if isinstance(subclient_content, list) and subclient_content != []: self._set_content(content=subclient_content) else: raise SDKException( 'Subclient', '102', 'Subclient content should be a list value and not empty') @property def filter_content(self): _filter_content = [] for path in self._content: if 'excludePath' in path: _filter_content.append(path["excludePath"]) return _filter_content @filter_content.setter def filter_content(self, value): if isinstance(value, list) and value != []: self._set_content(filter_content=value) else: raise SDKException( 'Subclient', '102', 'Subclient filter content should be a list value and not empty') @property def exception_content(self): _exception_content = [] for path in self._content: if 'includePath' in path: _exception_content.append(path["includePath"]) return _exception_content @exception_content.setter def exception_content(self, value): if isinstance(value, list) and value != []: self._set_content(exception_content=value) else: raise SDKException( 'Subclient', '102', 'Subclient exception content should be a list value and not empty') @property def scan_type(self): return self._fsSubClientProp['scanOption'] @scan_type.setter def scan_type(self, scan_type_value): if isinstance(scan_type_value, int) and scan_type_value >= 1 and scan_type_value <= 3: self._set_subclient_properties("_fsSubClientProp['scanOption']", scan_type_value) else: raise SDKException('Subclient', '102', 'Invalid scan type') @property def trueup_option(self): return self._fsSubClientProp['isTrueUpOptionEnabledForFS'] @trueup_option.setter def trueup_option(self, trueup_option_value): 
self._set_subclient_properties( "_fsSubClientProp['isTrueUpOptionEnabledForFS']", trueup_option_value ) def run_backup_copy(self): request_json = { "taskInfo": { "associations": [ { "clientName": self._client_object._client_name, "subclientName": self._subclient_name, "backupsetName": self._backupset_object._backupset_name, "storagePolicyName": self.storage_policy, "_type_": 17, "appName": self._agent_object._agent_name } ], "task": { "taskType": 1, "initiatedFrom": 1, "taskId": 0, "taskFlags": { "disabled": False } }, "subTasks": [ { "subTaskOperation": 1, "subTask": { "subTaskType": 1, "operationType": 4028 }, "options": { "adminOpts": { "snapToTapeOption": { "allowMaximum": True, "noofJobsToRun": 1 } } } } ] } } backup_copy = self._commcell_object._services['CREATE_TASK'] flag, response = self._commcell_object._cvpysdk_object.make_request( 'POST', backup_copy, request_json) if flag: if response.json(): if "jobIds" in response.json(): return Job(self._commcell_object, response.json()['jobIds'][0]) elif "errorCode" in response.json(): error_message = response.json()['errorMessage'] o_str = 'Backup copy job failed\nError: "{0}"'.format(error_message) raise SDKException('Subclient', '118', o_str) else: raise SDKException('Subclient', '118', 'Failed to run the backup copy job') else: raise SDKException('Response', '102') else: response_string = self._commcell_object._update_response_(response.text) raise SDKException('Response', '101', response_string) @property def backup_retention(self): return self._fsSubClientProp['backupRetention'] @backup_retention.setter def backup_retention(self, value): if isinstance(value, bool): if value: new_value = { 'extendStoragePolicyRetention': True, 'backupRetention': True} else: new_value = {'backupRetention': False} self._set_subclient_properties("_fs_subclient_prop", new_value) else: raise SDKException( 'Subclient', '102', 'argument should only be boolean') @property def block_level_backup_option(self): return self._fsSubClientProp['blockLevelBackup'] @block_level_backup_option.setter
Apache License 2.0
microstrategy/mstrio-py
admin_demo_scripts/delete_subscriptions_of_departed_users.py
delete_subscriptions_of_departed_users
python
def delete_subscriptions_of_departed_users(connection: "Connection") -> None: response = get_projects(connection, whitelist=[('ERR014', 403)]) prjcts = response.json() if response.ok else [] all_usrs = list_users(connection=connection) dsbld_usrs = [u for u in all_usrs if not u.enabled] for prjct in prjcts: project_id = prjct['id'] sub_mngr = SubscriptionManager(connection=connection, project_id=project_id) for usr in dsbld_usrs: subs = sub_mngr.list_subscriptions(owner={'id': usr.id}) msg = f"subscriptions of user with ID: {usr.id}" msg += f" in project {prjct['name']} with ID: {prjct['id']}" if sub_mngr.delete(subscriptions=subs, force=True): print("All " + msg + " were deleted.") else: print("Not all " + msg + " were deleted or there were no subscriptions.")
Delete all subscriptions in all projects whose owners are departed (disabled) users. Args: connection: MicroStrategy connection object returned by `connection.Connection()`
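A minimal usage sketch, assuming a reachable MicroStrategy Library environment; the URL and credentials below are placeholders, and `Connection` comes from `mstrio.connection` as in the imports listed with this record.

from mstrio.connection import Connection

# Hypothetical environment details -- replace with real values.
conn = Connection(base_url="https://demo.example.com/MicroStrategyLibrary/api",
                  username="administrator", password="secret")
delete_subscriptions_of_departed_users(conn)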
https://github.com/microstrategy/mstrio-py/blob/720af7e673ed62462366d1406e5ea14792461e94/admin_demo_scripts/delete_subscriptions_of_departed_users.py#L7-L35
from mstrio.users_and_groups import list_users from mstrio.api.projects import get_projects from mstrio.distribution_services.subscription.subscription_manager import SubscriptionManager from mstrio.connection import Connection
Apache License 2.0
livid/v2ex-gae
mapreduce/lib/blobstore/blobstore.py
fetch_data
python
def fetch_data(blob, start_index, end_index): if isinstance(blob, BlobInfo): blob = blob.key() return blobstore.fetch_data(blob, start_index, end_index)
Fetch data for blob. Fetches a fragment of a blob up to MAX_BLOB_FETCH_SIZE in length. Attempting to fetch a fragment that extends beyond the boundaries of the blob will return the amount of data from start_index until the end of the blob, which will be a smaller size than requested. Requesting a fragment which is entirely outside the boundaries of the blob will return an empty string. Attempting to fetch a negative index will raise an exception. Args: blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of blob to fetch data from. start_index: Start index of blob data to fetch. May not be negative. end_index: End index (inclusive) of blob data to fetch. Must be >= start_index. Returns: str containing partial data of blob. If the indexes are legal but outside the boundaries of the blob, will return an empty string. Raises: TypeError if start_index or end_index are not indexes. Also when blob is not a string, BlobKey or BlobInfo. DataIndexOutOfRangeError when start_index < 0 or end_index < start_index. BlobFetchSizeTooLargeError when the requested blob fragment is larger than MAX_BLOB_FETCH_SIZE. BlobNotFoundError when blob does not exist.
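A short usage sketch based on the docstring above; `blob_key` is a hypothetical BlobKey pointing at an existing blob.

# Fetch the first kilobyte of the blob (the end index is inclusive).
first_kb = fetch_data(blob_key, 0, 1023)

# A request that runs past the end of the blob simply returns the
# remaining bytes (possibly an empty string), as long as the requested
# fragment stays within MAX_BLOB_FETCH_SIZE.
tail = fetch_data(blob_key, 1024, 1024 + MAX_BLOB_FETCH_SIZE - 1)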
https://github.com/livid/v2ex-gae/blob/32be3a77d535e7c9df85a333e01ab8834d0e8581/mapreduce/lib/blobstore/blobstore.py#L439-L470
import cgi import email import os from google.appengine.api import datastore from google.appengine.api import datastore_errors from google.appengine.api import datastore_types from google.appengine.api.blobstore import blobstore from google.appengine.ext import db __all__ = ['BLOB_INFO_KIND', 'BLOB_KEY_HEADER', 'BLOB_RANGE_HEADER', 'BlobFetchSizeTooLargeError', 'BlobInfo', 'BlobInfoParseError', 'BlobKey', 'BlobNotFoundError', 'BlobReferenceProperty', 'BlobReader', 'DataIndexOutOfRangeError', 'Error', 'InternalError', 'MAX_BLOB_FETCH_SIZE', 'UPLOAD_INFO_CREATION_HEADER', 'create_upload_url', 'delete', 'fetch_data', 'get', 'parse_blob_info'] Error = blobstore.Error InternalError = blobstore.InternalError BlobFetchSizeTooLargeError = blobstore.BlobFetchSizeTooLargeError BlobNotFoundError = blobstore.BlobNotFoundError _CreationFormatError = blobstore._CreationFormatError DataIndexOutOfRangeError = blobstore.DataIndexOutOfRangeError BlobKey = blobstore.BlobKey create_upload_url = blobstore.create_upload_url delete = blobstore.delete class BlobInfoParseError(Error): BLOB_INFO_KIND = blobstore.BLOB_INFO_KIND BLOB_KEY_HEADER = blobstore.BLOB_KEY_HEADER BLOB_RANGE_HEADER = blobstore.BLOB_RANGE_HEADER MAX_BLOB_FETCH_SIZE = blobstore.MAX_BLOB_FETCH_SIZE UPLOAD_INFO_CREATION_HEADER = blobstore.UPLOAD_INFO_CREATION_HEADER class _GqlQuery(db.GqlQuery): def __init__(self, query_string, model_class, *args, **kwds): from google.appengine.ext import gql app = kwds.pop('_app', None) self._proto_query = gql.GQL(query_string, _app=app, namespace='') super(db.GqlQuery, self).__init__(model_class, namespace='') self.bind(*args, **kwds) class BlobInfo(object): _unindexed_properties = frozenset() @property def content_type(self): return self.__get_value('content_type') @property def creation(self): return self.__get_value('creation') @property def filename(self): return self.__get_value('filename') @property def size(self): return self.__get_value('size') def __init__(self, entity_or_blob_key, _values=None): if isinstance(entity_or_blob_key, datastore.Entity): self.__entity = entity_or_blob_key self.__key = BlobKey(entity_or_blob_key.key().name()) elif isinstance(entity_or_blob_key, BlobKey): self.__entity = _values self.__key = entity_or_blob_key else: TypeError('Must provide Entity or BlobKey') @classmethod def from_entity(cls, entity): return BlobInfo(entity) @classmethod def properties(cls): return set(('content_type', 'creation', 'filename', 'size')) def __get_value(self, name): if self.__entity is None: self.__entity = datastore.Get( datastore_types.Key.from_path( self.kind(), str(self.__key), namespace='')) try: return self.__entity[name] except KeyError: raise AttributeError(name) def key(self): return self.__key def delete(self): delete(self.key()) @classmethod def get(cls, blob_keys): blob_keys = cls.__normalize_and_convert_keys(blob_keys) try: entities = datastore.Get(blob_keys) except datastore_errors.EntityNotFoundError: return None if isinstance(entities, datastore.Entity): return BlobInfo(entities) else: references = [] for entity in entities: if entity is not None: references.append(BlobInfo(entity)) else: references.append(None) return references @classmethod def all(cls): return db.Query(model_class=cls, namespace='') @classmethod def __factory_for_kind(cls, kind): if kind == BLOB_INFO_KIND: return BlobInfo raise ValueError('Cannot query for kind %s' % kind) @classmethod def gql(cls, query_string, *args, **kwds): return _GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string), cls, *args, 
**kwds) @classmethod def kind(self): return BLOB_INFO_KIND @classmethod def __normalize_and_convert_keys(cls, keys): if isinstance(keys, (list, tuple)): multiple = True keys = list(keys) else: multiple = False keys = [keys] for index, key in enumerate(keys): if not isinstance(key, (basestring, BlobKey)): raise datastore_errors.BadArgumentError( 'Expected str or BlobKey; received %s (a %s)' % ( key, datastore.typename(key))) keys[index] = datastore.Key.from_path(cls.kind(), str(key), namespace='') if multiple: return keys else: return keys[0] def get(blob_key): return BlobInfo.get(blob_key) def parse_blob_info(field_storage): if field_storage is None: return None field_name = field_storage.name def get_value(dict, name): value = dict.get(name, None) if value is None: raise BlobInfoParseError( 'Field %s has no %s.' % (field_name, name)) return value filename = get_value(field_storage.disposition_options, 'filename') blob_key = BlobKey(get_value(field_storage.type_options, 'blob-key')) upload_content = email.message_from_file(field_storage.file) content_type = get_value(upload_content, 'content-type') size = get_value(upload_content, 'content-length') creation_string = get_value(upload_content, UPLOAD_INFO_CREATION_HEADER) try: size = int(size) except (TypeError, ValueError): raise BlobInfoParseError( '%s is not a valid value for %s size.' % (size, field_name)) try: creation = blobstore._parse_creation(creation_string, field_name) except blobstore._CreationFormatError, err: raise BlobInfoParseError(str(err)) return BlobInfo(blob_key, {'content_type': content_type, 'creation': creation, 'filename': filename, 'size': size, }) class BlobReferenceProperty(db.Property): data_type = BlobInfo def get_value_for_datastore(self, model_instance): blob_info = getattr(model_instance, self.name) if blob_info is None: return None return blob_info.key() def make_value_from_datastore(self, value): if value is None: return None return BlobInfo(value) def validate(self, value): if isinstance(value, (basestring)): value = BlobInfo(BlobKey(value)) elif isinstance(value, BlobKey): value = BlobInfo(value) return super(BlobReferenceProperty, self).validate(value)
BSD 3-Clause New or Revised License
alexa/alexa-apis-for-python
ask-sdk-model/ask_sdk_model/interfaces/audioplayer/clear_queue_directive.py
ClearQueueDirective.__init__
python
def __init__(self, clear_behavior=None): self.__discriminator_value = "AudioPlayer.ClearQueue" self.object_type = self.__discriminator_value super(ClearQueueDirective, self).__init__(object_type=self.__discriminator_value) self.clear_behavior = clear_behavior
:param clear_behavior: :type clear_behavior: (optional) ask_sdk_model.interfaces.audioplayer.clear_behavior.ClearBehavior
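A hedged construction example; the enum member name below is taken from the ask_sdk_model ClearBehavior definition and may differ between SDK versions.

from ask_sdk_model.interfaces.audioplayer.clear_behavior import ClearBehavior

# Clear the whole queue, including the currently playing stream.
directive = ClearQueueDirective(clear_behavior=ClearBehavior.CLEAR_ALL)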
https://github.com/alexa/alexa-apis-for-python/blob/bfe5e694daaca71bfb1a4199ca8d2514f1cac6c9/ask-sdk-model/ask_sdk_model/interfaces/audioplayer/clear_queue_directive.py#L48-L59
import pprint import re import six import typing from enum import Enum from ask_sdk_model.directive import Directive if typing.TYPE_CHECKING: from typing import Dict, List, Optional, Union, Any from datetime import datetime from ask_sdk_model.interfaces.audioplayer.clear_behavior import ClearBehavior as ClearBehavior_94e71750 class ClearQueueDirective(Directive): deserialized_types = { 'object_type': 'str', 'clear_behavior': 'ask_sdk_model.interfaces.audioplayer.clear_behavior.ClearBehavior' } attribute_map = { 'object_type': 'type', 'clear_behavior': 'clearBehavior' } supports_multiple_types = False
Apache License 2.0
microsoft/restler-fuzzer
restler/checkers/payload_body_bucketing.py
PayloadBodyBuckets.add_bug
python
def add_bug(self, request, new_request_data): new_body = utils.get_response_body(new_request_data) with open(os.path.join(logger.LOGS_DIR, 'payload_buckets.txt'), 'a', encoding='utf-8') as file: if request.method_endpoint_hex_definition not in self._buckets: self._buckets[request.method_endpoint_hex_definition] = set() file.write(f'{request.method} {request.endpoint_no_dynamic_objects}\n') error_str = self._get_error_str(request, new_body) or 'Other' if error_str not in self._buckets[request.method_endpoint_hex_definition]: if error_str == INVALID_JSON_STR: new_body = new_request_data.split(DELIM)[-1] self._buckets[request.method_endpoint_hex_definition].add(error_str) file.write(f'\t{error_str}\n\t{new_body}\n\n') return (error_str, new_body) return None
Adds a bug to the payload body buckets log if it is unique. @param request: The request being fuzzed @type request: Request @param new_request_data: The request data of the new request that includes the fuzzed payload body. @type new_request_data: Str @return: Tuple containing the error string and the response body @rtype : Tuple(str, str) or None
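A rough sketch of how a checker might record a bucket; `fuzzed_request` stands in for RESTler's Request object and is purely hypothetical here, as is `new_request_data`.

buckets = PayloadBodyBuckets()

# add_bug() returns (error_str, body) the first time an error class is
# seen for this endpoint, and None for subsequent duplicates.
result = buckets.add_bug(fuzzed_request, new_request_data)
if result is not None:
    error_str, body = result
    print(f'New payload body bucket: {error_str}')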
https://github.com/microsoft/restler-fuzzer/blob/d74a267467a2d43fb37c8a16754d0b28e80b649a/restler/checkers/payload_body_bucketing.py#L17-L48
import os import utils.logger as logger import engine.fuzzing_parameters.fuzzing_utils as utils from engine.transport_layer.messaging import DELIM INVALID_JSON_STR = 'InvalidJson' class PayloadBodyBuckets(): def __init__(self): self._buckets = dict()
MIT License
zxz7/zfundpredictor
ZFundETL.py
FundETL.read_files
python
def read_files(self, path, date): funds = pd.read_csv(path+f'funds_{date}.csv') funds = funds.set_index(pd.to_datetime(funds['Date']).dt.date).drop(columns='Date').sort_index() categorical = pd.read_csv(path+f'categorical_{date}.csv') categorical['fticker'] = [t for t in funds.columns if t not in ['sindex_r', 'tbond_d']] categorical.set_index('fticker', inplace=True) self.funds = funds self.categorical = categorical return funds, categorical
Read in existing `funds` and `categorical` datasets. `date`: a date string in the '%Y%m%d' format, matching the filename suffix written by save_files().
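For example, assuming the CSV files produced by save_files() sit in ./data/ and were written for 2020-07-31 (both the path and the date are placeholders):

etl = FundETL()
funds, categorical = etl.read_files('./data/', '20200731')
print(funds.shape, categorical.shape)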
https://github.com/zxz7/zfundpredictor/blob/0f0350efcfd57eb8cef20f6711911fd7582d775c/ZFundETL.py#L372-L387
import math from datetime import datetime, timedelta, date import re import pymysql import yfinance as yf import pandas as pd import numpy as np from matplotlib import pyplot as plt import seaborn as sns class FundETL: def __init__(self): self.industries = ['制造业', '金融业', '信息传输、软件和信息技术服务业', '房地产业', '交通运输、仓储和邮政业', '农、林、牧、渔业', '批发和零售业', '采矿业', '住宿和餐饮业', '租赁和商务服务业', '水利、环境和公共设施管理业', '文化、体育和娱乐业', '科学研究和技术服务业', '卫生和社会工作', '建筑业', '电力、热力、燃气及水生产和供应业', '教育', '综合'] sns.set_style("darkgrid") sns.set_context("notebook") def sql_queries(self): connection = pymysql.connect(host='localhost', user='root', password='root', db='funds', charset='utf8mb4') cursor = connection.cursor() sql = ''' CREATE OR REPLACE VIEW picked AS SELECT a.* FROM ( SELECT fticker FROM histories WHERE hdate >= '2015-01-05' AND r_d != 0 GROUP BY fticker HAVING COUNT(*) > CEIL(DATEDIFF(CURDATE(), '2016-01-05')*0.75)) f JOIN ( SELECT * FROM all_funds WHERE current_stocks >= 75 AND current_net_assets > 2) a ON a.fticker = f.fticker WHERE current_bonds < 25 OR current_bonds IS NULL; ''' cursor.execute(sql) connection.commit() sql = ''' SELECT h.fticker, h.hdate, accu_nav FROM histories h RIGHT JOIN picked p ON h.fticker = p.fticker WHERE h.hdate >= '2015-01-05'; ''' self.funds_sql = pd.read_sql(sql, connection) sql = ''' SELECT fticker, ftype, current_style, current_net_assets, current_stocks, current_bonds, current_cash, industry_1, industry_1_pct, industry_2, industry_2_pct, industry_3, industry_3_pct, industry_4, industry_4_pct, industry_5, industry_5_pct, manager_ranking FROM picked; ''' self.st_invariants = pd.read_sql(sql, connection) connection.close() cursor.close() def get_index(self, index_type='stock'): if index_type == 'stock': sindex = yf.download("000001.ss", start=str(self.funds_sql['hdate'].min()), end=str(self.funds_sql['hdate'].max()+timedelta(days=1))) sindex.loc['2019-12-19'] = [3017.15,3021.42,3007.99,3017.07,3017.07,208600] sindex.loc['2019-04-29'] = [3090.63,3107.76,3050.03,3062.50,3062.50,292100] sindex.loc['2019-04-30'] = [3052.62,3088.41,3052.62,3078.34,3078.34,222300] sindex['sindex_r'] = (sindex['Adj Close'] - sindex['Adj Close'].shift(1)) / sindex['Adj Close'].shift(1)*100 sindex['Date'] = sindex.index sindex = sindex.set_index(pd.to_datetime(sindex['Date']).dt.date).drop(columns='Date').sort_index() return sindex if index_type == 'bond': tbond = pd.read_csv(r'China 10-Year Bond Yield Historical Data.csv') tbond = tbond.set_index(pd.to_datetime(tbond['Date']).dt.date).sort_index() tbond['tbond_d'] = tbond['Change %'].str.rstrip('%').astype('float') / 100.0 tbond.drop(columns=['Date', 'Change %'], inplace=True) return tbond def find_missing_values(self, show_results=True): drop_tickers = self.funds_sql[self.funds_sql['accu_nav'].isnull()]['fticker'].unique() if show_results: n_col = math.ceil(len(drop_tickers)/2) fig = plt.figure(figsize=(2*n_col,4)) fig.suptitle('Funds With Missing Values in Historical Prices', fontsize=14) for i, ticker in enumerate(drop_tickers): _null = self.funds_sql[self.funds_sql['fticker'] == ticker]['accu_nav'].isnull().sum() _notnull = self.funds_sql[self.funds_sql['fticker'] == ticker]['accu_nav'].notnull().sum() ax = fig.add_subplot(2, n_col, i+1) fig.tight_layout() ax.pie([_null, _notnull], radius=1.1, wedgeprops=dict(width=0.2), colors=sns.color_palette('twilight_shifted', n_colors=2), autopct=lambda pct: '# NA: {:.0f}\n# All: {:.0f}'.format(_null, _null+_notnull) if int(pct) == int(_null/(_notnull+_null)*100) else '') plt.xlabel(ticker) print('Number of funds with missing 
values in *Historical Prices*:', len(drop_tickers)) drop_tickers2 = self.st_invariants[(self.st_invariants['industry_1'].isnull()) | (self.st_invariants['manager_ranking'].isnull())]['fticker'] print('Number of funds with missing values in *Short-Term Invariant Variables*:', len(drop_tickers2)) drop_tickers = set(drop_tickers) | set(drop_tickers2) print('Total number of funds to be dropped because of missing data:', len(drop_tickers)) plt.show() return drop_tickers def count_days(self, show_results=True): funds_length = self.funds_sql.groupby('fticker')['hdate'].count() count_per_length = funds_length.groupby(funds_length.values).count() max_count = count_per_length.max() rest = count_per_length.sum() - max_count most_common_length = count_per_length[count_per_length == max_count].index[0] tickers_common_length = funds_length[funds_length == most_common_length].index if show_results: fig, ax = plt.subplots(figsize=(3,3)) ax.pie([max_count, rest], wedgeprops=dict(width=0.15), radius=0.9, colors=sns.color_palette('twilight_shifted', n_colors=2), autopct=lambda pct: '{:.2f}%\n(# funds: {})'.format(pct, max_count) if pct>50 else '') plt.title('Available Length of Funds', fontsize=14) plt.legend([str(most_common_length)+' days', 'Other lengths'], loc='lower center', ncol=2) plt.show() return tickers_common_length def ticker_filter(self, show_results=True): drop_tickers = self.find_missing_values(show_results) tickers_common_length = self.count_days(show_results) return np.array([t for t in tickers_common_length if t not in drop_tickers]) def get_funds(self, selected_tickers, stock_index, bond_index=None, show_results=True): for ticker, histories in self.funds_sql.groupby('fticker'): if ticker in selected_tickers: if ticker == selected_tickers[0]: funds = pd.DataFrame(index=histories['hdate']) funds[ticker] = histories['accu_nav'].values fund_std = funds.apply(lambda x: x.std()) cutoff = 0.75 highly_volatile = fund_std[fund_std > cutoff] if show_results: fig, ax = plt.subplots(figsize=(10, 1.5)) sns.boxplot(data=fund_std, orient='h', color='mediumslateblue', width=0.3, ax=ax) ax.vlines(cutoff, -0.5, 0.5, linestyles='dashed', colors='orange') for order, sorting_idx in enumerate(highly_volatile.argsort()[::-1]): stv = highly_volatile[sorting_idx] stv_ticker = fund_std[fund_std == stv].index[0] arrowprops = {'arrowstyle':'simple,head_length=0.8,head_width=0.6,tail_width=0.3', 'ec':None, 'facecolor':'orange', 'connectionstyle':'arc3', 'shrinkA':0, 'shrinkB':5} if order%4 == 0: ax.text(stv-0.06, -0.1, stv_ticker) elif order%4 == 2: plt.annotate(stv_ticker, xy=(stv, 0), xytext=(stv-0.01, -0.3), arrowprops=arrowprops) elif order%4 == 1: ax.text(stv-0.06, 0.2, stv_ticker) else: plt.annotate(stv_ticker, xy=(stv, 0), xytext=(stv-0.04, 0.4), arrowprops=arrowprops) plt.yticks([0], ['STDEV']) plt.title('Volatility of Funds', fontsize=14) plt.show() new_cols = [c for c in funds.columns if c not in highly_volatile.index] funds = funds.loc[:, new_cols] if bond_index: funds_ = pd.concat([funds, stock_index, bond_index], axis=1, join='inner').dropna() else: funds_ = pd.concat([funds, stock_index], axis=1, join='inner').dropna() funds_.index.rename('Date', inplace=True) if show_results: print('Removing funds with excessive volatility:', [c for c in highly_volatile.index]) print('Dates further dropped:', [str(i) for i in funds.index if i not in funds_.index]) print('Final available funds:', funds.shape[1]) print('Final available days:', funds_.shape[0]) self.funds = funds_ return self.funds def 
build_categories(self): categorical = pd.DataFrame(index=self.st_invariants['fticker']) categorical['fund_type'] = self.st_invariants['ftype'].astype('category').values categorical['fund_style'] = self.st_invariants['current_style'].astype('category').values categorical['asset_size'] = pd.qcut(self.st_invariants['current_net_assets'].values, 4) categorical['ranking_score'] = pd.cut(self.st_invariants['manager_ranking'].values, [0,0.25,0.5,0.75,1]) for col in ['current_stocks', 'current_bonds', 'current_cash']: categorical[col] = (self.st_invariants[col]/100).fillna(0).values weighted_oh = [] for x in range(1,6): _oh = pd.get_dummies(self.st_invariants['industry_%s'%x].values) for ind in range(_oh.shape[1]): _oh.iloc[:,ind] = _oh.iloc[:,ind]*self.st_invariants['industry_%s_pct'%x].values/100 weighted_oh.append(_oh) industry_w = pd.DataFrame(index=self.st_invariants['fticker'], columns=self.industries).applymap(lambda x: 0) columns = [] for num, indust in enumerate(self.industries): for x in range(1,6): if indust in set(self.st_invariants['industry_%s'%x]): industry_w[indust] = industry_w[indust].values + weighted_oh[x-1][indust].values columns.append('ind_%s'%num) industry_w.columns = columns categorical = pd.concat([categorical, industry_w], axis=1) tickers_ = [t for t in self.funds.columns if t not in ['sindex_r', 'tbond_d']] self.categorical = categorical.loc[tickers_] return self.categorical def categorical_summary(self): industry_count = len(self.industries) dicts = [{'混合型':'hybrid', '股票型':'stock', '股票指数':'stock index'}, {'大盘价值':'large value', '大盘平衡':'large balanced', '大盘成长':'large growth', '中盘价值':'mid value', '中盘平衡':'mid balanced', '中盘成长':'mid growth', '小盘价值':'small value', '小盘平衡':'small balanced', '小盘成长':'small growth'}] groups = ['fund_type', 'fund_style', 'asset_size', 'ranking_score'] industry_w = self.categorical.iloc[:,-industry_count:] allocation = self.categorical.loc[:,['current_stocks','current_bonds','current_cash']] fig = plt.figure(figsize=(14,3.5)) cmap = plt.get_cmap('tab20b') fig.suptitle('Categorical Features', fontsize=16) for i,feat in enumerate(groups): ax = fig.add_subplot(1,4,i+1) fig.tight_layout() grouped = self.categorical[groups].groupby(feat)[feat] ax.pie(grouped.count(), radius=0.85, wedgeprops=dict(width=0.15), colors=sns.color_palette('twilight_shifted', n_colors=len(grouped)), autopct=lambda pct: '{:.2f}%\n({:.0f})'.format(pct, self.categorical.shape[0]*pct/100) if pct>5 else '') legend_param = {'loc':'lower center', 'ncol':2} if i in [0, 1]: plt.legend([dicts[i][idx] for idx, group in grouped], **legend_param) else: plt.legend([idx for idx, group in grouped], **legend_param) plt.title(re.sub('_',' ', feat).capitalize(), fontsize=13, pad=-20) fig, ax1 = plt.subplots(figsize=(12,4)) sns.boxplot(data=pd.concat((allocation, industry_w), axis=1), ax=ax1, width=0.4, palette=sns.color_palette('Set2', n_colors=len(industry_w.columns)+3)) plt.xticks(range(len(industry_w.columns)+3), ['Stocks', 'Bonds', 'Cash']+['Industry '+col.strip('ind_') for col in industry_w.columns], rotation=90) plt.ylabel('Weights') plt.show() industry_dict = {'住宿和餐饮业':'Hospitality & Catering', '租赁和商务服务业':'Lease & Business Services', '水利、环境和公共设施管理业':'Water Conservancy, Environment & Public Facilities Management', '金融业':'Finance', '文化、体育和娱乐业':'Culture, Sports & Entertainment', '房地产业':'Real Estate', '科学研究和技术服务业':'Scientific Research & Technical Services', '交通运输、仓储和邮政业':'Transportation, Warehousing & Postal Services', '批发和零售业':'Wholesale & Retail Trade', '卫生和社会工作':'Health & Social Work', 
'农、林、牧、渔业':'Agriculture, Forestry, Animal Husbandry & Fishery', '综合':'Comprehensive', '电力、热力、燃气及水生产和供应业':'Power, Heat, Gas & Water Production and Supply', '建筑业':'Construction', '制造业':'Manufacturing', '采矿业':'Mining', '信息传输、软件和信息技术服务业':'Information Transmission, Software & Information Technology Services', '教育':'Education'} industries_ = self.industries + [industry_dict[ind] for ind in self.industries] industries_ = pd.DataFrame(np.array(industries_).reshape(2,-1), columns=industry_w.columns, index=['行业','Industry']) summary = industry_w.describe()[1:].applymap(lambda x: round(x,4)) summary = pd.concat((industries_, summary),axis=0) return summary def quick_prepossessing(self): self.sql_queries() sindex = self.get_index() selected_tickers = self.ticker_filter(show_results=False) funds = self.get_funds(selected_tickers, sindex['sindex_r'], show_results=False) categorical = self.build_categories() return funds, categorical def save_files(self, path): date = self.funds.index[-1].strftime('%Y%m%d') self.funds.to_csv(path+f'funds_{date}.csv') self.categorical.to_csv(path+f'categorical_{date}.csv', index=False)
MIT License
authorizon/opal
opal_common/git/repo_cloner.py
RepoCloner.__init__
python
def __init__( self, repo_url: str, clone_path: str, branch_name: str = "master", retry_config = None, ssh_key: Optional[str] = None, ssh_key_file_path: Optional[str] = None, clone_timeout: int = 0, ): if repo_url is None: raise ValueError("must provide repo url!") self.url = repo_url self.path = os.path.expanduser(clone_path) self.branch_name = branch_name self._ssh_key = ssh_key self._ssh_key_file_path = ssh_key_file_path or opal_common_config.GIT_SSH_KEY_FILE self._retry_config = retry_config if retry_config is not None else self.DEFAULT_RETRY_CONFIG if clone_timeout > 0: self._retry_config.update({'stop': stop.stop_after_delay(clone_timeout)})
Inits the repo cloner. Args: repo_url (str): the url of the remote repo we want to clone clone_path (str): the target local path in our file system the repo should be cloned to branch_name (str): the branch to clone (defaults to "master") retry_config (dict): Tenacity.retry config (@see https://tenacity.readthedocs.io/en/latest/api.html#retry-main-api) ssh_key (str, optional): private ssh key used to gain access to the cloned repo ssh_key_file_path (str, optional): local path to save the private ssh key contents clone_timeout (int, optional): overall time limit in seconds for clone retries; 0 (the default) means no limit
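A hedged construction example; the repository URL, local clone path, and timeout below are placeholders:

cloner = RepoCloner(
    repo_url="git@github.com:example-org/policy-repo.git",
    clone_path="~/opal/policy-repo",
    branch_name="main",
    clone_timeout=120,  # stop retrying the clone after two minutes
)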
https://github.com/authorizon/opal/blob/9b88bab67c1696308131f23d20f3fd6ddb62c0f1/opal_common/git/repo_cloner.py#L106-L137
import os import uuid import shutil import asyncio from functools import partial from typing import Optional, Generator from pathlib import Path from tenacity import retry, wait, stop, RetryError from git import Repo, GitError, GitCommandError from opal_common.logger import logger from opal_common.git.exceptions import GitFailed from opal_common.config import opal_common_config from opal_common.utils import get_filepaths_with_glob SSH_PREFIX = "ssh://" GIT_SSH_USER_PREFIX = "git@" def is_ssh_repo_url(repo_url: str): return repo_url.startswith(SSH_PREFIX) or repo_url.startswith(GIT_SSH_USER_PREFIX) class CloneResult: def __init__(self, repo: Repo): self._repo = repo @property def repo(self) -> Repo: return self._repo class RepoClonePathFinder: def __init__(self, base_clone_path: str, clone_subdirectory_prefix: str): if not base_clone_path: raise ValueError("base_clone_path cannot be empty!") if not clone_subdirectory_prefix: raise ValueError("clone_subdirectory_prefix cannot be empty!") self._base_clone_path = os.path.expanduser(base_clone_path) self._clone_subdirectory_prefix = clone_subdirectory_prefix def get_clone_subdirectories(self) -> Generator[str, None, None]: folders_with_pattern = get_filepaths_with_glob(self._base_clone_path, f"{self._clone_subdirectory_prefix}*") for folder in folders_with_pattern: yield folder def get_single_clone_path(self) -> Optional[str]: subdirectories = list(self.get_clone_subdirectories()) if len(subdirectories) != 1: return None return subdirectories[0] def create_new_clone_path(self) -> str: folder_name = f"{self._clone_subdirectory_prefix}-{uuid.uuid4().hex}" full_local_repo_path = os.path.join(self._base_clone_path, folder_name) os.makedirs(full_local_repo_path, exist_ok=True) return full_local_repo_path class RepoCloner: DEFAULT_RETRY_CONFIG = { 'wait': wait.wait_random_exponential(multiplier=0.5, max=30), }
Apache License 2.0
autodesk/aomi
aomi/error.py
output
python
def output(message, opt, extra=None): print(message, file=sys.stderr) if opt.verbose: if extra: print(extra) traceback.print_exc(file=sys.stderr)
Politely display an unexpected error
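A small sketch of calling it directly; `opt` only needs a `verbose` attribute, so an argparse Namespace stands in for the real option object:

from argparse import Namespace

try:
    {}['missing']
except KeyError as exc:
    # Prints the message to stderr and, because verbose is set,
    # the current traceback as well.
    output("Unexpected error: %s" % exc, Namespace(verbose=True))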
https://github.com/autodesk/aomi/blob/84da2dfb0424837adf9c4ddc1aa352e942bb7a4a/aomi/error.py#L25-L32
from __future__ import print_function import sys import traceback def unhandled(exception, opt): exmod = type(exception).__module__ name = "%s.%s" % (exmod, type(exception).__name__) if exmod == 'aomi.exceptions' or exmod == 'cryptorito': if hasattr(exception, 'source'): output(exception.message, opt, extra=exception.source) else: output(exception.message, opt) else: output("Unexpected error: %s" % name, opt) sys.exit(1)
MIT License
google-research/morph-net
morph_net/framework/concat_and_slice_regularizers.py
SlicingReferenceRegularizer.__init__
python
def __init__(self, get_regularizer_to_slice, begin, size): self._get_regularizer_to_slice = get_regularizer_to_slice self._begin = begin self._size = size self._alive_vector = None self._regularization_vector = None
Creates an instance. Args: get_regularizer_to_slice: A callable, such that get_regularizer_to_slice() returns an OpRegularizer that has to be sliced. begin: An integer, where to begin the slice. size: An integer, the length of the slice (so the slice ends at begin + size).
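For instance, if a concatenated op exposes 32 channels and the slice of interest covers channels 8..23, the regularizer could be wired up roughly as below (`concat_reg` is assumed to be an existing OpRegularizer):

slice_reg = SlicingReferenceRegularizer(
    get_regularizer_to_slice=lambda: concat_reg,  # resolved lazily
    begin=8,
    size=16)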
https://github.com/google-research/morph-net/blob/49c5679e03c79e56ac013c7b62a88e5d893b9d14/morph_net/framework/concat_and_slice_regularizers.py#L75-L89
from __future__ import absolute_import from __future__ import division from __future__ import print_function from morph_net.framework import generic_regularizers import tensorflow.compat.v1 as tf class ConcatRegularizer(generic_regularizers.OpRegularizer): def __init__(self, regularizers_to_concatenate): for r in regularizers_to_concatenate: if not generic_regularizers.dimensions_are_compatible(r): raise ValueError('Bad regularizer: dimensions are not compatible') self._alive_vector = tf.concat( [r.alive_vector for r in regularizers_to_concatenate], 0) self._regularization_vector = tf.concat( [r.regularization_vector for r in regularizers_to_concatenate], 0) @property def regularization_vector(self): return self._regularization_vector @property def alive_vector(self): return self._alive_vector class SlicingReferenceRegularizer(generic_regularizers.OpRegularizer):
Apache License 2.0
jsxlei/scale
scale/specifity.py
cluster_specific
python
def cluster_specific(score_mat, classes=None, top=0): scores = score_mat.max(1) peak_labels = np.argmax(score_mat.values, axis=1) inds = [] labels = [] if classes is None: classes = np.unique(peak_labels) for i in classes: index = np.where(peak_labels==i)[0] ind = np.argsort(scores[index])[-top:] ind = index[ind] inds.append(ind) labels.append(peak_labels[ind]) return np.concatenate(inds), np.concatenate(labels)
Identify the top cluster-specific peaks for each cluster. Input: score_mat calculated by mat_specificity_score. Return: indices of the specific peaks and their cluster labels.
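A minimal end-to-end sketch with random data; rows of `mat` are peaks, columns are cells, and `labels` assigns each cell to one of three clusters:

import numpy as np
import pandas as pd

mat = pd.DataFrame(np.random.rand(100, 50))     # peaks x cells
labels = np.random.randint(0, 3, size=50)       # cluster label per cell
score_mat = mat_specificity_score(mat, labels)  # peaks x clusters
peak_idx, peak_labels = cluster_specific(score_mat, top=10)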
https://github.com/jsxlei/scale/blob/ea9351387bf63bee45ede86199929f54dd673d3b/scale/specifity.py#L64-L85
import numpy as np import pandas as pd import scipy as sp def jsd(p, q, base=np.e): p, q = np.asarray(p), np.asarray(q) p, q = p/p.sum(), q/q.sum() m = 1./2*(p + q) return sp.stats.entropy(p,m, base=base)/2. + sp.stats.entropy(q, m, base=base)/2. def jsd_sp(p, q, base=np.e): return 1- jsd(p, q, base=np.e)**0.5 def log2norm(e): loge = np.log2(e+1) return loge/sum(loge) def predefined_pattern(t, labels): q = np.zeros(len(labels)) q[np.where(labels==t)[0]] = 1 return q def vec_specificity_score(e, t, labels): e = log2norm(e) et = log2norm(predefined_pattern(t, labels)) return jsd_sp(e, et) def mat_specificity_score(mat, labels): scores = [] for i in np.unique(labels): score = mat.apply(lambda x: vec_specificity_score(x, i, labels), axis=1) scores.append(score) return pd.concat(scores, axis=1)
MIT License
marineam/nagcat
python/twirrdy/twist.py
RRDTwistedAPI._update_cache
python
def _update_cache(self, filename, timestamp, values, defer=None): if defer is None: defer = self._defer if not defer: super(RRDTwistedAPI, self).update(filename, timestamp, values) else: assert self._client filename = self._escape_filename(filename) line = "UPDATE %s %s:%s" % (filename, int(timestamp), ':'.join(str(v) for v in values)) return self._client.sendLine(line)
Update via rrdcached
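A hedged example of routing updates through rrdcached once the daemon connection is open; the socket path, RRD filename, timestamp, and values are placeholders:

api = RRDTwistedAPI()
d = api.open("/var/run/rrdcached.sock")

def send_update(_):
    # After open(), api.update is bound to the rrdcached code path.
    return api.update("/var/lib/rrd/example.rrd", 1609459200, [42.0, 7.5])

d.addCallback(send_update)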
https://github.com/marineam/nagcat/blob/445d0efe1fb2ec93c31d1f9d8fa0c0563189ffaf/python/twirrdy/twist.py#L126-L140
import os import stat from twisted.internet import defer, error, reactor, threads from twirrdy import RRDBasicAPI, OrderedDict from twirrdy import protocol def issock(path): mode = os.stat(path)[stat.ST_MODE] return stat.S_ISSOCK(mode) class RRDTwistedAPI(RRDBasicAPI): def __init__(self, defer=True): self._defer = defer self._client = None self.update = self._update_direct def open(self, address, pidfile=None): deferred = defer.Deferred() self._client = protocol.RRDCacheClient(deferred) reactor.connectUNIX(address, self._client, checkPID=pidfile) self.update = self._update_cache return deferred def close(self): def filter_done(result): if isinstance(result.value, error.ConnectionDone): return None else: return result assert self._client self._client.stopTrying() deferred = self._client.sendLine('QUIT') deferred.addErrback(filter_done) self._client = None self.update = self._update_direct return deferred def flush(self, filename): assert self._client filename = self._escape_filename(filename) return self._client.sendLine("FLUSH %s" % filename) def pending(self, filename): def parse_value(value): if value == "U": return None else: return float(value) def parse_line(line): fields = line.split(':') ds_time = int(fields.pop(0)) ds_values = [parse_value(v) for v in fields] return ds_time, ds_values def parse_result(result): lines = result.splitlines() updates = [parse_line(l) for l in lines[1:]] updates.sort() return updates def mask_error(failure): if (isinstance(failure.value, protocol.RRDCacheError) and failure.value.args[0] == "-1 No such file or directory"): return [] else: return failure assert self._client filename = self._escape_filename(filename) d = self._client.sendLine("PENDING %s" % filename) d.addCallbacks(parse_result, mask_error) return d def _escape_filename(self, filename): return filename.replace('\\', '\\\\').replace(' ', '\\ ')
Apache License 2.0